From e1a7d4f24a969ec4e47b97fbb6fe55c1ef9984d6 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 12 Feb 2014 10:40:42 +0000 Subject: [PATCH 001/769] Initial open sourcing --- services/document-updater/.gitignore | 46 + services/document-updater/Gruntfile.coffee | 111 + services/document-updater/app.coffee | 68 + .../document-updater/app/DocumentUpdater.js | 181 ++ .../app/coffee/DiffCodec.coffee | 31 + .../app/coffee/DocOpsManager.coffee | 127 + .../app/coffee/DocumentManager.coffee | 127 + .../document-updater/app/coffee/Errors.coffee | 10 + .../app/coffee/HttpController.coffee | 85 + .../app/coffee/LockManager.coffee | 55 + .../app/coffee/Metrics.coffee | 23 + .../app/coffee/PersistenceManager.coffee | 66 + .../app/coffee/ProjectManager.coffee | 60 + .../app/coffee/RedisKeyBuilder.coffee | 28 + .../app/coffee/RedisManager.coffee | 184 ++ .../app/coffee/ShareJsDB.coffee | 58 + .../app/coffee/ShareJsUpdateManager.coffee | 68 + .../app/coffee/UpdateManager.coffee | 79 + .../app/coffee/mongojs.coffee | 7 + .../app/coffee/sharejs/README.md | 48 + .../app/coffee/sharejs/count.coffee | 22 + .../app/coffee/sharejs/helpers.coffee | 65 + .../app/coffee/sharejs/index.coffee | 15 + .../app/coffee/sharejs/json-api.coffee | 180 ++ .../app/coffee/sharejs/json.coffee | 441 ++++ .../app/coffee/sharejs/model.coffee | 603 +++++ .../app/coffee/sharejs/server/model.coffee | 603 +++++ .../coffee/sharejs/server/syncqueue.coffee | 42 + .../app/coffee/sharejs/simple.coffee | 38 + .../app/coffee/sharejs/syncqueue.coffee | 42 + .../app/coffee/sharejs/text-api.coffee | 32 + .../coffee/sharejs/text-composable-api.coffee | 43 + .../app/coffee/sharejs/text-composable.coffee | 261 ++ .../app/coffee/sharejs/text-tp2-api.coffee | 89 + .../app/coffee/sharejs/text-tp2.coffee | 322 +++ .../app/coffee/sharejs/text.coffee | 209 ++ .../app/coffee/sharejs/types/count.coffee | 22 + .../app/coffee/sharejs/types/helpers.coffee | 65 + .../app/coffee/sharejs/types/index.coffee | 15 + .../app/coffee/sharejs/types/json-api.coffee | 180 ++ .../app/coffee/sharejs/types/json.coffee | 441 ++++ .../app/coffee/sharejs/types/model.coffee | 603 +++++ .../app/coffee/sharejs/types/simple.coffee | 38 + .../app/coffee/sharejs/types/syncqueue.coffee | 42 + .../app/coffee/sharejs/types/text-api.coffee | 32 + .../sharejs/types/text-composable-api.coffee | 43 + .../sharejs/types/text-composable.coffee | 261 ++ .../coffee/sharejs/types/text-tp2-api.coffee | 89 + .../app/coffee/sharejs/types/text-tp2.coffee | 322 +++ .../app/coffee/sharejs/types/text.coffee | 209 ++ .../coffee/sharejs/types/web-prelude.coffee | 11 + .../app/coffee/sharejs/web-prelude.coffee | 11 + .../app/lib/diff_match_patch.js | 2193 +++++++++++++++++ .../config/settings.development.coffee | 23 + services/document-updater/package.json | 30 + .../coffee/ApplyingUpdatesToADocTests.coffee | 215 ++ .../coffee/DeletingADocumentTests.coffee | 89 + .../coffee/DeletingAProjectTests.coffee | 81 + .../coffee/FlushingAProjectTests.coffee | 76 + .../coffee/FlushingDocsTests.coffee | 97 + .../coffee/GettingADocumentTests.coffee | 107 + .../coffee/SettingADocumentTests.coffee | 58 + .../coffee/helpers/DocUpdaterClient.coffee | 66 + .../coffee/helpers/MockWebApi.coffee | 40 + .../unit/coffee/AddingDocsToMemory.coffee | 58 + .../unit/coffee/CheckingUpdatesLength.coffee | 27 + .../coffee/DiffCodec/DiffCodecTests.coffee | 56 + .../DocOpsManager/DocOpsManagerTests.coffee | 309 +++ .../flushAndDeleteDocTests.coffee | 41 + .../DocumentManager/flushDocTests.coffee | 73 + 
.../getDocAndRecentOpsTests.coffee | 67 + .../coffee/DocumentManager/getDocTests.coffee | 75 + .../coffee/DocumentManager/setDocTests.coffee | 105 + .../test/unit/coffee/GettingDoc.coffee | 41 + .../coffee/GettingListOfPendingUpdates.coffee | 42 + .../coffee/GettingTotalNumberOfDocs.coffee | 47 + .../HttpController/deleteProjectTests.coffee | 63 + .../flushAndDeleteDocTests.coffee | 64 + .../flushDocIfLoadedTests.coffee | 65 + .../HttpController/flushProjectTests.coffee | 62 + .../coffee/HttpController/getDocTests.coffee | 110 + .../coffee/HttpController/setDocTests.coffee | 67 + .../coffee/LockManager/CheckingTheLock.coffee | 50 + .../LockManager/ReleasingTheLock.coffee | 28 + .../coffee/LockManager/getLockTests.coffee | 69 + .../coffee/LockManager/tryLockTests.coffee | 37 + .../PersistenceManager/getDocTests.coffee | 85 + .../PersistenceManager/setDocTests.coffee | 86 + .../flushAndDeleteProjectTests.coffee | 75 + .../ProjectManager/flushProjectTests.coffee | 75 + .../clearDocFromPendingUpdatesSetTests.coffee | 27 + .../getDocsWithPendingUpdatesTests.coffee | 33 + .../getPendingUpdatesForDocTests.coffee | 56 + .../getPreviousDocOpsTests.coffee | 99 + .../RedisManager/prependDocOpsTests.coffee | 32 + .../coffee/RedisManager/pushDocOpTests.coffee | 37 + .../coffee/RemovingSingleDocFromMemory.coffee | 73 + .../unit/coffee/ShareJsDB/GetOpsTests.coffee | 54 + .../coffee/ShareJsDB/GetSnapshotTests.coffee | 85 + .../coffee/ShareJsDB/WriteOpsTests.coffee | 53 + .../coffee/ShareJsUpdateManagerTests.coffee | 174 ++ .../UpdateManager/ApplyingUpdates.coffee | 198 ++ .../lockUpdatesAndDoTests.coffee | 83 + .../test/unit/js/module-loader.js | 29 + 104 files changed, 12838 insertions(+) create mode 100644 services/document-updater/.gitignore create mode 100644 services/document-updater/Gruntfile.coffee create mode 100644 services/document-updater/app.coffee create mode 100644 services/document-updater/app/DocumentUpdater.js create mode 100644 services/document-updater/app/coffee/DiffCodec.coffee create mode 100644 services/document-updater/app/coffee/DocOpsManager.coffee create mode 100644 services/document-updater/app/coffee/DocumentManager.coffee create mode 100644 services/document-updater/app/coffee/Errors.coffee create mode 100644 services/document-updater/app/coffee/HttpController.coffee create mode 100644 services/document-updater/app/coffee/LockManager.coffee create mode 100644 services/document-updater/app/coffee/Metrics.coffee create mode 100644 services/document-updater/app/coffee/PersistenceManager.coffee create mode 100644 services/document-updater/app/coffee/ProjectManager.coffee create mode 100644 services/document-updater/app/coffee/RedisKeyBuilder.coffee create mode 100644 services/document-updater/app/coffee/RedisManager.coffee create mode 100644 services/document-updater/app/coffee/ShareJsDB.coffee create mode 100644 services/document-updater/app/coffee/ShareJsUpdateManager.coffee create mode 100644 services/document-updater/app/coffee/UpdateManager.coffee create mode 100644 services/document-updater/app/coffee/mongojs.coffee create mode 100644 services/document-updater/app/coffee/sharejs/README.md create mode 100644 services/document-updater/app/coffee/sharejs/count.coffee create mode 100644 services/document-updater/app/coffee/sharejs/helpers.coffee create mode 100644 services/document-updater/app/coffee/sharejs/index.coffee create mode 100644 services/document-updater/app/coffee/sharejs/json-api.coffee create mode 100644 services/document-updater/app/coffee/sharejs/json.coffee 
create mode 100644 services/document-updater/app/coffee/sharejs/model.coffee create mode 100644 services/document-updater/app/coffee/sharejs/server/model.coffee create mode 100644 services/document-updater/app/coffee/sharejs/server/syncqueue.coffee create mode 100644 services/document-updater/app/coffee/sharejs/simple.coffee create mode 100644 services/document-updater/app/coffee/sharejs/syncqueue.coffee create mode 100644 services/document-updater/app/coffee/sharejs/text-api.coffee create mode 100644 services/document-updater/app/coffee/sharejs/text-composable-api.coffee create mode 100644 services/document-updater/app/coffee/sharejs/text-composable.coffee create mode 100644 services/document-updater/app/coffee/sharejs/text-tp2-api.coffee create mode 100644 services/document-updater/app/coffee/sharejs/text-tp2.coffee create mode 100644 services/document-updater/app/coffee/sharejs/text.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/count.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/helpers.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/index.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/json-api.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/json.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/model.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/simple.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/syncqueue.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/text-api.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/text-composable-api.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/text-composable.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/text-tp2-api.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/text-tp2.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/text.coffee create mode 100644 services/document-updater/app/coffee/sharejs/types/web-prelude.coffee create mode 100644 services/document-updater/app/coffee/sharejs/web-prelude.coffee create mode 100644 services/document-updater/app/lib/diff_match_patch.js create mode 100755 services/document-updater/config/settings.development.coffee create mode 100644 services/document-updater/package.json create mode 100644 services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee create mode 100644 services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee create mode 100644 services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee create mode 100644 services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee create mode 100644 services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee create mode 100644 services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee create mode 100644 services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee create mode 100644 services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee create mode 100644 services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee create mode 100644 services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee create mode 100644 services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee 
create mode 100644 services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.coffee create mode 100644 services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee create mode 100644 services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee create mode 100644 services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/GettingDoc.coffee create mode 100644 services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee create mode 100644 services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee create mode 100644 services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee create mode 100644 services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee create mode 100644 services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee create mode 100644 services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee create mode 100644 services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee create mode 100644 services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee create mode 100644 services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee create mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee create mode 100644 services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee create mode 100644 services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee create mode 100644 services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee create mode 100644 services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee create mode 100644 services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee create mode 100644 services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee create mode 100644 services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee create mode 100644 services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee create mode 100644 services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee create mode 100644 services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee create mode 100644 services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee create mode 100644 services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee create mode 100644 
services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee
create mode 100644 services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee
create mode 100644 services/document-updater/test/unit/js/module-loader.js

diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore
new file mode 100644
index 0000000000..5755e37b12
--- /dev/null
+++ b/services/document-updater/.gitignore
@@ -0,0 +1,46 @@
+compileFolder
+
+# Compiled source #
+###################
+*.com
+*.class
+*.dll
+*.exe
+*.o
+*.so
+
+# Packages #
+############
+# it's better to unpack these files and commit the raw source
+# git has its own built in compression methods
+*.7z
+*.dmg
+*.gz
+*.iso
+*.jar
+*.rar
+*.tar
+*.zip
+
+# Logs and databases #
+######################
+*.log
+*.sql
+*.sqlite
+
+# OS generated files #
+######################
+.DS_Store?
+ehthumbs.db
+Icon?
+Thumbs.db
+
+/node_modules/*
+
+app.js
+app/js/*
+
+test/unit/js/*
+test/acceptance/js/*
+
+**.swp
diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee
new file mode 100644
index 0000000000..30dd63e708
--- /dev/null
+++ b/services/document-updater/Gruntfile.coffee
@@ -0,0 +1,111 @@
+module.exports = (grunt) ->
+  grunt.loadNpmTasks 'grunt-contrib-coffee'
+  grunt.loadNpmTasks 'grunt-contrib-clean'
+  grunt.loadNpmTasks 'grunt-mocha-test'
+  grunt.loadNpmTasks 'grunt-available-tasks'
+  grunt.loadNpmTasks 'grunt-execute'
+  grunt.loadNpmTasks 'grunt-bunyan'
+
+  grunt.initConfig
+    execute:
+      app:
+        src: "app.js"
+
+    bunyan:
+      strict: false
+
+    coffee:
+      app_dir:
+        expand: true,
+        flatten: false,
+        cwd: 'app/coffee',
+        src: ['**/*.coffee'],
+        dest: 'app/js/',
+        ext: '.js'
+
+      app:
+        src: 'app.coffee'
+        dest: 'app.js'
+
+      acceptance_tests:
+        expand: true,
+        flatten: false,
+        cwd: 'test/acceptance/coffee',
+        src: ['**/*.coffee'],
+        dest: 'test/acceptance/js/',
+        ext: '.js'
+
+      unit_tests:
+        expand: true,
+        flatten: false,
+        cwd: 'test/unit/coffee',
+        src: ['**/*.coffee'],
+        dest: 'test/unit/js/',
+        ext: '.js'
+
+    clean:
+      app: ["app/js"]
+      acceptance_tests: ["test/acceptance/js"]
+
+    mochaTest:
+      unit:
+        src: ['test/unit/js/**/*.js']
+        options:
+          reporter: grunt.option('reporter') or 'spec'
+          grep: grunt.option("grep")
+      acceptance:
+        src: ['test/acceptance/js/**/*.js']
+        options:
+          reporter: grunt.option('reporter') or 'spec'
+          grep: grunt.option("grep")
+          timeout: 10000
+
+    availabletasks:
+      tasks:
+        options:
+          filter: 'exclude',
+          tasks: [
+            'coffee'
+            'clean'
+            'mochaTest'
+            'availabletasks'
+            'execute'
+            'bunyan'
+          ]
+          groups:
+            "Compile tasks": [
+              "compile:server"
+              "compile:tests"
+              "compile"
+              "compile:unit_tests"
+              "compile:acceptance_tests"
+              "install"
+            ]
+            "Test tasks": [
+              "test:unit"
+              "test:acceptance"
+            ]
+            "Run tasks": [
+              "run"
+              "default"
+            ]
+            "Misc": [
+              "help"
+            ]
+
+  grunt.registerTask 'help', 'Display this help list', 'availabletasks'
+
+  grunt.registerTask 'compile:server', 'Compile the server side coffee script', ['clean:app', 'coffee:app', 'coffee:app_dir']
+  grunt.registerTask 'compile:unit_tests', 'Compile the unit tests', ['coffee:unit_tests']
+  grunt.registerTask 'compile:acceptance_tests', 'Compile the acceptance tests', ['clean:acceptance_tests', 'coffee:acceptance_tests']
+  grunt.registerTask 'compile:tests', 'Compile all the tests', ['compile:acceptance_tests', 'compile:unit_tests']
+  grunt.registerTask 'compile', 'Compiles everything needed to run document-updater-sharelatex', ['compile:server']
+
+  grunt.registerTask 'install', "Compile everything when installing as an npm module", ['compile']
+
+  grunt.registerTask 'test:unit', 'Run the unit tests (use --grep= for individual tests)', ['compile:unit_tests', 'mochaTest:unit']
+  grunt.registerTask 'test:acceptance', 'Run the acceptance tests (use --grep= for individual tests)', ['compile:acceptance_tests', 'mochaTest:acceptance']
+
+  grunt.registerTask 'run', "Compile and run the document-updater-sharelatex server", ['compile', 'bunyan', 'execute']
+  grunt.registerTask 'default', 'run'
diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee
new file mode 100644
index 0000000000..f35b50fe30
--- /dev/null
+++ b/services/document-updater/app.coffee
@@ -0,0 +1,68 @@
+express = require('express')
+http = require("http")
+Settings = require('settings-sharelatex')
+logger = require('logger-sharelatex')
+logger.initialize("documentupdater")
+RedisManager = require('./app/js/RedisManager.js')
+UpdateManager = require('./app/js/UpdateManager.js')
+Keys = require('./app/js/RedisKeyBuilder')
+redis = require('redis')
+rclient = redis.createClient(Settings.redis.port, Settings.redis.host)
+rclient.auth(Settings.redis.password)
+metrics = require('./app/js/Metrics')
+Errors = require "./app/js/Errors"
+HttpController = require "./app/js/HttpController"
+
+app = express()
+app.configure ->
+  app.use(express.logger(':remote-addr - [:date] - :user-agent ":method :url" :status - :response-time ms'))
+  app.use express.bodyParser()
+  app.use app.router
+
+app.configure 'development', ()->
+  console.log "Development Environment"
+  app.use express.errorHandler({ dumpExceptions: true, showStack: true })
+
+app.configure 'production', ()->
+  console.log "Production Environment"
+  app.use express.logger()
+  app.use express.errorHandler()
+
+rclient.subscribe("pending-updates")
+rclient.on "message", (channel, doc_key)->
+  [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
+  UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) ->
+    logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error?
+UpdateManager.resumeProcessing()
+
+app.use (req, res, next)->
+  metrics.inc "http-request"
+  next()
+
+app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc
+app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc
+app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded
+app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc
+app.delete '/project/:project_id', HttpController.deleteProject
+app.post '/project/:project_id/flush', HttpController.flushProject
+
+app.get '/total', (req, res)->
+  timer = new metrics.Timer("http.allDocList")
+  RedisManager.getCountOfDocsInMemory (err, count)->
+    timer.done()
+    res.send {total:count}
+
+app.get '/status', (req, res)->
+  res.send('document updater is alive')
+
+app.use (error, req, res, next) ->
+  logger.error err: error, "request errored"
+  if error instanceof Errors.NotFoundError
+    res.send 404
+  else
+    res.send(500, "Oops, something went wrong")
+
+port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 3003
+app.listen port, "localhost", ->
+  logger.log("documentupdater-sharelatex server listening on port #{port}")
diff --git a/services/document-updater/app/DocumentUpdater.js b/services/document-updater/app/DocumentUpdater.js
new file mode 100644
index 0000000000..11e69368dd
--- /dev/null
+++ b/services/document-updater/app/DocumentUpdater.js
@@ -0,0 +1,181 @@
+(function(exports){
+  var Ace = require('aceserverside-sharelatex');
+  var Range = Ace.Range;
+
+  //look at applyDeltas method
+  exports.applyChange = function(aceDoc, change, callback) {
+    var r = change.range;
+    var range = new Range(r.start.row, r.start.column, r.end.row, r.end.column);
+    if('insertText'==change.action){
+      aceDoc.insert(change.range.start, change.text);
+    }else if('insertLines'==change.action){
+      aceDoc.insertLines(change.range.start.row, change.lines);
+    }else if('removeText'==change.action){
+      aceDoc.remove(range);
+    }else if('removeLines'==change.action){
+      aceDoc.removeLines(range.start.row, range.end.row-1);
+    }
+
+    if(typeof callback === 'function'){
+      callback(null, aceDoc);
+    }
+  }
+
+})(typeof exports === 'undefined'? this['documentUpdater']={}: exports);
diff --git a/services/document-updater/app/coffee/DiffCodec.coffee b/services/document-updater/app/coffee/DiffCodec.coffee
new file mode 100644
index 0000000000..ba5966648e
--- /dev/null
+++ b/services/document-updater/app/coffee/DiffCodec.coffee
@@ -0,0 +1,31 @@
+diff_match_patch = require("../lib/diff_match_patch").diff_match_patch
+dmp = new diff_match_patch()
+
+module.exports = DiffCodec =
+  ADDED: 1
+  REMOVED: -1
+  UNCHANGED: 0
+
+  diffAsShareJsOp: (before, after, callback = (error, ops) ->) ->
+    diffs = dmp.diff_main(before.join("\n"), after.join("\n"))
+    dmp.diff_cleanupSemantic(diffs)
+
+    ops = []
+    position = 0
+    for diff in diffs
+      type = diff[0]
+      content = diff[1]
+      if type == @ADDED
+        ops.push
+          i: content
+          p: position
+        position += content.length
+      else if type == @REMOVED
+        ops.push
+          d: content
+          p: position
+      else if type == @UNCHANGED
+        position += content.length
+      else
+        throw new Error("Unknown type")
+    callback null, ops
diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee b/services/document-updater/app/coffee/DocOpsManager.coffee
new file mode 100644
index 0000000000..0e90f5b462
--- /dev/null
+++ b/services/document-updater/app/coffee/DocOpsManager.coffee
@@ -0,0 +1,127 @@
+RedisManager = require "./RedisManager"
+mongojs = require("./mongojs")
+db = mongojs.db
+ObjectId = mongojs.ObjectId
+logger = require "logger-sharelatex"
+async = require "async"
+Metrics = require("./Metrics")
+
+module.exports = DocOpsManager =
+  flushDocOpsToMongo: (project_id, doc_id, _callback = (error) ->) ->
+    timer = new Metrics.Timer("docOpsManager.flushDocOpsToMongo")
+    callback = (args...) ->
+      timer.done()
+      _callback(args...)
+
+    DocOpsManager.getDocVersionInMongo doc_id, (error, mongoVersion) ->
+      return callback(error) if error?
+      RedisManager.getDocVersion doc_id, (error, redisVersion) ->
+        return callback(error) if error?
+        if !mongoVersion? or !redisVersion? or mongoVersion > redisVersion
+          logger.error doc_id: doc_id, redisVersion: redisVersion, mongoVersion: mongoVersion, "mongo version is ahead of redis"
+          return callback(new Error("inconsistent versions"))
+
+        RedisManager.getPreviousDocOps doc_id, mongoVersion, -1, (error, ops) ->
+          return callback(error) if error?
+          if ops.length != redisVersion - mongoVersion
+            logger.error doc_id: doc_id, redisVersion: redisVersion, mongoVersion: mongoVersion, opsLength: ops.length, "version difference does not match ops length"
+            return callback(new Error("inconsistent versions"))
+          logger.log doc_id: doc_id, redisVersion: redisVersion, mongoVersion: mongoVersion, "flushing doc ops to mongo"
+          DocOpsManager._appendDocOpsInMongo doc_id, ops, redisVersion, (error) ->
+            return callback(error) if error?
+            callback null
+
+  getPreviousDocOps: (project_id, doc_id, start, end, _callback = (error, ops) ->) ->
+    timer = new Metrics.Timer("docOpsManager.getPreviousDocOps")
+    callback = (args...) ->
+      timer.done()
+      _callback(args...)
+
+    DocOpsManager._ensureOpsAreLoaded project_id, doc_id, start, (error) ->
+      return callback(error) if error?
+      RedisManager.getPreviousDocOps doc_id, start, end, (error, ops) ->
+        return callback(error) if error?
+        callback null, ops
+
+  pushDocOp: (project_id, doc_id, op, callback = (error) ->) ->
+    RedisManager.pushDocOp doc_id, op, callback
+
+  _ensureOpsAreLoaded: (project_id, doc_id, backToVersion, callback = (error) ->) ->
+    RedisManager.getDocVersion doc_id, (error, redisVersion) ->
+      return callback(error) if error?
+      RedisManager.getDocOpsLength doc_id, (error, opsLength) ->
+        return callback(error) if error?
+        oldestVersionInRedis = redisVersion - opsLength
+        if oldestVersionInRedis > backToVersion
+          # _getDocOpsFromMongo(doc_id, 4, 6, ...) will return the ops in positions 4 and 5, but not 6.
+          logger.log doc_id: doc_id, backToVersion: backToVersion, oldestVersionInRedis: oldestVersionInRedis, "loading old ops from mongo"
+          DocOpsManager._getDocOpsFromMongo doc_id, backToVersion, oldestVersionInRedis, (error, ops) ->
+            logger.log doc_id: doc_id, backToVersion: backToVersion, oldestVersionInRedis: oldestVersionInRedis, ops: ops, "loaded old ops from mongo"
+            return callback(error) if error?
+            RedisManager.prependDocOps doc_id, ops, (error) ->
+              return callback(error) if error?
+              callback null
+        else
+          logger.log doc_id: doc_id, backToVersion: backToVersion, oldestVersionInRedis: oldestVersionInRedis, "ops already in redis"
+          callback()
+
+  getDocVersionInMongo: (doc_id, callback = (error, version) ->) ->
+    t = new Metrics.Timer("mongo-time")
+    db.docOps.find {
+      doc_id: ObjectId(doc_id)
+    }, {
+      version: 1
+    }, (error, docs) ->
+      t.done()
+      return callback(error) if error?
+      if docs.length < 1 or !docs[0].version?
+        return callback null, 0
+      else
+        return callback null, docs[0].version
+
+  APPEND_OPS_BATCH_SIZE: 100
+
+  _appendDocOpsInMongo: (doc_id, docOps, newVersion, callback = (error) ->) ->
+    currentVersion = newVersion - docOps.length
+    batchSize = DocOpsManager.APPEND_OPS_BATCH_SIZE
+    noOfBatches = Math.ceil(docOps.length / batchSize)
+    if noOfBatches <= 0
+      return callback()
+    jobs = []
+    for batchNo in [0..(noOfBatches-1)]
+      do (batchNo) ->
+        jobs.push (callback) ->
+          batch = docOps.slice(batchNo * batchSize, (batchNo + 1) * batchSize)
+          currentVersion += batch.length
+          logger.log doc_id: doc_id, batchNo: batchNo, "appending doc op batch to Mongo"
+          t = new Metrics.Timer("mongo-time")
+          db.docOps.update {
+            doc_id: ObjectId(doc_id)
+          }, {
+            $push: docOps: { $each: batch, $slice: -100 }
+            $set: version: currentVersion
+          }, {
+            upsert: true
+          }, (err)->
+            t.done()
+            callback(err)
+
+    async.series jobs, (error) -> callback(error)
+
+  _getDocOpsFromMongo: (doc_id, start, end, callback = (error, ops) ->) ->
+    DocOpsManager.getDocVersionInMongo doc_id, (error, version) ->
+      return callback(error) if error?
+      offset = - (version - start) # Negative tells mongo to count from the end backwards
+      limit = end - start
+      t = new Metrics.Timer("mongo-time")
+      db.docOps.find {
+        doc_id: ObjectId(doc_id)
+      }, {
+        docOps: $slice: [offset, limit]
+      }, (error, docs) ->
+        t.done()
+        if docs.length < 1 or !docs[0].docOps?
+ return callback null, [] + else + return callback null, docs[0].docOps + diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee new file mode 100644 index 0000000000..aa64ac3d7f --- /dev/null +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -0,0 +1,127 @@ +RedisManager = require "./RedisManager" +PersistenceManager = require "./PersistenceManager" +DocOpsManager = require "./DocOpsManager" +DiffCodec = require "./DiffCodec" +logger = require "logger-sharelatex" +Metrics = require "./Metrics" + +module.exports = DocumentManager = + getDoc: (project_id, doc_id, _callback = (error, lines, version) ->) -> + timer = new Metrics.Timer("docManager.getDoc") + callback = (args...) -> + timer.done() + _callback(args...) + + RedisManager.getDoc doc_id, (error, lines, version) -> + return callback(error) if error? + if !lines? or !version? + logger.log project_id: project_id, doc_id: doc_id, "doc not in redis so getting from persistence API" + PersistenceManager.getDoc project_id, doc_id, (error, lines) -> + return callback(error) if error? + DocOpsManager.getDocVersionInMongo doc_id, (error, version) -> + return callback(error) if error? + logger.log project_id: project_id, doc_id: doc_id, lines: lines, version: version, "got doc from persistence API" + RedisManager.putDocInMemory project_id, doc_id, lines, version, (error) -> + return callback(error) if error? + callback null, lines, version + else + callback null, lines, version + + getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps) ->) -> + timer = new Metrics.Timer("docManager.getDocAndRecentOps") + callback = (args...) -> + timer.done() + _callback(args...) + + DocumentManager.getDoc project_id, doc_id, (error, lines, version) -> + return callback(error) if error? + if fromVersion == -1 + callback null, lines, version, [] + else + DocOpsManager.getPreviousDocOps project_id, doc_id, fromVersion, version, (error, ops) -> + return callback(error) if error? + callback null, lines, version, ops + + setDoc: (project_id, doc_id, newLines, _callback = (error) ->) -> + timer = new Metrics.Timer("docManager.setDoc") + callback = (args...) -> + timer.done() + _callback(args...) + + if !newLines? + return callback(new Error("No lines were provided to setDoc")) + + UpdateManager = require "./UpdateManager" + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version) -> + return callback(error) if error? + + if oldLines? and oldLines.length > 0 and oldLines[0].text? + logger.log doc_id: doc_id, project_id: project_id, oldLines: oldLines, newLines: newLines, "document is JSON so not updating" + return callback(null) + + logger.log doc_id: doc_id, project_id: project_id, oldLines: oldLines, newLines: newLines, "setting a document via http" + DiffCodec.diffAsShareJsOp oldLines, newLines, (error, op) -> + return callback(error) if error? + update = + doc: doc_id + op: op + v: version + meta: + type: "external" + UpdateManager.applyUpdates project_id, doc_id, [update], (error) -> + return callback(error) if error? + DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> + return callback(error) if error? + callback null + + + flushDocIfLoaded: (project_id, doc_id, _callback = (error) ->) -> + timer = new Metrics.Timer("docManager.flushDocIfLoaded") + callback = (args...) -> + timer.done() + _callback(args...) 
+ + RedisManager.getDoc doc_id, (error, lines, version) -> + return callback(error) if error? + if !lines? or !version? + logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing" + callback null + else + logger.log project_id: project_id, doc_id: doc_id, "flushing doc" + PersistenceManager.setDoc project_id, doc_id, lines, (error) -> + return callback(error) if error? + DocOpsManager.flushDocOpsToMongo project_id, doc_id, (error) -> + return callback(error) if error? + callback null + + flushAndDeleteDoc: (project_id, doc_id, _callback = (error) ->) -> + timer = new Metrics.Timer("docManager.flushAndDeleteDoc") + callback = (args...) -> + timer.done() + _callback(args...) + + DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> + return callback(error) if error? + RedisManager.removeDocFromMemory project_id, doc_id, (error) -> + return callback(error) if error? + callback null + + getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback + + getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback + + setDocWithLock: (project_id, doc_id, lines, callback = (error) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, callback + + flushDocIfLoadedWithLock: (project_id, doc_id, callback = (error) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.flushDocIfLoaded, project_id, doc_id, callback + + flushAndDeleteDocWithLock: (project_id, doc_id, callback = (error) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, callback diff --git a/services/document-updater/app/coffee/Errors.coffee b/services/document-updater/app/coffee/Errors.coffee new file mode 100644 index 0000000000..4a29822efc --- /dev/null +++ b/services/document-updater/app/coffee/Errors.coffee @@ -0,0 +1,10 @@ +NotFoundError = (message) -> + error = new Error(message) + error.name = "NotFoundError" + error.__proto__ = NotFoundError.prototype + return error +NotFoundError.prototype.__proto__ = Error.prototype + +module.exports = Errors = + NotFoundError: NotFoundError + diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee new file mode 100644 index 0000000000..391d02ee37 --- /dev/null +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -0,0 +1,85 @@ +DocumentManager = require "./DocumentManager" +ProjectManager = require "./ProjectManager" +Errors = require "./Errors" +logger = require "logger-sharelatex" +Metrics = require "./Metrics" + +module.exports = HttpController = + getDoc: (req, res, next = (error) ->) -> + doc_id = req.params.doc_id + project_id = req.params.project_id + logger.log project_id: project_id, doc_id: doc_id, "getting doc via http" + timer = new Metrics.Timer("http.getDoc") + + if req.query?.fromVersion? 
+ fromVersion = parseInt(req.query.fromVersion, 10) + else + fromVersion = -1 + + DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, doc_id: doc_id, "got doc via http" + if !lines? or !version? + return next(new Errors.NotFoundError("document not found")) + res.send JSON.stringify + id: doc_id + lines: lines + version: version + ops: ops + + setDoc: (req, res, next = (error) ->) -> + doc_id = req.params.doc_id + project_id = req.params.project_id + lines = req.body.lines + logger.log project_id: project_id, doc_id: doc_id, lines: lines, "setting doc via http" + timer = new Metrics.Timer("http.setDoc") + DocumentManager.setDocWithLock project_id, doc_id, lines, (error) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, doc_id: doc_id, "set doc via http" + res.send 204 # No Content + + + flushDocIfLoaded: (req, res, next = (error) ->) -> + doc_id = req.params.doc_id + project_id = req.params.project_id + logger.log project_id: project_id, doc_id: doc_id, "flushing doc via http" + timer = new Metrics.Timer("http.flushDoc") + DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, doc_id: doc_id, "flushed doc via http" + res.send 204 # No Content + + flushAndDeleteDoc: (req, res, next = (error) ->) -> + doc_id = req.params.doc_id + project_id = req.params.project_id + logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http" + timer = new Metrics.Timer("http.deleteDoc") + DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" + res.send 204 # No Content + + flushProject: (req, res, next = (error) ->) -> + project_id = req.params.project_id + logger.log project_id: project_id, "flushing project via http" + timer = new Metrics.Timer("http.flushProject") + ProjectManager.flushProjectWithLocks project_id, (error) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, "flushed project via http" + res.send 204 # No Content + + deleteProject: (req, res, next = (error) ->) -> + project_id = req.params.project_id + logger.log project_id: project_id, "deleting project via http" + timer = new Metrics.Timer("http.deleteProject") + ProjectManager.flushAndDeleteProjectWithLocks project_id, (error) -> + timer.done() + return next(error) if error? 
+    logger.log project_id: project_id, "deleted project via http"
+    res.send 204 # No Content
diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee
new file mode 100644
index 0000000000..9a3d6cf761
--- /dev/null
+++ b/services/document-updater/app/coffee/LockManager.coffee
@@ -0,0 +1,55 @@
+metrics = require('./Metrics')
+Settings = require('settings-sharelatex')
+redis = require('redis')
+redisConf = Settings.redis?.web or Settings.redis or {host: "localhost", port: 6379}
+rclient = redis.createClient(redisConf.port, redisConf.host)
+rclient.auth(redisConf.password)
+keys = require('./RedisKeyBuilder')
+logger = require "logger-sharelatex"
+
+module.exports = LockManager =
+  LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock
+  MAX_LOCK_WAIT_TIME: 10000 # 10s maximum time to spend trying to get the lock
+
+  tryLock : (doc_id, callback = (err, isFree)->)->
+    tenSeconds = 10
+    rclient.set keys.blockingKey(doc_id: doc_id), "locked", "EX", tenSeconds, "NX", (err, gotLock)->
+      return callback(err) if err?
+      if gotLock == "OK"
+        metrics.inc "doc-not-blocking"
+        callback err, true
+      else
+        metrics.inc "doc-blocking"
+        logger.log doc_id: doc_id, redis_response: gotLock, "doc is locked"
+        callback err, false
+
+  getLock: (doc_id, callback = (error) ->) ->
+    startTime = Date.now()
+    do attempt = () ->
+      if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME
+        return callback(new Error("Timeout"))
+
+      LockManager.tryLock doc_id, (error, gotLock) ->
+        return callback(error) if error?
+        if gotLock
+          callback(null)
+        else
+          setTimeout attempt, LockManager.LOCK_TEST_INTERVAL
+
+  checkLock: (doc_id, callback = (err, isFree)->)->
+    multi = rclient.multi()
+    multi.exists keys.blockingKey(doc_id:doc_id)
+    multi.exec (err, replys)->
+      return callback(err) if err?
+      exists = parseInt replys[0]
+      if exists == 1
+        metrics.inc "doc-blocking"
+        callback err, false
+      else
+        metrics.inc "doc-not-blocking"
+        callback err, true
+
+  releaseLock: (doc_id, callback)->
+    rclient.del keys.blockingKey(doc_id:doc_id), callback
diff --git a/services/document-updater/app/coffee/Metrics.coffee b/services/document-updater/app/coffee/Metrics.coffee
new file mode 100644
index 0000000000..0b98550c0e
--- /dev/null
+++ b/services/document-updater/app/coffee/Metrics.coffee
@@ -0,0 +1,23 @@
+StatsD = require('lynx')
+statsd = new StatsD('localhost', 8125, {on_error:->})
+
+buildKey = (key)-> "doc-updater.#{process.env.NODE_ENV}.#{key}"
+
+module.exports =
+  set : (key, value, sampleRate = 1)->
+    statsd.set buildKey(key), value, sampleRate
+
+  inc : (key, sampleRate = 1)->
+    statsd.increment buildKey(key), sampleRate
+
+  Timer : class
+    constructor :(key, sampleRate = 1)->
+      this.start = new Date()
+      this.key = buildKey(key)
+      this.sampleRate = sampleRate # stored so done() can pass it to statsd
+    done:->
+      timeSpan = new Date - this.start
+      statsd.timing(this.key, timeSpan, this.sampleRate)
+
+  gauge : (key, value, sampleRate = 1)->
+    statsd.gauge buildKey(key), value, sampleRate
diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee
new file mode 100644
index 0000000000..eb1a7366c2
--- /dev/null
+++ b/services/document-updater/app/coffee/PersistenceManager.coffee
@@ -0,0 +1,66 @@
+request = require "request"
+Settings = require "settings-sharelatex"
+Errors = require "./Errors"
+Metrics = require "./Metrics"
+
+module.exports = PersistenceManager =
+  getDoc: (project_id, doc_id, _callback = (error, lines) ->) ->
+    timer = new Metrics.Timer("persistenceManager.getDoc")
+    callback = (args...) ->
+      timer.done()
+      _callback(args...)
+
+    url = "#{Settings.apis.web.url}/project/#{project_id}/doc/#{doc_id}"
+    request {
+      url: url
+      method: "GET"
+      headers:
+        "accept": "application/json"
+      auth:
+        user: Settings.apis.web.user
+        pass: Settings.apis.web.pass
+        sendImmediately: true
+      jar: false
+    }, (error, res, body) ->
+      return callback(error) if error?
+      if res.statusCode >= 200 and res.statusCode < 300
+        try
+          body = JSON.parse body
+        catch e
+          return callback(e)
+        return callback null, body.lines
+      else if res.statusCode == 404
+        return callback(new Errors.NotFoundError("doc not found: #{url}"))
+      else
+        return callback(new Error("error accessing web API: #{url} #{res.statusCode}"))
+
+  setDoc: (project_id, doc_id, lines, _callback = (error) ->) ->
+    timer = new Metrics.Timer("persistenceManager.setDoc")
+    callback = (args...) ->
+      timer.done()
+      _callback(args...)
+
+    url = "#{Settings.apis.web.url}/project/#{project_id}/doc/#{doc_id}"
+    request {
+      url: url
+      method: "POST"
+      body: JSON.stringify
+        lines: lines
+      headers:
+        "content-type": "application/json"
+      auth:
+        user: Settings.apis.web.user
+        pass: Settings.apis.web.pass
+        sendImmediately: true
+      jar: false
+    }, (error, res, body) ->
+      return callback(error) if error?
+      if res.statusCode >= 200 and res.statusCode < 300
+        return callback null
+      else if res.statusCode == 404
+        return callback(new Errors.NotFoundError("doc not found: #{url}"))
+      else
+        return callback(new Error("error accessing web API: #{url} #{res.statusCode}"))
diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
new file mode 100644
index 0000000000..f0f62b6d1b
--- /dev/null
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -0,0 +1,60 @@
+RedisManager = require "./RedisManager"
+DocumentManager = require "./DocumentManager"
+async = require "async"
+logger = require "logger-sharelatex"
+Metrics = require "./Metrics"
+
+module.exports = ProjectManager =
+  flushProjectWithLocks: (project_id, _callback = (error) ->) ->
+    timer = new Metrics.Timer("projectManager.flushProjectWithLocks")
+    callback = (args...) ->
+      timer.done()
+      _callback(args...)
+
+    RedisManager.getDocIdsInProject project_id, (error, doc_ids) ->
+      return callback(error) if error?
+      jobs = []
+      errors = []
+      for doc_id in (doc_ids or [])
+        do (doc_id) ->
+          jobs.push (callback) ->
+            DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) ->
+              if error?
+                logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc"
+                errors.push(error)
+              callback()
+
+      logger.log project_id: project_id, doc_ids: doc_ids, "flushing docs"
+      async.series jobs, () ->
+        if errors.length > 0
+          callback new Error("Errors flushing docs. See log for details")
+        else
+          callback(null)
+
+  flushAndDeleteProjectWithLocks: (project_id, _callback = (error) ->) ->
+    timer = new Metrics.Timer("projectManager.flushAndDeleteProjectWithLocks")
+    callback = (args...) ->
+      timer.done()
+      _callback(args...)
+
+    RedisManager.getDocIdsInProject project_id, (error, doc_ids) ->
+      return callback(error) if error?
+      jobs = []
+      errors = []
+      for doc_id in (doc_ids or [])
+        do (doc_id) ->
+          jobs.push (callback) ->
+            DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) ->
+              if error?
+                logger.error err: error, project_id: project_id, doc_id: doc_id, "error deleting doc"
+                errors.push(error)
+              callback()
+
+      logger.log project_id: project_id, doc_ids: doc_ids, "deleting docs"
+      async.series jobs, () ->
+        if errors.length > 0
+          callback new Error("Errors deleting docs.
See log for details") + else + callback(null) + + diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee new file mode 100644 index 0000000000..a444341ea1 --- /dev/null +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -0,0 +1,28 @@ +ALLDOCSKEY = "AllDocIds" +PROJECTKEY = "ProjectId" +BLOCKINGKEY = "Blocking" +CHANGEQUE = "ChangeQue" +DOCSINPROJECT = "DocsIn" +PENDINGUPDATESKEY = "PendingUpdates" +DOCLINES = "doclines" +DOCOPS = "DocOps" +DOCVERSION = "DocVersion" +DOCIDSWITHPENDINGUPDATES = "DocsWithPendingUpdates" + +module.exports = + + allDocs : ALLDOCSKEY + docLines : (op)-> DOCLINES+":"+op.doc_id + docOps : (op)-> DOCOPS+":"+op.doc_id + docVersion : (op)-> DOCVERSION+":"+op.doc_id + projectKey : (op)-> PROJECTKEY+":"+op.doc_id + blockingKey : (op)-> BLOCKINGKEY+":"+op.doc_id + changeQue : (op)-> CHANGEQUE+":"+op.project_id + docsInProject : (op)-> DOCSINPROJECT+":"+op.project_id + pendingUpdates : (op)-> PENDINGUPDATESKEY+":"+op.doc_id + docsWithPendingUpdates : DOCIDSWITHPENDINGUPDATES + combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}" + splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":") + now : (key)-> + d = new Date() + d.getDate()+":"+(d.getMonth()+1)+":"+d.getFullYear()+":"+key diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee new file mode 100644 index 0000000000..79bb06036d --- /dev/null +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -0,0 +1,184 @@ +Settings = require('settings-sharelatex') +redis = require('redis') +redisConf = Settings.redis?.web or Settings.redis or {host: "localhost", port: 6379} +rclient = redis.createClient(redisConf.port, redisConf.host) +rclient.auth(redisConf.password) +async = require('async') +_ = require('underscore') +keys = require('./RedisKeyBuilder') +logger = require('logger-sharelatex') +metrics = require('./Metrics') + +module.exports = + putDocInMemory : (project_id, doc_id, docLines, version, callback)-> + timer = new metrics.Timer("redis.put-doc") + logger.log project_id:project_id, doc_id:doc_id, docLines:docLines, version: version, "putting doc in redis" + multi = rclient.multi() + multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) + multi.set keys.projectKey({doc_id:doc_id}), project_id + multi.set keys.docVersion(doc_id:doc_id), version + multi.del keys.docOps(doc_id:doc_id) + multi.sadd keys.allDocs, doc_id + multi.sadd keys.docsInProject(project_id:project_id), doc_id + multi.exec (err, replys)-> + timer.done() + callback(err) + + removeDocFromMemory : (project_id, doc_id, callback)-> + logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis" + multi = rclient.multi() + multi.get keys.docLines(doc_id:doc_id) + multi.del keys.docLines(doc_id:doc_id) + multi.del keys.projectKey(doc_id:doc_id) + multi.del keys.docVersion(doc_id:doc_id) + multi.del keys.docOps(doc_id:doc_id) + multi.srem keys.docsInProject(project_id:project_id), doc_id + multi.srem keys.allDocs, doc_id + multi.exec (err, replys)-> + if err? 
+        logger.err project_id:project_id, doc_id:doc_id, err:err, "error removing doc from redis"
+        callback(err, null)
+      else
+        docLines = replys[0]
+        logger.log project_id:project_id, doc_id:doc_id, docLines:docLines, "removed doc from redis"
+        callback()
+
+  getDoc : (doc_id, callback = (error, lines, version) ->)->
+    timer = new metrics.Timer("redis.get-doc")
+    multi = rclient.multi()
+    linesKey = keys.docLines(doc_id:doc_id)
+    multi.get linesKey
+    multi.get keys.docVersion(doc_id:doc_id)
+    multi.exec (error, result)->
+      timer.done()
+      return callback(error) if error?
+      try
+        docLines = JSON.parse result[0]
+      catch e
+        return callback(e)
+      version = parseInt(result[1] or 0, 10)
+      callback null, docLines, version
+
+  getDocVersion: (doc_id, callback = (error, version) ->) ->
+    rclient.get keys.docVersion(doc_id: doc_id), (error, version) ->
+      return callback(error) if error?
+      version = parseInt(version, 10)
+      callback null, version
+
+  getCountOfDocsInMemory : (callback)->
+    rclient.smembers keys.allDocs, (err, members)->
+      return callback(err) if err?
+      len = members.length
+      callback null, len
+
+  setDocument : (doc_id, docLines, version, callback = (error) ->)->
+    multi = rclient.multi()
+    multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines)
+    multi.set keys.docVersion(doc_id:doc_id), version
+    multi.incr keys.now("docsets")
+    multi.exec (error, replys) -> callback(error)
+
+  getPendingUpdatesForDoc : (doc_id, callback)->
+    multi = rclient.multi()
+    multi.lrange keys.pendingUpdates(doc_id:doc_id), 0 , -1
+    multi.del keys.pendingUpdates(doc_id:doc_id)
+    multi.exec (error, replys) ->
+      return callback(error) if error?
+      jsonUpdates = replys[0]
+      updates = []
+      for jsonUpdate in jsonUpdates
+        try
+          update = JSON.parse jsonUpdate
+        catch e
+          return callback e
+        updates.push update
+      callback null, updates
+
+  getUpdatesLength: (doc_id, callback)->
+    rclient.llen keys.pendingUpdates(doc_id:doc_id), callback
+
+  getDocsWithPendingUpdates: (callback = (error, docs) ->) ->
+    rclient.smembers keys.docsWithPendingUpdates, (error, doc_keys) ->
+      return callback(error) if error?
+      docs = doc_keys.map (doc_key) ->
+        [project_id, doc_id] = keys.splitProjectIdAndDocId(doc_key)
+        return {
+          doc_id: doc_id
+          project_id: project_id
+        }
+      callback null, docs
+
+  clearDocFromPendingUpdatesSet: (project_id, doc_id, callback = (error) ->) ->
+    doc_key = keys.combineProjectIdAndDocId(project_id, doc_id)
+    rclient.srem keys.docsWithPendingUpdates, doc_key, callback
+
+  getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) ->
+    # TODO: parse the ops and return them as objects, not JSON
+    rclient.llen keys.docOps(doc_id: doc_id), (error, length) ->
+      return callback(error) if error?
+      rclient.get keys.docVersion(doc_id: doc_id), (error, version) ->
+        return callback(error) if error?
+        version = parseInt(version, 10)
+        first_version_in_redis = version - length
+
+        if start < first_version_in_redis or end > version
+          error = new Error("doc ops range is not loaded in redis")
+          logger.error err: error, length: length, version: version, start: start, end: end, "inconsistent version or length"
+          return callback(error)
+
+        start = start - first_version_in_redis
+        if end > -1
+          end = end - first_version_in_redis
+
+        if isNaN(start) or isNaN(end)
+          error = new Error("inconsistent version or lengths")
+          logger.error err: error, length: length, version: version, start: start, end: end, "inconsistent version or length"
+          return callback(error)
+
+        rclient.lrange keys.docOps(doc_id: doc_id), start, end, (error, jsonOps) ->
+          return callback(error) if error?
+          try
+            ops = jsonOps.map (jsonOp) -> JSON.parse jsonOp
+          catch e
+            return callback(e)
+          callback null, ops
+
+  pushDocOp: (doc_id, op, callback = (error, new_version) ->) ->
+    # TODO: take a raw op object and JSONify it here
+    jsonOp = JSON.stringify op
+    rclient.rpush keys.docOps(doc_id: doc_id), jsonOp, (error) ->
+      return callback(error) if error?
+      rclient.incr keys.docVersion(doc_id: doc_id), (error, version) ->
+        return callback(error) if error?
+        version = parseInt(version, 10)
+        callback null, version
+
+  prependDocOps: (doc_id, ops, callback = (error) ->) ->
+    jsonOps = ops.map (op) -> JSON.stringify op
+    rclient.lpush keys.docOps(doc_id: doc_id), jsonOps.reverse(), callback
+
+  getDocOpsLength: (doc_id, callback = (error, length) ->) ->
+    rclient.llen keys.docOps(doc_id: doc_id), callback
+
+  getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) ->
+    rclient.smembers keys.docsInProject(project_id: project_id), callback
+
+
+getDocumentsProjectId = (doc_id, callback)->
+  rclient.get keys.projectKey({doc_id:doc_id}), (err, project_id)->
+    callback err, {doc_id:doc_id, project_id:project_id}
+
+getAllProjectDocsIds = (project_id, callback)->
+  rclient.SMEMBERS keys.docsInProject(project_id:project_id), (err, doc_ids)->
+    if callback?
+      callback(err, doc_ids)
+
+getDocumentsAndExpire = (doc_ids, callback)->
+  multi = rclient.multi()
+  oneDay = 86400
+  doc_ids.forEach (doc_id)->
+    # rclient.expire keys.docLines(doc_id:doc_id), oneDay, ->
+    multi.get keys.docLines(doc_id:doc_id)
+  multi.exec (err, docsLines)->
+    callback err, docsLines
diff --git a/services/document-updater/app/coffee/ShareJsDB.coffee b/services/document-updater/app/coffee/ShareJsDB.coffee
new file mode 100644
index 0000000000..3704121b6d
--- /dev/null
+++ b/services/document-updater/app/coffee/ShareJsDB.coffee
@@ -0,0 +1,58 @@
+Keys = require('./RedisKeyBuilder')
+Settings = require('settings-sharelatex')
+DocumentManager = require "./DocumentManager"
+RedisManager = require "./RedisManager"
+DocOpsManager = require "./DocOpsManager"
+Errors = require "./Errors"
+
+module.exports = ShareJsDB =
+  getOps: (doc_key, start, end, callback) ->
+    if start == end
+      return callback null, []
+
+    # In redis, lrange values are inclusive.
+    if end?
+      end--
+    else
+      end = -1
+
+    [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
+    DocOpsManager.getPreviousDocOps project_id, doc_id, start, end, (error, ops) ->
+      return callback error if error?
+      callback null, ops
+
+  writeOp: (doc_key, opData, callback) ->
+    [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
+    DocOpsManager.pushDocOp project_id, doc_id, {op:opData.op, meta:opData.meta}, (error, version) ->
+      return callback error if error?
+
+      if version == opData.v + 1
+        callback()
+      else
+        # The document has been corrupted by the change. For now, throw an exception.
+        # Later, rebuild the snapshot.
+        callback "Version mismatch in db.append. '#{doc_id}' is corrupted."
+
+  getSnapshot: (doc_key, callback) ->
+    [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
+    DocumentManager.getDoc project_id, doc_id, (error, lines, version) ->
+      return callback(error) if error?
+      if !lines? or !version?
+        return callback(new Errors.NotFoundError("document not found: #{doc_id}"))
+
+      if lines.length > 0 and lines[0].text?
+ type = "json" + snapshot = lines: lines + else + type = "text" + snapshot = lines.join("\n") + callback null, + snapshot: snapshot + v: parseInt(version, 10) + type: type + + # To be able to remove a doc from the ShareJS memory + # we need to called Model::delete, which calls this + # method on the database. However, we will handle removing + # it from Redis ourselves + delete: (docName, dbMeta, callback) -> callback() diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee new file mode 100644 index 0000000000..a5b2e88e4f --- /dev/null +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -0,0 +1,68 @@ +ShareJsModel = require "./sharejs/server/model" +ShareJsDB = require "./ShareJsDB" +async = require "async" +logger = require "logger-sharelatex" +Settings = require('settings-sharelatex') +Keys = require "./RedisKeyBuilder" +{EventEmitter} = require "events" +util = require "util" + +redis = require('redis') +redisConf = Settings.redis?.web or Settings.redis or {host: "localhost", port: 6379} +rclient = redis.createClient(redisConf.port, redisConf.host) +rclient.auth(redisConf.password) + +ShareJsModel:: = {} +util.inherits ShareJsModel, EventEmitter + +module.exports = ShareJsUpdateManager = + getNewShareJsModel: () -> new ShareJsModel(ShareJsDB) + + applyUpdates: (project_id, doc_id, updates, callback = (error, updatedDocLines) ->) -> + logger.log project_id: project_id, doc_id: doc_id, updates: updates, "applying sharejs updates" + jobs = [] + + # We could use a global model for all docs, but we're hitting issues with the + # internal state of ShareJS not being accessible for clearing caches, and + # getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee) + # This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on + # my 2009 MBP). + model = @getNewShareJsModel() + @_listenForOps(model) + doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id) + for update in updates + do (update) => + jobs.push (callback) => + model.applyOp doc_key, update, callback + + async.series jobs, (error) => + logger.log project_id: project_id, doc_id: doc_id, error: error, "applied updates" + if error? + @_sendError(project_id, doc_id, error) + return callback(error) + model.getSnapshot doc_key, (error, data) => + if error? 
+ @_sendError(project_id, doc_id, error) + return callback(error) + if typeof data.snapshot == "string" + docLines = data.snapshot.split("\n") + else + docLines = data.snapshot.lines + callback(null, docLines, data.v) + + _listenForOps: (model) -> + model.on "applyOp", (doc_key, opData) -> + [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) + data = JSON.stringify + project_id: project_id + doc_id: doc_id + op: opData + rclient.publish "applied-ops", data + + _sendError: (project_id, doc_id, error) -> + data = JSON.stringify + project_id: project_id + doc_id: doc_id + error: error.message || error + rclient.publish "applied-ops", data + diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee new file mode 100644 index 0000000000..a1db456457 --- /dev/null +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -0,0 +1,79 @@ +LockManager = require "./LockManager" +RedisManager = require "./RedisManager" +ShareJsUpdateManager = require "./ShareJsUpdateManager" +Settings = require('settings-sharelatex') +async = require("async") +logger = require('logger-sharelatex') +Metrics = require "./Metrics" + +module.exports = UpdateManager = + resumeProcessing: (callback = (error) ->) -> + RedisManager.getDocsWithPendingUpdates (error, docs) => + return callback(error) if error? + jobs = for doc in (docs or []) + do (doc) => + (callback) => @processOutstandingUpdatesWithLock doc.project_id, doc.doc_id, callback + + async.parallelLimit jobs, 5, callback + + processOutstandingUpdates: (project_id, doc_id, _callback = (error) ->) -> + timer = new Metrics.Timer("updateManager.processOutstandingUpdates") + callback = (args...) -> + timer.done() + _callback(args...) + + UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) => + return callback(error) if error? + RedisManager.clearDocFromPendingUpdatesSet project_id, doc_id, (error) => + return callback(error) if error? + callback() + + processOutstandingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> + LockManager.tryLock doc_id, (error, gotLock) => + return callback(error) if error? + return callback() if !gotLock + UpdateManager.processOutstandingUpdates project_id, doc_id, (error) -> + return UpdateManager._handleErrorInsideLock(doc_id, error, callback) if error? + LockManager.releaseLock doc_id, (error) => + return callback(error) if error? + UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback + + continueProcessingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> + RedisManager.getUpdatesLength doc_id, (error, length) => + return callback(error) if error? + if length > 0 + UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, callback + else + callback() + + fetchAndApplyUpdates: (project_id, doc_id, callback = (error) ->) -> + RedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => + return callback(error) if error? + if updates.length == 0 + return callback() + UpdateManager.applyUpdates project_id, doc_id, updates, callback + + applyUpdates: (project_id, doc_id, updates, callback = (error) ->) -> + ShareJsUpdateManager.applyUpdates project_id, doc_id, updates, (error, updatedDocLines, version) -> + return callback(error) if error? 
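+			# At this point ShareJS has applied the whole batch in memory and the
+			# result is written back to Redis below. Callers drive this through the
+			# lock/drain cycle above; a hedged caller sketch (ids illustrative):
+			#
+			#   UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) ->
+			#     return callback(error) if error?
+			#     # either we drained the queue here, or another worker held the lock
+			#     # and will drain it; continueProcessingUpdatesWithLock then re-checks
+			#     # for updates that arrived while the lock was held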
+			logger.log doc_id: doc_id, version: version, "updating doc via sharejs"
+			RedisManager.setDocument doc_id, updatedDocLines, version, callback
+
+	lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) ->
+		LockManager.getLock doc_id, (error) ->
+			return callback(error) if error?
+			UpdateManager.processOutstandingUpdates project_id, doc_id, (error) ->
+				return UpdateManager._handleErrorInsideLock(doc_id, error, callback) if error?
+				method project_id, doc_id, args..., (error, response_args...) ->
+					return UpdateManager._handleErrorInsideLock(doc_id, error, callback) if error?
+					LockManager.releaseLock doc_id, (error) ->
+						return callback(error) if error?
+						callback null, response_args...
+						# We held the lock for a while so updates might have queued up
+						UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id
+
+	_handleErrorInsideLock: (doc_id, original_error, callback = (error) ->) ->
+		LockManager.releaseLock doc_id, (lock_error) ->
+			callback(original_error)
+
+
diff --git a/services/document-updater/app/coffee/mongojs.coffee b/services/document-updater/app/coffee/mongojs.coffee
new file mode 100644
index 0000000000..9a1ae72bc0
--- /dev/null
+++ b/services/document-updater/app/coffee/mongojs.coffee
@@ -0,0 +1,7 @@
+Settings = require "settings-sharelatex"
+mongojs = require "mongojs"
+db = mongojs.connect(Settings.mongo.url, ["docOps"])
+module.exports =
+	db: db
+	ObjectId: mongojs.ObjectId
+
diff --git a/services/document-updater/app/coffee/sharejs/README.md b/services/document-updater/app/coffee/sharejs/README.md
new file mode 100644
index 0000000000..22e68842dd
--- /dev/null
+++ b/services/document-updater/app/coffee/sharejs/README.md
@@ -0,0 +1,48 @@
+This directory contains all the operational transform code. Each file defines a type.
+
+Most of the types in here are for testing or demonstration. The only types which are sent to the webclient
+are `text` and `json`.
+
+
+# An OT type
+
+All OT types have the following fields:
+
+`name`: _(string)_ Name of the type. Should match the filename.
+`create() -> snapshot`: Function which creates and returns a new document snapshot
+
+`apply(snapshot, op) -> snapshot`: A function which creates a new document snapshot with the op applied
+`transform(op1, op2, side) -> op1'`: OT transform function.
+
+Given op1 and op2, `apply(apply(s, op2), transform(op1, op2, 'left')) == apply(apply(s, op1), transform(op2, op1, 'right'))`.
+
+Transform and apply must never modify their arguments.
+
+
+Optional properties:
+
+`tp2`: _(bool)_ True if the transform function supports TP2. This allows p2p architectures to work.
+`compose(op1, op2) -> op`: Create and return a new op which has the same effect as op1 + op2.
+`serialize(snapshot) -> JSON object`: Serialize a document to something we can JSON.stringify()
+`deserialize(object) -> snapshot`: Deserialize a JSON object into the document's internal snapshot format
+`prune(op1', op2, side) -> op1`: Inverse transform function. Only required for TP2 types.
+`normalize(op) -> op`: Fix up an op to make it valid. E.g., remove skips of size zero.
+`api`: _(object)_ Set of helper methods which will be mixed in to the client document object for manipulating documents. See below.
+
+
+# Examples
+
+`count` and `simple` are two trivial OT type definitions if you want to take a look. JSON defines
+the ot-for-JSON type (see the wiki for documentation) and all the text types define different text
+implementations. (I still have no idea which one I like the most, and they're fun to write!)
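+
+To see the transform property above hold on a trivial type, here is a worked
+sketch using `count` (defined in count.coffee below, where each op is
+`[expectedSnapshot, increment]`); the results are derived by hand from its
+definitions:
+
+    count = require './count'
+    s = count.create()                  # 1
+    a = [1, 2]                          # at snapshot 1, add 2
+    b = [1, 3]                          # concurrently: at snapshot 1, add 3
+    a_ = count.transform a, b           # [4, 2]
+    b_ = count.transform b, a           # [3, 3]
+    count.apply count.apply(s, a), b_   # 6
+    count.apply count.apply(s, b), a_   # 6 -- both orders converge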
+
+
+# API
+
+Types can also define API functions. These methods are mixed into the client's Doc object when a document is created.
+You can use them to help construct ops programmatically (so users don't need to understand how ops are structured).
+
+For example, the three text types defined here (text, text-composable and text-tp2) all provide the text API, supplying
+`.insert()`, `.del()`, `.getLength` and `.getText` methods.
+
+See text-api.coffee for an example.
diff --git a/services/document-updater/app/coffee/sharejs/count.coffee b/services/document-updater/app/coffee/sharejs/count.coffee
new file mode 100644
index 0000000000..da28355efb
--- /dev/null
+++ b/services/document-updater/app/coffee/sharejs/count.coffee
@@ -0,0 +1,22 @@
+# This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment]
+
+exports.name = 'count'
+exports.create = -> 1
+
+exports.apply = (snapshot, op) ->
+  [v, inc] = op
+  throw new Error "Op #{v} != snapshot #{snapshot}" unless snapshot == v
+  snapshot + inc
+
+# transform op1 by op2. Return transformed version of op1.
+exports.transform = (op1, op2) ->
+  throw new Error "Op1 #{op1[0]} != op2 #{op2[0]}" unless op1[0] == op2[0]
+  [op1[0] + op2[1], op1[1]]
+
+exports.compose = (op1, op2) ->
+  throw new Error "Op1 #{op1} + 1 != op2 #{op2}" unless op1[0] + op1[1] == op2[0]
+  [op1[0], op1[1] + op2[1]]
+
+exports.generateRandomOp = (doc) ->
+  [[doc, 1], doc + 1]
+
diff --git a/services/document-updater/app/coffee/sharejs/helpers.coffee b/services/document-updater/app/coffee/sharejs/helpers.coffee
new file mode 100644
index 0000000000..093b32e1bb
--- /dev/null
+++ b/services/document-updater/app/coffee/sharejs/helpers.coffee
@@ -0,0 +1,65 @@
+# These methods let you build a transform function from a transformComponent function
+# for OT types like text and JSON in which operations are lists of components
+# and transforming them requires N^2 work.
+
+# Add transform and transformX functions for an OT type which has transformComponent defined.
+# transformComponent(destination array, component, other component, side)
+exports['_bt'] = bootstrapTransform = (type, transformComponent, checkValidOp, append) ->
+  transformComponentX = (left, right, destLeft, destRight) ->
+    transformComponent destLeft, left, right, 'left'
+    transformComponent destRight, right, left, 'right'
+
+  # Transforms rightOp by leftOp. Returns [leftOp', rightOp']
+  type.transformX = type['transformX'] = transformX = (leftOp, rightOp) ->
+    checkValidOp leftOp
+    checkValidOp rightOp
+
+    newRightOp = []
+
+    for rightComponent in rightOp
+      # Generate newLeftOp by composing leftOp by rightComponent
+      newLeftOp = []
+
+      k = 0
+      while k < leftOp.length
+        nextC = []
+        transformComponentX leftOp[k], rightComponent, newLeftOp, nextC
+        k++
+
+        if nextC.length == 1
+          rightComponent = nextC[0]
+        else if nextC.length == 0
+          append newLeftOp, l for l in leftOp[k..]
+          rightComponent = null
+          break
+        else
+          # Recurse.
+          [l_, r_] = transformX leftOp[k..], nextC
+          append newLeftOp, l for l in l_
+          append newRightOp, r for r in r_
+          rightComponent = null
+          break
+
+      append newRightOp, rightComponent if rightComponent?
+      leftOp = newLeftOp
+
+    [leftOp, newRightOp]
+
+  # Transforms op with specified type ('left' or 'right') by otherOp.
+  type.transform = type['transform'] = (op, otherOp, type) ->
+    throw new Error "type must be 'left' or 'right'" unless type == 'left' or type == 'right'
+
+    return op if otherOp.length == 0
+
+    # TODO: Benchmark with and without this line.
I _think_ it'll make a big difference...? + return transformComponent [], op[0], otherOp[0], type if op.length == 1 and otherOp.length == 1 + + if type == 'left' + [left, _] = transformX op, otherOp + left + else + [_, right] = transformX otherOp, op + right + +if typeof WEB is 'undefined' + exports.bootstrapTransform = bootstrapTransform diff --git a/services/document-updater/app/coffee/sharejs/index.coffee b/services/document-updater/app/coffee/sharejs/index.coffee new file mode 100644 index 0000000000..6f3bb8ec20 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/index.coffee @@ -0,0 +1,15 @@ + +register = (file) -> + type = require file + exports[type.name] = type + try require "#{file}-api" + +# Import all the built-in types. +register './simple' +register './count' + +register './text' +register './text-composable' +register './text-tp2' + +register './json' diff --git a/services/document-updater/app/coffee/sharejs/json-api.coffee b/services/document-updater/app/coffee/sharejs/json-api.coffee new file mode 100644 index 0000000000..8819dee798 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/json-api.coffee @@ -0,0 +1,180 @@ +# API for JSON OT + +json = require './json' if typeof WEB is 'undefined' + +if WEB? + extendDoc = exports.extendDoc + exports.extendDoc = (name, fn) -> + SubDoc::[name] = fn + extendDoc name, fn + +depath = (path) -> + if path.length == 1 and path[0].constructor == Array + path[0] + else path + +class SubDoc + constructor: (@doc, @path) -> + at: (path...) -> @doc.at @path.concat depath path + get: -> @doc.getAt @path + # for objects and lists + set: (value, cb) -> @doc.setAt @path, value, cb + # for strings and lists. + insert: (pos, value, cb) -> @doc.insertAt @path, pos, value, cb + # for strings + del: (pos, length, cb) -> @doc.deleteTextAt @path, length, pos, cb + # for objects and lists + remove: (cb) -> @doc.removeAt @path, cb + push: (value, cb) -> @insert @get().length, value, cb + move: (from, to, cb) -> @doc.moveAt @path, from, to, cb + add: (amount, cb) -> @doc.addAt @path, amount, cb + on: (event, cb) -> @doc.addListener @path, event, cb + removeListener: (l) -> @doc.removeListener l + + # text API compatibility + getLength: -> @get().length + getText: -> @get() + +traverse = (snapshot, path) -> + container = data:snapshot + key = 'data' + elem = container + for p in path + elem = elem[key] + key = p + throw new Error 'bad path' if typeof elem == 'undefined' + {elem, key} + +pathEquals = (p1, p2) -> + return false if p1.length != p2.length + for e,i in p1 + return false if e != p2[i] + true + +json.api = + provides: {json:true} + + at: (path...) 
-> new SubDoc this, depath path + + get: -> @snapshot + set: (value, cb) -> @setAt [], value, cb + + getAt: (path) -> + {elem, key} = traverse @snapshot, path + return elem[key] + + setAt: (path, value, cb) -> + {elem, key} = traverse @snapshot, path + op = {p:path} + if elem.constructor == Array + op.li = value + op.ld = elem[key] if typeof elem[key] != 'undefined' + else if typeof elem == 'object' + op.oi = value + op.od = elem[key] if typeof elem[key] != 'undefined' + else throw new Error 'bad path' + @submitOp [op], cb + + removeAt: (path, cb) -> + {elem, key} = traverse @snapshot, path + throw new Error 'no element at that path' unless typeof elem[key] != 'undefined' + op = {p:path} + if elem.constructor == Array + op.ld = elem[key] + else if typeof elem == 'object' + op.od = elem[key] + else throw new Error 'bad path' + @submitOp [op], cb + + insertAt: (path, pos, value, cb) -> + {elem, key} = traverse @snapshot, path + op = {p:path.concat pos} + if elem[key].constructor == Array + op.li = value + else if typeof elem[key] == 'string' + op.si = value + @submitOp [op], cb + + moveAt: (path, from, to, cb) -> + op = [{p:path.concat(from), lm:to}] + @submitOp op, cb + + addAt: (path, amount, cb) -> + op = [{p:path, na:amount}] + @submitOp op, cb + + deleteTextAt: (path, length, pos, cb) -> + {elem, key} = traverse @snapshot, path + op = [{p:path.concat(pos), sd:elem[key][pos...(pos + length)]}] + @submitOp op, cb + + addListener: (path, event, cb) -> + l = {path, event, cb} + @_listeners.push l + l + removeListener: (l) -> + i = @_listeners.indexOf l + return false if i < 0 + @_listeners.splice i, 1 + return true + _register: -> + @_listeners = [] + @on 'change', (op) -> + for c in op + if c.na != undefined or c.si != undefined or c.sd != undefined + # no change to structure + continue + to_remove = [] + for l, i in @_listeners + # Transform a dummy op by the incoming op to work out what + # should happen to the listener. + dummy = {p:l.path, na:0} + xformed = @type.transformComponent [], dummy, c, 'left' + if xformed.length == 0 + # The op was transformed to noop, so we should delete the listener. + to_remove.push i + else if xformed.length == 1 + # The op remained, so grab its new path into the listener. + l.path = xformed[0].p + else + throw new Error "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components." + to_remove.sort (a, b) -> b - a + for i in to_remove + @_listeners.splice i, 1 + @on 'remoteop', (op) -> + for c in op + match_path = if c.na == undefined then c.p[...c.p.length-1] else c.p + for {path, event, cb} in @_listeners + if pathEquals path, match_path + switch event + when 'insert' + if c.li != undefined and c.ld == undefined + cb(c.p[c.p.length-1], c.li) + else if c.oi != undefined and c.od == undefined + cb(c.p[c.p.length-1], c.oi) + else if c.si != undefined + cb(c.p[c.p.length-1], c.si) + when 'delete' + if c.li == undefined and c.ld != undefined + cb(c.p[c.p.length-1], c.ld) + else if c.oi == undefined and c.od != undefined + cb(c.p[c.p.length-1], c.od) + else if c.sd != undefined + cb(c.p[c.p.length-1], c.sd) + when 'replace' + if c.li != undefined and c.ld != undefined + cb(c.p[c.p.length-1], c.ld, c.li) + else if c.oi != undefined and c.od != undefined + cb(c.p[c.p.length-1], c.od, c.oi) + when 'move' + if c.lm != undefined + cb(c.p[c.p.length-1], c.lm) + when 'add' + if c.na != undefined + cb(c.na) + else if (common = @type.commonPath match_path, path)? 
+ if event == 'child op' + if match_path.length == path.length == common + throw new Error "paths match length and have commonality, but aren't equal?" + child_path = c.p[common+1..] + cb(child_path, c) diff --git a/services/document-updater/app/coffee/sharejs/json.coffee b/services/document-updater/app/coffee/sharejs/json.coffee new file mode 100644 index 0000000000..b03b0947ef --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/json.coffee @@ -0,0 +1,441 @@ +# This is the implementation of the JSON OT type. +# +# Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations + +if WEB? + text = exports.types.text +else + text = require './text' + +json = {} + +json.name = 'json' + +json.create = -> null + +json.invertComponent = (c) -> + c_ = {p: c.p} + c_.sd = c.si if c.si != undefined + c_.si = c.sd if c.sd != undefined + c_.od = c.oi if c.oi != undefined + c_.oi = c.od if c.od != undefined + c_.ld = c.li if c.li != undefined + c_.li = c.ld if c.ld != undefined + c_.na = -c.na if c.na != undefined + if c.lm != undefined + c_.lm = c.p[c.p.length-1] + c_.p = c.p[0...c.p.length - 1].concat([c.lm]) + c_ + +json.invert = (op) -> json.invertComponent c for c in op.slice().reverse() + +json.checkValidOp = (op) -> + +isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' +json.checkList = (elem) -> + throw new Error 'Referenced element not a list' unless isArray(elem) + +json.checkObj = (elem) -> + throw new Error "Referenced element not an object (it was #{JSON.stringify elem})" unless elem.constructor is Object + +json.apply = (snapshot, op) -> + json.checkValidOp op + op = clone op + + container = {data: clone snapshot} + + try + for c, i in op + parent = null + parentkey = null + elem = container + key = 'data' + + for p in c.p + parent = elem + parentkey = key + elem = elem[key] + key = p + + throw new Error 'Path invalid' unless parent? + + if c.na != undefined + # Number add + throw new Error 'Referenced element not a number' unless typeof elem[key] is 'number' + elem[key] += c.na + + else if c.si != undefined + # String insert + throw new Error "Referenced element not a string (it was #{JSON.stringify elem})" unless typeof elem is 'string' + parent[parentkey] = elem[...key] + c.si + elem[key..] + else if c.sd != undefined + # String delete + throw new Error 'Referenced element not a string' unless typeof elem is 'string' + throw new Error 'Deleted string does not match' unless elem[key...key + c.sd.length] == c.sd + parent[parentkey] = elem[...key] + elem[key + c.sd.length..] + + else if c.li != undefined && c.ld != undefined + # List replace + json.checkList elem + + # Should check the list element matches c.ld + elem[key] = c.li + else if c.li != undefined + # List insert + json.checkList elem + + elem.splice key, 0, c.li + else if c.ld != undefined + # List delete + json.checkList elem + + # Should check the list element matches c.ld here too. + elem.splice key, 1 + else if c.lm != undefined + # List move + json.checkList elem + if c.lm != key + e = elem[key] + # Remove it... + elem.splice key, 1 + # And insert it back. + elem.splice c.lm, 0, e + + else if c.oi != undefined + # Object insert / replace + json.checkObj elem + + # Should check that elem[key] == c.od + elem[key] = c.oi + else if c.od != undefined + # Object delete + json.checkObj elem + + # Should check that elem[key] == c.od + delete elem[key] + else + throw new Error 'invalid / missing instruction in op' + catch error + # TODO: Roll back all already applied changes. 
Write tests before implementing this code. + throw error + + container.data + +# Checks if two paths, p1 and p2 match. +json.pathMatches = (p1, p2, ignoreLast) -> + return false unless p1.length == p2.length + + for p, i in p1 + return false if p != p2[i] and (!ignoreLast or i != p1.length - 1) + + true + +json.append = (dest, c) -> + c = clone c + if dest.length != 0 and json.pathMatches c.p, (last = dest[dest.length - 1]).p + if last.na != undefined and c.na != undefined + dest[dest.length - 1] = { p: last.p, na: last.na + c.na } + else if last.li != undefined and c.li == undefined and c.ld == last.li + # insert immediately followed by delete becomes a noop. + if last.ld != undefined + # leave the delete part of the replace + delete last.li + else + dest.pop() + else if last.od != undefined and last.oi == undefined and + c.oi != undefined and c.od == undefined + last.oi = c.oi + else if c.lm != undefined and c.p[c.p.length-1] == c.lm + null # don't do anything + else + dest.push c + else + dest.push c + +json.compose = (op1, op2) -> + json.checkValidOp op1 + json.checkValidOp op2 + + newOp = clone op1 + json.append newOp, c for c in op2 + + newOp + +json.normalize = (op) -> + newOp = [] + + op = [op] unless isArray op + + for c in op + c.p ?= [] + json.append newOp, c + + newOp + +# hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming +# we have browser support for JSON. +# http://jsperf.com/cloning-an-object/12 +clone = (o) -> JSON.parse(JSON.stringify o) + +json.commonPath = (p1, p2) -> + p1 = p1.slice() + p2 = p2.slice() + p1.unshift('data') + p2.unshift('data') + p1 = p1[...p1.length-1] + p2 = p2[...p2.length-1] + return -1 if p2.length == 0 + i = 0 + while p1[i] == p2[i] && i < p1.length + i++ + if i == p2.length + return i-1 + return + +# transform c so it applies to a document with otherC applied. +json.transformComponent = (dest, c, otherC, type) -> + c = clone c + c.p.push(0) if c.na != undefined + otherC.p.push(0) if otherC.na != undefined + + common = json.commonPath c.p, otherC.p + common2 = json.commonPath otherC.p, c.p + + cplength = c.p.length + otherCplength = otherC.p.length + + c.p.pop() if c.na != undefined # hax + otherC.p.pop() if otherC.na != undefined + + if otherC.na + if common2? && otherCplength >= cplength && otherC.p[common2] == c.p[common2] + if c.ld != undefined + oc = clone otherC + oc.p = oc.p[cplength..] + c.ld = json.apply clone(c.ld), [oc] + else if c.od != undefined + oc = clone otherC + oc.p = oc.p[cplength..] + c.od = json.apply clone(c.od), [oc] + json.append dest, c + return dest + + if common2? && otherCplength > cplength && c.p[common2] == otherC.p[common2] + # transform based on c + if c.ld != undefined + oc = clone otherC + oc.p = oc.p[cplength..] + c.ld = json.apply clone(c.ld), [oc] + else if c.od != undefined + oc = clone otherC + oc.p = oc.p[cplength..] + c.od = json.apply clone(c.od), [oc] + + + if common? 
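+    # A worked example of the append/compose helpers defined above (results
+    # derived by hand from the code, not from this patch's test suite):
+    #   json.compose [{p: ['n'], na: 1}], [{p: ['n'], na: 2}]
+    #   # => [{p: ['n'], na: 3}]   adjacent number-adds on one path merge
+    #   json.compose [{p: ['l', 0], li: 'x'}], [{p: ['l', 0], ld: 'x'}]
+    #   # => []                    an insert followed by its own delete cancels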
+ commonOperand = cplength == otherCplength + # transform based on otherC + if otherC.na != undefined + # this case is handled above due to icky path hax + else if otherC.si != undefined || otherC.sd != undefined + # String op vs string op - pass through to text type + if c.si != undefined || c.sd != undefined + throw new Error("must be a string?") unless commonOperand + + # Convert an op component to a text op component + convert = (component) -> + newC = p:component.p[component.p.length - 1] + if component.si + newC.i = component.si + else + newC.d = component.sd + newC + + tc1 = convert c + tc2 = convert otherC + + res = [] + text._tc res, tc1, tc2, type + for tc in res + jc = { p: c.p[...common] } + jc.p.push(tc.p) + jc.si = tc.i if tc.i? + jc.sd = tc.d if tc.d? + json.append dest, jc + return dest + else if otherC.li != undefined && otherC.ld != undefined + if otherC.p[common] == c.p[common] + # noop + if !commonOperand + # we're below the deleted element, so -> noop + return dest + else if c.ld != undefined + # we're trying to delete the same element, -> noop + if c.li != undefined and type == 'left' + # we're both replacing one element with another. only one can + # survive! + c.ld = clone otherC.li + else + return dest + else if otherC.li != undefined + if c.li != undefined and c.ld == undefined and commonOperand and c.p[common] == otherC.p[common] + # in li vs. li, left wins. + if type == 'right' + c.p[common]++ + else if otherC.p[common] <= c.p[common] + c.p[common]++ + + if c.lm != undefined + if commonOperand + # otherC edits the same list we edit + if otherC.p[common] <= c.lm + c.lm++ + # changing c.from is handled above. + else if otherC.ld != undefined + if c.lm != undefined + if commonOperand + if otherC.p[common] == c.p[common] + # they deleted the thing we're trying to move + return dest + # otherC edits the same list we edit + p = otherC.p[common] + from = c.p[common] + to = c.lm + if p < to || (p == to && from < to) + c.lm-- + + if otherC.p[common] < c.p[common] + c.p[common]-- + else if otherC.p[common] == c.p[common] + if otherCplength < cplength + # we're below the deleted element, so -> noop + return dest + else if c.ld != undefined + if c.li != undefined + # we're replacing, they're deleting. we become an insert. + delete c.ld + else + # we're trying to delete the same element, -> noop + return dest + else if otherC.lm != undefined + if c.lm != undefined and cplength == otherCplength + # lm vs lm, here we go! + from = c.p[common] + to = c.lm + otherFrom = otherC.p[common] + otherTo = otherC.lm + if otherFrom != otherTo + # if otherFrom == otherTo, we don't need to change our op. + + # where did my thing go? + if from == otherFrom + # they moved it! tie break. + if type == 'left' + c.p[common] = otherTo + if from == to # ugh + c.lm = otherTo + else + return dest + else + # they moved around it + if from > otherFrom + c.p[common]-- + if from > otherTo + c.p[common]++ + else if from == otherTo + if otherFrom > otherTo + c.p[common]++ + if from == to # ugh, again + c.lm++ + + # step 2: where am i going to put it? 
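+          # (Step 1 above worked out where the item we are moving ends up
+          # after their move; step 2 below shifts our destination index by
+          # the same reasoning, with the same-destination tie going against
+          # the 'right' side.)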
+ if to > otherFrom + c.lm-- + else if to == otherFrom + if to > from + c.lm-- + if to > otherTo + c.lm++ + else if to == otherTo + # if we're both moving in the same direction, tie break + if (otherTo > otherFrom and to > from) or + (otherTo < otherFrom and to < from) + if type == 'right' + c.lm++ + else + if to > from + c.lm++ + else if to == otherFrom + c.lm-- + else if c.li != undefined and c.ld == undefined and commonOperand + # li + from = otherC.p[common] + to = otherC.lm + p = c.p[common] + if p > from + c.p[common]-- + if p > to + c.p[common]++ + else + # ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath + # the lm + # + # i.e. things care about where their item is after the move. + from = otherC.p[common] + to = otherC.lm + p = c.p[common] + if p == from + c.p[common] = to + else + if p > from + c.p[common]-- + if p > to + c.p[common]++ + else if p == to + if from > to + c.p[common]++ + else if otherC.oi != undefined && otherC.od != undefined + if c.p[common] == otherC.p[common] + if c.oi != undefined and commonOperand + # we inserted where someone else replaced + if type == 'right' + # left wins + return dest + else + # we win, make our op replace what they inserted + c.od = otherC.oi + else + # -> noop if the other component is deleting the same object (or any + # parent) + return dest + else if otherC.oi != undefined + if c.oi != undefined and c.p[common] == otherC.p[common] + # left wins if we try to insert at the same place + if type == 'left' + json.append dest, {p:c.p, od:otherC.oi} + else + return dest + else if otherC.od != undefined + if c.p[common] == otherC.p[common] + return dest if !commonOperand + if c.oi != undefined + delete c.od + else + return dest + + json.append dest, c + return dest + +if WEB? + exports.types ||= {} + + # This is kind of awful - come up with a better way to hook this helper code up. + exports._bt(json, json.transformComponent, json.checkValidOp, json.append) + + # [] is used to prevent closure from renaming types.text + exports.types.json = json +else + module.exports = json + + require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append) + diff --git a/services/document-updater/app/coffee/sharejs/model.coffee b/services/document-updater/app/coffee/sharejs/model.coffee new file mode 100644 index 0000000000..284d6fd770 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/model.coffee @@ -0,0 +1,603 @@ +# The model of all the ops. Responsible for applying & transforming remote deltas +# and managing the storage layer. +# +# Actual storage is handled by the database wrappers in db/*, wrapped by DocCache + +{EventEmitter} = require 'events' + +queue = require './syncqueue' +types = require '../types' + +isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' + +# This constructor creates a new Model object. There will be one model object +# per server context. +# +# The model object is responsible for a lot of things: +# +# - It manages the interactions with the database +# - It maintains (in memory) a set of all active documents +# - It calls out to the OT functions when necessary +# +# The model is an event emitter. It emits the following events: +# +# create(docName, data): A document has been created with the specified name & data +module.exports = Model = (db, options) -> + # db can be null if the user doesn't want persistance. 
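+  # (A hedged usage sketch: with db == null the model is a pure memory store;
+  # this service instead passes ShareJsDB, above, as the backing store.
+  #
+  #   model = new Model(null)
+  #   model.create 'doc-1', 'text', (error) ->
+  #     model.applyOp 'doc-1', {v: 0, op: [{i: 'hi', p: 0}]}, (error, appliedAt) ->
+  #       model.getSnapshot 'doc-1', (error, data) ->
+  #         console.log data.snapshot, data.v   # "hi", 1
+  # )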
+ + return new Model(db, options) if !(this instanceof Model) + + model = this + + options ?= {} + + # This is a cache of 'live' documents. + # + # The cache is a map from docName -> { + # ops:[{op, meta}] + # snapshot + # type + # v + # meta + # eventEmitter + # reapTimer + # committedVersion: v + # snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant + # dbMeta: database specific data + # opQueue: syncQueue for processing ops + # } + # + # The ops list contains the document's last options.numCachedOps ops. (Or all + # of them if we're using a memory store). + # + # Documents are stored in this set so long as the document has been accessed in + # the last few seconds (options.reapTime) OR at least one client has the document + # open. I don't know if I should keep open (but not being edited) documents live - + # maybe if a client has a document open but the document isn't being edited, I should + # flush it from the cache. + # + # In any case, the API to model is designed such that if we want to change that later + # it should be pretty easy to do so without any external-to-the-model code changes. + docs = {} + + # This is a map from docName -> [callback]. It is used when a document hasn't been + # cached and multiple getSnapshot() / getVersion() requests come in. All requests + # are added to the callback list and called when db.getSnapshot() returns. + # + # callback(error, snapshot data) + awaitingGetSnapshot = {} + + # The time that documents which no clients have open will stay in the cache. + # Should be > 0. + options.reapTime ?= 3000 + + # The number of operations the cache holds before reusing the space + options.numCachedOps ?= 10 + + # This option forces documents to be reaped, even when there's no database backend. + # This is useful when you don't care about persistance and don't want to gradually + # fill memory. + # + # You might want to set reapTime to a day or something. + options.forceReaping ?= false + + # Until I come up with a better strategy, we'll save a copy of the document snapshot + # to the database every ~20 submitted ops. + options.opsBeforeCommit ?= 20 + + # It takes some processing time to transform client ops. The server will punt ops back to the + # client to transform if they're too old. + options.maximumAge ?= 40 + + # **** Cache API methods + + # Its important that all ops are applied in order. This helper method creates the op submission queue + # for a single document. This contains the logic for transforming & applying ops. + makeOpQueue = (docName, doc) -> queue (opData, callback) -> + return callback 'Version missing' unless opData.v >= 0 + return callback 'Op at future version' if opData.v > doc.v + + # Punt the transforming work back to the client if the op is too old. + return callback 'Op too old' if opData.v + options.maximumAge < doc.v + + opData.meta ||= {} + opData.meta.ts = Date.now() + + # We'll need to transform the op to the current version of the document. This + # calls the callback immediately if opVersion == doc.v. + getOps docName, opData.v, doc.v, (error, ops) -> + return callback error if error + + unless doc.v - opData.v == ops.length + # This should never happen. It indicates that we didn't get all the ops we + # asked for. Its important that the submitted op is correctly transformed. 
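+        # (We asked getOps for the half-open range [opData.v, doc.v), so we
+        # expect exactly doc.v - opData.v ops back; e.g. a doc at v 5 with an
+        # op submitted against v 3 needs ops 3 and 4 to transform over.)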
+ console.error "Could not get old ops in model for document #{docName}" + console.error "Expected ops #{opData.v} to #{doc.v} and got #{ops.length} ops" + return callback 'Internal error' + + if ops.length > 0 + try + # If there's enough ops, it might be worth spinning this out into a webworker thread. + for oldOp in ops + # Dup detection works by sending the id(s) the op has been submitted with previously. + # If the id matches, we reject it. The client can also detect the op has been submitted + # already if it sees its own previous id in the ops it sees when it does catchup. + if oldOp.meta.source and opData.dupIfSource and oldOp.meta.source in opData.dupIfSource + return callback 'Op already submitted' + + opData.op = doc.type.transform opData.op, oldOp.op, 'left' + opData.v++ + catch error + console.error error.stack + return callback error.message + + try + snapshot = doc.type.apply doc.snapshot, opData.op + catch error + console.error error.stack + return callback error.message + + # The op data should be at the current version, and the new document data should be at + # the next version. + # + # This should never happen in practice, but its a nice little check to make sure everything + # is hunky-dory. + unless opData.v == doc.v + # This should never happen. + console.error "Version mismatch detected in model. File a ticket - this is a bug." + console.error "Expecting #{opData.v} == #{doc.v}" + return callback 'Internal error' + + #newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + writeOp = db?.writeOp or (docName, newOpData, callback) -> callback() + + writeOp docName, opData, (error) -> + if error + # The user should probably know about this. + console.warn "Error writing ops to database: #{error}" + return callback error + + options.stats?.writeOp?() + + # This is needed when we emit the 'change' event, below. + oldSnapshot = doc.snapshot + + # All the heavy lifting is now done. Finally, we'll update the cache with the new data + # and (maybe!) save a new document snapshot to the database. + + doc.v = opData.v + 1 + doc.snapshot = snapshot + + doc.ops.push opData + doc.ops.shift() if db and doc.ops.length > options.numCachedOps + + model.emit 'applyOp', docName, opData, snapshot, oldSnapshot + doc.eventEmitter.emit 'op', opData, snapshot, oldSnapshot + + # The callback is called with the version of the document at which the op was applied. + # This is the op.v after transformation, and its doc.v - 1. + callback null, opData.v + + # I need a decent strategy here for deciding whether or not to save the snapshot. + # + # The 'right' strategy looks something like "Store the snapshot whenever the snapshot + # is smaller than the accumulated op data". For now, I'll just store it every 20 + # ops or something. (Configurable with doc.committedVersion) + if !doc.snapshotWriteLock and doc.committedVersion + options.opsBeforeCommit <= doc.v + tryWriteSnapshot docName, (error) -> + console.warn "Error writing snapshot #{error}. This is nonfatal" if error + + # Add the data for the given docName to the cache. The named document shouldn't already + # exist in the doc set. + # + # Returns the new doc. 
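+  # (Version bookkeeping sketch for the op queue above: an op submitted
+  # against v 3 may be transformed forward before it lands; the callback
+  # receives the version it was finally applied at, and the doc is then at
+  # that version + 1:
+  #
+  #   model.applyOp docName, {v: 3, op: [{i: '!', p: 5}]}, (error, appliedAt) ->
+  #     # on success, doc.v == appliedAt + 1
+  # )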
+ add = (docName, error, data, committedVersion, ops, dbMeta) -> + callbacks = awaitingGetSnapshot[docName] + delete awaitingGetSnapshot[docName] + + if error + callback error for callback in callbacks if callbacks + else + doc = docs[docName] = + snapshot: data.snapshot + v: data.v + type: data.type + meta: data.meta + + # Cache of ops + ops: ops or [] + + eventEmitter: new EventEmitter + + # Timer before the document will be invalidated from the cache (if the document has no + # listeners) + reapTimer: null + + # Version of the snapshot thats in the database + committedVersion: committedVersion ? data.v + snapshotWriteLock: false + dbMeta: dbMeta + + doc.opQueue = makeOpQueue docName, doc + + refreshReapingTimeout docName + model.emit 'add', docName, data + callback null, doc for callback in callbacks if callbacks + + doc + + # This is a little helper wrapper around db.getOps. It does two things: + # + # - If there's no database set, it returns an error to the callback + # - It adds version numbers to each op returned from the database + # (These can be inferred from context so the DB doesn't store them, but its useful to have them). + getOpsInternal = (docName, start, end, callback) -> + return callback? 'Document does not exist' unless db + + db.getOps docName, start, end, (error, ops) -> + return callback? error if error + + v = start + op.v = v++ for op in ops + + callback? null, ops + + # Load the named document into the cache. This function is re-entrant. + # + # The callback is called with (error, doc) + load = (docName, callback) -> + if docs[docName] + # The document is already loaded. Return immediately. + options.stats?.cacheHit? 'getSnapshot' + return callback null, docs[docName] + + # We're a memory store. If we don't have it, nobody does. + return callback 'Document does not exist' unless db + + callbacks = awaitingGetSnapshot[docName] + + # The document is being loaded already. Add ourselves as a callback. + return callbacks.push callback if callbacks + + options.stats?.cacheMiss? 'getSnapshot' + + # The document isn't loaded and isn't being loaded. Load it. + awaitingGetSnapshot[docName] = [callback] + db.getSnapshot docName, (error, data, dbMeta) -> + return add docName, error if error + + type = types[data.type] + unless type + console.warn "Type '#{data.type}' missing" + return callback "Type not found" + data.type = type + + committedVersion = data.v + + # The server can close without saving the most recent document snapshot. + # In this case, there are extra ops which need to be applied before + # returning the snapshot. + getOpsInternal docName, data.v, null, (error, ops) -> + return callback error if error + + if ops.length > 0 + console.log "Catchup #{docName} #{data.v} -> #{data.v + ops.length}" + + try + for op in ops + data.snapshot = type.apply data.snapshot, op.op + data.v++ + catch e + # This should never happen - it indicates that whats in the + # database is invalid. + console.error "Op data invalid for #{docName}: #{e.stack}" + return callback 'Op data invalid' + + model.emit 'load', docName, data + add docName, error, data, committedVersion, ops, dbMeta + + # This makes sure the cache contains a document. If the doc cache doesn't contain + # a document, it is loaded from the database and stored. 
+ # + # Documents are stored so long as either: + # - They have been accessed within the past #{PERIOD} + # - At least one client has the document open + refreshReapingTimeout = (docName) -> + doc = docs[docName] + return unless doc + + # I want to let the clients list be updated before this is called. + process.nextTick -> + # This is an awkward way to find out the number of clients on a document. If this + # causes performance issues, add a numClients field to the document. + # + # The first check is because its possible that between refreshReapingTimeout being called and this + # event being fired, someone called delete() on the document and hence the doc is something else now. + if doc == docs[docName] and + doc.eventEmitter.listeners('op').length == 0 and + (db or options.forceReaping) and + doc.opQueue.busy is false + + clearTimeout doc.reapTimer + doc.reapTimer = reapTimer = setTimeout -> + tryWriteSnapshot docName, -> + # If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + # in the middle of applying an operation, don't reap. + delete docs[docName] if docs[docName].reapTimer is reapTimer and doc.opQueue.busy is false + , options.reapTime + + tryWriteSnapshot = (docName, callback) -> + return callback?() unless db + + doc = docs[docName] + + # The doc is closed + return callback?() unless doc + + # The document is already saved. + return callback?() if doc.committedVersion is doc.v + + return callback? 'Another snapshot write is in progress' if doc.snapshotWriteLock + + doc.snapshotWriteLock = true + + options.stats?.writeSnapshot?() + + writeSnapshot = db?.writeSnapshot or (docName, docData, dbMeta, callback) -> callback() + + data = + v: doc.v + meta: doc.meta + snapshot: doc.snapshot + # The database doesn't know about object types. + type: doc.type.name + + # Commit snapshot. + writeSnapshot docName, data, doc.dbMeta, (error, dbMeta) -> + doc.snapshotWriteLock = false + + # We have to use data.v here because the version in the doc could + # have been updated between the call to writeSnapshot() and now. + doc.committedVersion = data.v + doc.dbMeta = dbMeta + + callback? error + + # *** Model interface methods + + # Create a new document. + # + # data should be {snapshot, type, [meta]}. The version of a new document is 0. + @create = (docName, type, meta, callback) -> + [meta, callback] = [{}, meta] if typeof meta is 'function' + + return callback? 'Invalid document name' if docName.match /\// + return callback? 'Document already exists' if docs[docName] + + type = types[type] if typeof type == 'string' + return callback? 'Type not found' unless type + + data = + snapshot:type.create() + type:type.name + meta:meta or {} + v:0 + + done = (error, dbMeta) -> + # dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something. + return callback? error if error + + # From here on we'll store the object version of the type name. + data.type = type + add docName, null, data, 0, [], dbMeta + model.emit 'create', docName, data + callback?() + + if db + db.create docName, data, done + else + done() + + # Perminantly deletes the specified document. + # If listeners are attached, they are removed. + # + # The callback is called with (error) if there was an error. If error is null / undefined, the + # document was deleted. + # + # WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the + # deletion. Subsequent op submissions will fail). 
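+  # (Lifecycle sketch for create above and delete below -- meta is optional
+  # and new docs start at version 0:
+  #
+  #   model.create 'design-notes', 'text', {author: 'jane'}, (error) ->
+  #     model.delete 'design-notes', (error) ->
+  #       # the cached doc and its reap timer are gone; deleting an unknown
+  #       # doc with no db reports 'Document does not exist'
+  # )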
+ @delete = (docName, callback) -> + doc = docs[docName] + + if doc + clearTimeout doc.reapTimer + delete docs[docName] + + done = (error) -> + model.emit 'delete', docName unless error + callback? error + + if db + db.delete docName, doc?.dbMeta, done + else + done (if !doc then 'Document does not exist') + + # This gets all operations from [start...end]. (That is, its not inclusive.) + # + # end can be null. This means 'get me all ops from start'. + # + # Each op returned is in the form {op:o, meta:m, v:version}. + # + # Callback is called with (error, [ops]) + # + # If the document does not exist, getOps doesn't necessarily return an error. This is because + # its awkward to figure out whether or not the document exists for things + # like the redis database backend. I guess its a bit gross having this inconsistant + # with the other DB calls, but its certainly convenient. + # + # Use getVersion() to determine if a document actually exists, if thats what you're + # after. + @getOps = getOps = (docName, start, end, callback) -> + # getOps will only use the op cache if its there. It won't fill the op cache in. + throw new Error 'start must be 0+' unless start >= 0 + + [end, callback] = [null, end] if typeof end is 'function' + + ops = docs[docName]?.ops + + if ops + version = docs[docName].v + + # Ops contains an array of ops. The last op in the list is the last op applied + end ?= version + start = Math.min start, end + + return callback null, [] if start == end + + # Base is the version number of the oldest op we have cached + base = version - ops.length + + # If the database is null, we'll trim to the ops we do have and hope thats enough. + if start >= base or db is null + refreshReapingTimeout docName + options.stats?.cacheHit 'getOps' + + return callback null, ops[(start - base)...(end - base)] + + options.stats?.cacheMiss 'getOps' + + getOpsInternal docName, start, end, callback + + # Gets the snapshot data for the specified document. + # getSnapshot(docName, callback) + # Callback is called with (error, {v: , type: , snapshot: , meta: }) + @getSnapshot = (docName, callback) -> + load docName, (error, doc) -> + callback error, if doc then {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} + + # Gets the latest version # of the document. + # getVersion(docName, callback) + # callback is called with (error, version). + @getVersion = (docName, callback) -> + load docName, (error, doc) -> callback error, doc?.v + + # Apply an op to the specified document. + # The callback is passed (error, applied version #) + # opData = {op:op, v:v, meta:metadata} + # + # Ops are queued before being applied so that the following code applies op C before op B: + # model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB + # model.applyOp 'doc', OPC + @applyOp = (docName, opData, callback) -> + # All the logic for this is in makeOpQueue, above. + load docName, (error, doc) -> + return callback error if error + + process.nextTick -> doc.opQueue opData, (error, newVersion) -> + refreshReapingTimeout docName + callback? error, newVersion + + # TODO: store (some) metadata in DB + # TODO: op and meta should be combineable in the op that gets sent + @applyMetaOp = (docName, metaOpData, callback) -> + {path, value} = metaOpData.meta + + return callback? "path should be an array" unless isArray path + + load docName, (error, doc) -> + if error? + callback? 
error + else + applied = false + switch path[0] + when 'shout' + doc.eventEmitter.emit 'op', metaOpData + applied = true + + model.emit 'applyMetaOp', docName, path, value if applied + callback? null, doc.v + + # Listen to all ops from the specified version. If version is in the past, all + # ops since that version are sent immediately to the listener. + # + # The callback is called once the listener is attached, but before any ops have been passed + # to the listener. + # + # This will _not_ edit the document metadata. + # + # If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour + # might change in a future version. + # + # version is the document version at which the document is opened. It can be left out if you want to open + # the document at the most recent version. + # + # listener is called with (opData) each time an op is applied. + # + # callback(error, openedVersion) + @listen = (docName, version, listener, callback) -> + [version, listener, callback] = [null, version, listener] if typeof version is 'function' + + load docName, (error, doc) -> + return callback? error if error + + clearTimeout doc.reapTimer + + if version? + getOps docName, version, null, (error, data) -> + return callback? error if error + + doc.eventEmitter.on 'op', listener + callback? null, version + for op in data + listener op + + # The listener may well remove itself during the catchup phase. If this happens, break early. + # This is done in a quite inefficient way. (O(n) where n = #listeners on doc) + break unless listener in doc.eventEmitter.listeners 'op' + + else # Version is null / undefined. Just add the listener. + doc.eventEmitter.on 'op', listener + callback? null, doc.v + + # Remove a listener for a particular document. + # + # removeListener(docName, listener) + # + # This is synchronous. + @removeListener = (docName, listener) -> + # The document should already be loaded. + doc = docs[docName] + throw new Error 'removeListener called but document not loaded' unless doc + + doc.eventEmitter.removeListener 'op', listener + refreshReapingTimeout docName + + # Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - + # sharejs will happily replay uncommitted ops when documents are re-opened anyway. + @flush = (callback) -> + return callback?() unless db + + pendingWrites = 0 + + for docName, doc of docs + if doc.committedVersion < doc.v + pendingWrites++ + # I'm hoping writeSnapshot will always happen in another thread. + tryWriteSnapshot docName, -> + process.nextTick -> + pendingWrites-- + callback?() if pendingWrites is 0 + + # If nothing was queued, terminate immediately. + callback?() if pendingWrites is 0 + + # Close the database connection. This is needed so nodejs can shut down cleanly. + @closeDb = -> + db?.close?() + db = null + + return + +# Model inherits from EventEmitter. +Model:: = new EventEmitter + diff --git a/services/document-updater/app/coffee/sharejs/server/model.coffee b/services/document-updater/app/coffee/sharejs/server/model.coffee new file mode 100644 index 0000000000..284d6fd770 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/server/model.coffee @@ -0,0 +1,603 @@ +# The model of all the ops. Responsible for applying & transforming remote deltas +# and managing the storage layer. 
+# +# Actual storage is handled by the database wrappers in db/*, wrapped by DocCache + +{EventEmitter} = require 'events' + +queue = require './syncqueue' +types = require '../types' + +isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' + +# This constructor creates a new Model object. There will be one model object +# per server context. +# +# The model object is responsible for a lot of things: +# +# - It manages the interactions with the database +# - It maintains (in memory) a set of all active documents +# - It calls out to the OT functions when necessary +# +# The model is an event emitter. It emits the following events: +# +# create(docName, data): A document has been created with the specified name & data +module.exports = Model = (db, options) -> + # db can be null if the user doesn't want persistance. + + return new Model(db, options) if !(this instanceof Model) + + model = this + + options ?= {} + + # This is a cache of 'live' documents. + # + # The cache is a map from docName -> { + # ops:[{op, meta}] + # snapshot + # type + # v + # meta + # eventEmitter + # reapTimer + # committedVersion: v + # snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant + # dbMeta: database specific data + # opQueue: syncQueue for processing ops + # } + # + # The ops list contains the document's last options.numCachedOps ops. (Or all + # of them if we're using a memory store). + # + # Documents are stored in this set so long as the document has been accessed in + # the last few seconds (options.reapTime) OR at least one client has the document + # open. I don't know if I should keep open (but not being edited) documents live - + # maybe if a client has a document open but the document isn't being edited, I should + # flush it from the cache. + # + # In any case, the API to model is designed such that if we want to change that later + # it should be pretty easy to do so without any external-to-the-model code changes. + docs = {} + + # This is a map from docName -> [callback]. It is used when a document hasn't been + # cached and multiple getSnapshot() / getVersion() requests come in. All requests + # are added to the callback list and called when db.getSnapshot() returns. + # + # callback(error, snapshot data) + awaitingGetSnapshot = {} + + # The time that documents which no clients have open will stay in the cache. + # Should be > 0. + options.reapTime ?= 3000 + + # The number of operations the cache holds before reusing the space + options.numCachedOps ?= 10 + + # This option forces documents to be reaped, even when there's no database backend. + # This is useful when you don't care about persistance and don't want to gradually + # fill memory. + # + # You might want to set reapTime to a day or something. + options.forceReaping ?= false + + # Until I come up with a better strategy, we'll save a copy of the document snapshot + # to the database every ~20 submitted ops. + options.opsBeforeCommit ?= 20 + + # It takes some processing time to transform client ops. The server will punt ops back to the + # client to transform if they're too old. + options.maximumAge ?= 40 + + # **** Cache API methods + + # Its important that all ops are applied in order. This helper method creates the op submission queue + # for a single document. This contains the logic for transforming & applying ops. 
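+  # (Duplicate-detection contract for the queue below, sketched: a client
+  # resubmitting an op lists the source ids it previously sent it with; if a
+  # cached op in the transform range carries a matching meta.source, the
+  # submission is rejected with 'Op already submitted':
+  #
+  #   opData = {v: 10, op: [{i: 'x', p: 0}], meta: {source: 'client-abc'}, dupIfSource: ['client-abc']}
+  # )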
+ makeOpQueue = (docName, doc) -> queue (opData, callback) -> + return callback 'Version missing' unless opData.v >= 0 + return callback 'Op at future version' if opData.v > doc.v + + # Punt the transforming work back to the client if the op is too old. + return callback 'Op too old' if opData.v + options.maximumAge < doc.v + + opData.meta ||= {} + opData.meta.ts = Date.now() + + # We'll need to transform the op to the current version of the document. This + # calls the callback immediately if opVersion == doc.v. + getOps docName, opData.v, doc.v, (error, ops) -> + return callback error if error + + unless doc.v - opData.v == ops.length + # This should never happen. It indicates that we didn't get all the ops we + # asked for. Its important that the submitted op is correctly transformed. + console.error "Could not get old ops in model for document #{docName}" + console.error "Expected ops #{opData.v} to #{doc.v} and got #{ops.length} ops" + return callback 'Internal error' + + if ops.length > 0 + try + # If there's enough ops, it might be worth spinning this out into a webworker thread. + for oldOp in ops + # Dup detection works by sending the id(s) the op has been submitted with previously. + # If the id matches, we reject it. The client can also detect the op has been submitted + # already if it sees its own previous id in the ops it sees when it does catchup. + if oldOp.meta.source and opData.dupIfSource and oldOp.meta.source in opData.dupIfSource + return callback 'Op already submitted' + + opData.op = doc.type.transform opData.op, oldOp.op, 'left' + opData.v++ + catch error + console.error error.stack + return callback error.message + + try + snapshot = doc.type.apply doc.snapshot, opData.op + catch error + console.error error.stack + return callback error.message + + # The op data should be at the current version, and the new document data should be at + # the next version. + # + # This should never happen in practice, but its a nice little check to make sure everything + # is hunky-dory. + unless opData.v == doc.v + # This should never happen. + console.error "Version mismatch detected in model. File a ticket - this is a bug." + console.error "Expecting #{opData.v} == #{doc.v}" + return callback 'Internal error' + + #newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + writeOp = db?.writeOp or (docName, newOpData, callback) -> callback() + + writeOp docName, opData, (error) -> + if error + # The user should probably know about this. + console.warn "Error writing ops to database: #{error}" + return callback error + + options.stats?.writeOp?() + + # This is needed when we emit the 'change' event, below. + oldSnapshot = doc.snapshot + + # All the heavy lifting is now done. Finally, we'll update the cache with the new data + # and (maybe!) save a new document snapshot to the database. + + doc.v = opData.v + 1 + doc.snapshot = snapshot + + doc.ops.push opData + doc.ops.shift() if db and doc.ops.length > options.numCachedOps + + model.emit 'applyOp', docName, opData, snapshot, oldSnapshot + doc.eventEmitter.emit 'op', opData, snapshot, oldSnapshot + + # The callback is called with the version of the document at which the op was applied. + # This is the op.v after transformation, and its doc.v - 1. + callback null, opData.v + + # I need a decent strategy here for deciding whether or not to save the snapshot. + # + # The 'right' strategy looks something like "Store the snapshot whenever the snapshot + # is smaller than the accumulated op data". 
For now, I'll just store it every 20 + # ops or something. (Configurable with doc.committedVersion) + if !doc.snapshotWriteLock and doc.committedVersion + options.opsBeforeCommit <= doc.v + tryWriteSnapshot docName, (error) -> + console.warn "Error writing snapshot #{error}. This is nonfatal" if error + + # Add the data for the given docName to the cache. The named document shouldn't already + # exist in the doc set. + # + # Returns the new doc. + add = (docName, error, data, committedVersion, ops, dbMeta) -> + callbacks = awaitingGetSnapshot[docName] + delete awaitingGetSnapshot[docName] + + if error + callback error for callback in callbacks if callbacks + else + doc = docs[docName] = + snapshot: data.snapshot + v: data.v + type: data.type + meta: data.meta + + # Cache of ops + ops: ops or [] + + eventEmitter: new EventEmitter + + # Timer before the document will be invalidated from the cache (if the document has no + # listeners) + reapTimer: null + + # Version of the snapshot thats in the database + committedVersion: committedVersion ? data.v + snapshotWriteLock: false + dbMeta: dbMeta + + doc.opQueue = makeOpQueue docName, doc + + refreshReapingTimeout docName + model.emit 'add', docName, data + callback null, doc for callback in callbacks if callbacks + + doc + + # This is a little helper wrapper around db.getOps. It does two things: + # + # - If there's no database set, it returns an error to the callback + # - It adds version numbers to each op returned from the database + # (These can be inferred from context so the DB doesn't store them, but its useful to have them). + getOpsInternal = (docName, start, end, callback) -> + return callback? 'Document does not exist' unless db + + db.getOps docName, start, end, (error, ops) -> + return callback? error if error + + v = start + op.v = v++ for op in ops + + callback? null, ops + + # Load the named document into the cache. This function is re-entrant. + # + # The callback is called with (error, doc) + load = (docName, callback) -> + if docs[docName] + # The document is already loaded. Return immediately. + options.stats?.cacheHit? 'getSnapshot' + return callback null, docs[docName] + + # We're a memory store. If we don't have it, nobody does. + return callback 'Document does not exist' unless db + + callbacks = awaitingGetSnapshot[docName] + + # The document is being loaded already. Add ourselves as a callback. + return callbacks.push callback if callbacks + + options.stats?.cacheMiss? 'getSnapshot' + + # The document isn't loaded and isn't being loaded. Load it. + awaitingGetSnapshot[docName] = [callback] + db.getSnapshot docName, (error, data, dbMeta) -> + return add docName, error if error + + type = types[data.type] + unless type + console.warn "Type '#{data.type}' missing" + return callback "Type not found" + data.type = type + + committedVersion = data.v + + # The server can close without saving the most recent document snapshot. + # In this case, there are extra ops which need to be applied before + # returning the snapshot. + getOpsInternal docName, data.v, null, (error, ops) -> + return callback error if error + + if ops.length > 0 + console.log "Catchup #{docName} #{data.v} -> #{data.v + ops.length}" + + try + for op in ops + data.snapshot = type.apply data.snapshot, op.op + data.v++ + catch e + # This should never happen - it indicates that whats in the + # database is invalid. 
+ console.error "Op data invalid for #{docName}: #{e.stack}" + return callback 'Op data invalid' + + model.emit 'load', docName, data + add docName, error, data, committedVersion, ops, dbMeta + + # This makes sure the cache contains a document. If the doc cache doesn't contain + # a document, it is loaded from the database and stored. + # + # Documents are stored so long as either: + # - They have been accessed within the past #{PERIOD} + # - At least one client has the document open + refreshReapingTimeout = (docName) -> + doc = docs[docName] + return unless doc + + # I want to let the clients list be updated before this is called. + process.nextTick -> + # This is an awkward way to find out the number of clients on a document. If this + # causes performance issues, add a numClients field to the document. + # + # The first check is because its possible that between refreshReapingTimeout being called and this + # event being fired, someone called delete() on the document and hence the doc is something else now. + if doc == docs[docName] and + doc.eventEmitter.listeners('op').length == 0 and + (db or options.forceReaping) and + doc.opQueue.busy is false + + clearTimeout doc.reapTimer + doc.reapTimer = reapTimer = setTimeout -> + tryWriteSnapshot docName, -> + # If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + # in the middle of applying an operation, don't reap. + delete docs[docName] if docs[docName].reapTimer is reapTimer and doc.opQueue.busy is false + , options.reapTime + + tryWriteSnapshot = (docName, callback) -> + return callback?() unless db + + doc = docs[docName] + + # The doc is closed + return callback?() unless doc + + # The document is already saved. + return callback?() if doc.committedVersion is doc.v + + return callback? 'Another snapshot write is in progress' if doc.snapshotWriteLock + + doc.snapshotWriteLock = true + + options.stats?.writeSnapshot?() + + writeSnapshot = db?.writeSnapshot or (docName, docData, dbMeta, callback) -> callback() + + data = + v: doc.v + meta: doc.meta + snapshot: doc.snapshot + # The database doesn't know about object types. + type: doc.type.name + + # Commit snapshot. + writeSnapshot docName, data, doc.dbMeta, (error, dbMeta) -> + doc.snapshotWriteLock = false + + # We have to use data.v here because the version in the doc could + # have been updated between the call to writeSnapshot() and now. + doc.committedVersion = data.v + doc.dbMeta = dbMeta + + callback? error + + # *** Model interface methods + + # Create a new document. + # + # data should be {snapshot, type, [meta]}. The version of a new document is 0. + @create = (docName, type, meta, callback) -> + [meta, callback] = [{}, meta] if typeof meta is 'function' + + return callback? 'Invalid document name' if docName.match /\// + return callback? 'Document already exists' if docs[docName] + + type = types[type] if typeof type == 'string' + return callback? 'Type not found' unless type + + data = + snapshot:type.create() + type:type.name + meta:meta or {} + v:0 + + done = (error, dbMeta) -> + # dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something. + return callback? error if error + + # From here on we'll store the object version of the type name. + data.type = type + add docName, null, data, 0, [], dbMeta + model.emit 'create', docName, data + callback?() + + if db + db.create docName, data, done + else + done() + + # Perminantly deletes the specified document. 
+  # If listeners are attached, they are removed.
+  #
+  # The callback is called with (error) if there was an error. If error is null / undefined, the
+  # document was deleted.
+  #
+  # WARNING: This isn't well supported throughout the code. (E.g., streaming clients aren't told
+  # about the deletion. Subsequent op submissions will fail.)
+  @delete = (docName, callback) ->
+    doc = docs[docName]
+
+    if doc
+      clearTimeout doc.reapTimer
+      delete docs[docName]
+
+    done = (error) ->
+      model.emit 'delete', docName unless error
+      callback? error
+
+    if db
+      db.delete docName, doc?.dbMeta, done
+    else
+      done (if !doc then 'Document does not exist')
+
+  # This gets all operations from [start...end]. (That is, it's not inclusive.)
+  #
+  # end can be null. This means 'get me all ops from start'.
+  #
+  # Each op returned is in the form {op:o, meta:m, v:version}.
+  #
+  # Callback is called with (error, [ops])
+  #
+  # If the document does not exist, getOps doesn't necessarily return an error. This is because
+  # it's awkward to figure out whether or not the document exists for things
+  # like the redis database backend. I guess it's a bit gross having this inconsistent
+  # with the other DB calls, but it's certainly convenient.
+  #
+  # Use getVersion() to determine if a document actually exists, if that's what you're
+  # after.
+  @getOps = getOps = (docName, start, end, callback) ->
+    # getOps will only use the op cache if it's there. It won't fill the op cache in.
+    throw new Error 'start must be 0+' unless start >= 0
+
+    [end, callback] = [null, end] if typeof end is 'function'
+
+    ops = docs[docName]?.ops
+
+    if ops
+      version = docs[docName].v
+
+      # Ops contains an array of ops. The last op in the list is the last op applied.
+      end ?= version
+      start = Math.min start, end
+
+      return callback null, [] if start == end
+
+      # Base is the version number of the oldest op we have cached.
+      base = version - ops.length
+
+      # If the database is null, we'll trim to the ops we do have and hope that's enough.
+      if start >= base or db is null
+        refreshReapingTimeout docName
+        options.stats?.cacheHit 'getOps'
+
+        return callback null, ops[(start - base)...(end - base)]
+
+    options.stats?.cacheMiss 'getOps'
+
+    getOpsInternal docName, start, end, callback
+
+  # Gets the snapshot data for the specified document.
+  # getSnapshot(docName, callback)
+  # Callback is called with (error, {v: , type: , snapshot: , meta: })
+  @getSnapshot = (docName, callback) ->
+    load docName, (error, doc) ->
+      callback error, if doc then {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta}
+
+  # Gets the latest version # of the document.
+  # getVersion(docName, callback)
+  # callback is called with (error, version).
+  @getVersion = (docName, callback) ->
+    load docName, (error, doc) -> callback error, doc?.v
+
+  # Apply an op to the specified document.
+  # The callback is passed (error, applied version #).
+  # opData = {op:op, v:v, meta:metadata}
+  #
+  # Ops are queued before being applied so that the following code applies op C before op B:
+  #   model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB
+  #   model.applyOp 'doc', OPC
+  @applyOp = (docName, opData, callback) ->
+    # All the logic for this is in makeOpQueue, above.
+    load docName, (error, doc) ->
+      return callback error if error
+
+      process.nextTick -> doc.opQueue opData, (error, newVersion) ->
+        refreshReapingTimeout docName
+        callback?
error, newVersion + + # TODO: store (some) metadata in DB + # TODO: op and meta should be combineable in the op that gets sent + @applyMetaOp = (docName, metaOpData, callback) -> + {path, value} = metaOpData.meta + + return callback? "path should be an array" unless isArray path + + load docName, (error, doc) -> + if error? + callback? error + else + applied = false + switch path[0] + when 'shout' + doc.eventEmitter.emit 'op', metaOpData + applied = true + + model.emit 'applyMetaOp', docName, path, value if applied + callback? null, doc.v + + # Listen to all ops from the specified version. If version is in the past, all + # ops since that version are sent immediately to the listener. + # + # The callback is called once the listener is attached, but before any ops have been passed + # to the listener. + # + # This will _not_ edit the document metadata. + # + # If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour + # might change in a future version. + # + # version is the document version at which the document is opened. It can be left out if you want to open + # the document at the most recent version. + # + # listener is called with (opData) each time an op is applied. + # + # callback(error, openedVersion) + @listen = (docName, version, listener, callback) -> + [version, listener, callback] = [null, version, listener] if typeof version is 'function' + + load docName, (error, doc) -> + return callback? error if error + + clearTimeout doc.reapTimer + + if version? + getOps docName, version, null, (error, data) -> + return callback? error if error + + doc.eventEmitter.on 'op', listener + callback? null, version + for op in data + listener op + + # The listener may well remove itself during the catchup phase. If this happens, break early. + # This is done in a quite inefficient way. (O(n) where n = #listeners on doc) + break unless listener in doc.eventEmitter.listeners 'op' + + else # Version is null / undefined. Just add the listener. + doc.eventEmitter.on 'op', listener + callback? null, doc.v + + # Remove a listener for a particular document. + # + # removeListener(docName, listener) + # + # This is synchronous. + @removeListener = (docName, listener) -> + # The document should already be loaded. + doc = docs[docName] + throw new Error 'removeListener called but document not loaded' unless doc + + doc.eventEmitter.removeListener 'op', listener + refreshReapingTimeout docName + + # Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - + # sharejs will happily replay uncommitted ops when documents are re-opened anyway. + @flush = (callback) -> + return callback?() unless db + + pendingWrites = 0 + + for docName, doc of docs + if doc.committedVersion < doc.v + pendingWrites++ + # I'm hoping writeSnapshot will always happen in another thread. + tryWriteSnapshot docName, -> + process.nextTick -> + pendingWrites-- + callback?() if pendingWrites is 0 + + # If nothing was queued, terminate immediately. + callback?() if pendingWrites is 0 + + # Close the database connection. This is needed so nodejs can shut down cleanly. + @closeDb = -> + db?.close?() + db = null + + return + +# Model inherits from EventEmitter. 
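+# This is what makes the model.emit calls above deliver events to consumers; for
+# example (a sketch):
+#
+#   model.on 'applyOp', (docName, opData, snapshot) ->
+#     console.log "#{docName} now at v#{opData.v + 1}"
+#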
+Model:: = new EventEmitter + diff --git a/services/document-updater/app/coffee/sharejs/server/syncqueue.coffee b/services/document-updater/app/coffee/sharejs/server/syncqueue.coffee new file mode 100644 index 0000000000..746450b010 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/server/syncqueue.coffee @@ -0,0 +1,42 @@ +# A synchronous processing queue. The queue calls process on the arguments, +# ensuring that process() is only executing once at a time. +# +# process(data, callback) _MUST_ eventually call its callback. +# +# Example: +# +# queue = require 'syncqueue' +# +# fn = queue (data, callback) -> +# asyncthing data, -> +# callback(321) +# +# fn(1) +# fn(2) +# fn(3, (result) -> console.log(result)) +# +# ^--- async thing will only be running once at any time. + +module.exports = (process) -> + throw new Error('process is not a function') unless typeof process == 'function' + queue = [] + + enqueue = (data, callback) -> + queue.push [data, callback] + flush() + + enqueue.busy = false + + flush = -> + return if enqueue.busy or queue.length == 0 + + enqueue.busy = true + [data, callback] = queue.shift() + process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false + # This is called after busy = false so a user can check if enqueue.busy is set in the callback. + callback.apply null, result if callback + flush() + + enqueue + diff --git a/services/document-updater/app/coffee/sharejs/simple.coffee b/services/document-updater/app/coffee/sharejs/simple.coffee new file mode 100644 index 0000000000..996b1a5ddc --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/simple.coffee @@ -0,0 +1,38 @@ +# This is a really simple OT type. Its not compiled with the web client, but it could be. +# +# Its mostly included for demonstration purposes and its used in a lot of unit tests. +# +# This defines a really simple text OT type which only allows inserts. (No deletes). +# +# Ops look like: +# {position:#, text:"asdf"} +# +# Document snapshots look like: +# {str:string} + +module.exports = + # The name of the OT type. The type is stored in types[type.name]. The name can be + # used in place of the actual type in all the API methods. + name: 'simple' + + # Create a new document snapshot + create: -> {str:""} + + # Apply the given op to the document snapshot. Returns the new snapshot. + # + # The original snapshot should not be modified. + apply: (snapshot, op) -> + throw new Error 'Invalid position' unless 0 <= op.position <= snapshot.str.length + + str = snapshot.str + str = str.slice(0, op.position) + op.text + str.slice(op.position) + {str} + + # transform op1 by op2. Return transformed version of op1. + # sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the + # op being transformed comes from the client or the server. + transform: (op1, op2, sym) -> + pos = op1.position + pos += op2.text.length if op2.position < pos or (op2.position == pos and sym is 'left') + + return {position:pos, text:op1.text} diff --git a/services/document-updater/app/coffee/sharejs/syncqueue.coffee b/services/document-updater/app/coffee/sharejs/syncqueue.coffee new file mode 100644 index 0000000000..746450b010 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/syncqueue.coffee @@ -0,0 +1,42 @@ +# A synchronous processing queue. The queue calls process on the arguments, +# ensuring that process() is only executing once at a time. +# +# process(data, callback) _MUST_ eventually call its callback. 
+# +# Example: +# +# queue = require 'syncqueue' +# +# fn = queue (data, callback) -> +# asyncthing data, -> +# callback(321) +# +# fn(1) +# fn(2) +# fn(3, (result) -> console.log(result)) +# +# ^--- async thing will only be running once at any time. + +module.exports = (process) -> + throw new Error('process is not a function') unless typeof process == 'function' + queue = [] + + enqueue = (data, callback) -> + queue.push [data, callback] + flush() + + enqueue.busy = false + + flush = -> + return if enqueue.busy or queue.length == 0 + + enqueue.busy = true + [data, callback] = queue.shift() + process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false + # This is called after busy = false so a user can check if enqueue.busy is set in the callback. + callback.apply null, result if callback + flush() + + enqueue + diff --git a/services/document-updater/app/coffee/sharejs/text-api.coffee b/services/document-updater/app/coffee/sharejs/text-api.coffee new file mode 100644 index 0000000000..96243ceffb --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/text-api.coffee @@ -0,0 +1,32 @@ +# Text document API for text + +text = require './text' if typeof WEB is 'undefined' + +text.api = + provides: {text:true} + + # The number of characters in the string + getLength: -> @snapshot.length + + # Get the text contents of a document + getText: -> @snapshot + + insert: (pos, text, callback) -> + op = [{p:pos, i:text}] + + @submitOp op, callback + op + + del: (pos, length, callback) -> + op = [{p:pos, d:@snapshot[pos...(pos + length)]}] + + @submitOp op, callback + op + + _register: -> + @on 'remoteop', (op) -> + for component in op + if component.i != undefined + @emit 'insert', component.p, component.i + else + @emit 'delete', component.p, component.d diff --git a/services/document-updater/app/coffee/sharejs/text-composable-api.coffee b/services/document-updater/app/coffee/sharejs/text-composable-api.coffee new file mode 100644 index 0000000000..7b27ac163a --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/text-composable-api.coffee @@ -0,0 +1,43 @@ +# Text document API for text + +if WEB? + type = exports.types['text-composable'] +else + type = require './text-composable' + +type.api = + provides: {'text':true} + + # The number of characters in the string + 'getLength': -> @snapshot.length + + # Get the text contents of a document + 'getText': -> @snapshot + + 'insert': (pos, text, callback) -> + op = type.normalize [pos, 'i':text, (@snapshot.length - pos)] + + @submitOp op, callback + op + + 'del': (pos, length, callback) -> + op = type.normalize [pos, 'd':@snapshot[pos...(pos + length)], (@snapshot.length - pos - length)] + + @submitOp op, callback + op + + _register: -> + @on 'remoteop', (op) -> + pos = 0 + for component in op + if typeof component is 'number' + pos += component + else if component.i != undefined + @emit 'insert', pos, component.i + pos += component.i.length + else + # delete + @emit 'delete', pos, component.d + # We don't increment pos, because the position + # specified is after the delete has happened. + diff --git a/services/document-updater/app/coffee/sharejs/text-composable.coffee b/services/document-updater/app/coffee/sharejs/text-composable.coffee new file mode 100644 index 0000000000..992b567bf0 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/text-composable.coffee @@ -0,0 +1,261 @@ +# An alternate composable implementation for text. 
This is much closer +# to the implementation used by google wave. +# +# Ops are lists of components which iterate over the whole document. +# Components are either: +# A number N: Skip N characters in the original document +# {i:'str'}: Insert 'str' at the current position in the document +# {d:'str'}: Delete 'str', which appears at the current position in the document +# +# Eg: [3, {i:'hi'}, 5, {d:'internet'}] +# +# Snapshots are strings. + +p = -> #require('util').debug +i = -> #require('util').inspect + +exports = if WEB? then {} else module.exports + +exports.name = 'text-composable' + +exports.create = -> '' + +# -------- Utility methods + +checkOp = (op) -> + throw new Error('Op must be an array of components') unless Array.isArray(op) + last = null + for c in op + if typeof(c) == 'object' + throw new Error("Invalid op component: #{i c}") unless (c.i? && c.i.length > 0) or (c.d? && c.d.length > 0) + else + throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number' + throw new Error('Skip components must be a positive number') unless c > 0 + throw new Error('Adjacent skip components should be added') if typeof(last) == 'number' + + last = c + +# Makes a function for appending components to a given op. +# Exported for the randomOpGenerator. +exports._makeAppend = makeAppend = (op) -> (component) -> + if component == 0 || component.i == '' || component.d == '' + return + else if op.length == 0 + op.push component + else if typeof(component) == 'number' && typeof(op[op.length - 1]) == 'number' + op[op.length - 1] += component + else if component.i? && op[op.length - 1].i? + op[op.length - 1].i += component.i + else if component.d? && op[op.length - 1].d? + op[op.length - 1].d += component.d + else + op.push component + +# checkOp op + +# Makes 2 functions for taking components from the start of an op, and for peeking +# at the next op that could be taken. +makeTake = (op) -> + # The index of the next component to take + idx = 0 + # The offset into the component + offset = 0 + + # Take up to length n from the front of op. If n is null, take the next + # op component. If indivisableField == 'd', delete components won't be separated. + # If indivisableField == 'i', insert components won't be separated. + take = (n, indivisableField) -> + return null if idx == op.length + #assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' + + if typeof(op[idx]) == 'number' + if !n? or op[idx] - offset <= n + c = op[idx] - offset + ++idx; offset = 0 + c + else + offset += n + n + else + # Take from the string + field = if op[idx].i then 'i' else 'd' + c = {} + if !n? or op[idx][field].length - offset <= n or field == indivisableField + c[field] = op[idx][field][offset..] + ++idx; offset = 0 + else + c[field] = op[idx][field][offset...(offset + n)] + offset += n + c + + peekType = () -> + op[idx] + + [take, peekType] + +# Find and return the length of an op component +componentLength = (component) -> + if typeof(component) == 'number' + component + else if component.i? + component.i.length + else + component.d.length + +# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +# adjacent inserts and deletes. +exports.normalize = (op) -> + newOp = [] + append = makeAppend newOp + append component for component in op + newOp + +# Apply the op to the string. Returns the new string. 
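+# For example (an illustrative walk-through, not from the original comments):
+#
+#   exports.apply 'abcdefgh', [3, {i:'hi'}, 2, {d:'fg'}, 1]   # => 'abchideh'
+#
+# Note the op has to traverse the whole document, hence the trailing skip of 1.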
+exports.apply = (str, op) -> + p "Applying #{i op} to '#{str}'" + throw new Error('Snapshot should be a string') unless typeof(str) == 'string' + checkOp op + + pos = 0 + newDoc = [] + + for component in op + if typeof(component) == 'number' + throw new Error('The op is too long for this document') if component > str.length + newDoc.push str[...component] + str = str[component..] + else if component.i? + newDoc.push component.i + else + throw new Error("The deleted text '#{component.d}' doesn't match the next characters in the document '#{str[...component.d.length]}'") unless component.d == str[...component.d.length] + str = str[component.d.length..] + + throw new Error("The applied op doesn't traverse the entire document") unless '' == str + + newDoc.join '' + +# transform op1 by op2. Return transformed version of op1. +# op1 and op2 are unchanged by transform. +exports.transform = (op, otherOp, side) -> + throw new Error "side (#{side} must be 'left' or 'right'" unless side == 'left' or side == 'right' + + checkOp op + checkOp otherOp + newOp = [] + + append = makeAppend newOp + [take, peek] = makeTake op + + for component in otherOp + if typeof(component) == 'number' # Skip + length = component + while length > 0 + chunk = take(length, 'i') + throw new Error('The op traverses more elements than the document has') unless chunk != null + + append chunk + length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.i? + else if component.i? # Insert + if side == 'left' + # The left insert should go first. + o = peek() + append take() if o?.i + + # Otherwise, skip the inserted text. + append(component.i.length) + else # Delete. + #assert.ok component.d + length = component.d.length + while length > 0 + chunk = take(length, 'i') + throw new Error('The op traverses more elements than the document has') unless chunk != null + + if typeof(chunk) == 'number' + length -= chunk + else if chunk.i? + append(chunk) + else + #assert.ok chunk.d + # The delete is unnecessary now. + length -= chunk.d.length + + # Append extras from op1 + while (component = take()) + throw new Error "Remaining fragments in the op: #{i component}" unless component?.i? + append component + + newOp + + +# Compose 2 ops into 1 op. +exports.compose = (op1, op2) -> + p "COMPOSE #{i op1} + #{i op2}" + checkOp op1 + checkOp op2 + + result = [] + + append = makeAppend result + [take, _] = makeTake op1 + + for component in op2 + if typeof(component) == 'number' # Skip + length = component + while length > 0 + chunk = take(length, 'd') + throw new Error('The op traverses more elements than the document has') unless chunk != null + + append chunk + length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.d? + + else if component.i? # Insert + append {i:component.i} + + else # Delete + offset = 0 + while offset < component.d.length + chunk = take(component.d.length - offset, 'd') + throw new Error('The op traverses more elements than the document has') unless chunk != null + + # If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. + if typeof(chunk) == 'number' + append {d:component.d[offset...(offset + chunk)]} + offset += chunk + else if chunk.i? + throw new Error("The deleted text doesn't match the inserted text") unless component.d[offset...(offset + chunk.i.length)] == chunk.i + offset += chunk.i.length + # The ops cancel each other out. 
+ else + # Delete + append chunk + + # Append extras from op1 + while (component = take()) + throw new Error "Trailing stuff in op1 #{i component}" unless component?.d? + append component + + result + + +invertComponent = (c) -> + if typeof(c) == 'number' + c + else if c.i? + {d:c.i} + else + {i:c.d} + +# Invert an op +exports.invert = (op) -> + result = [] + append = makeAppend result + + append(invertComponent component) for component in op + + result + +if window? + window.ot ||= {} + window.ot.types ||= {} + window.ot.types.text = exports + diff --git a/services/document-updater/app/coffee/sharejs/text-tp2-api.coffee b/services/document-updater/app/coffee/sharejs/text-tp2-api.coffee new file mode 100644 index 0000000000..d661b5ae37 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/text-tp2-api.coffee @@ -0,0 +1,89 @@ +# Text document API for text-tp2 + +if WEB? + type = exports.types['text-tp2'] +else + type = require './text-tp2' + +{_takeDoc:takeDoc, _append:append} = type + +appendSkipChars = (op, doc, pos, maxlength) -> + while (maxlength == undefined || maxlength > 0) and pos.index < doc.data.length + part = takeDoc doc, pos, maxlength, true + maxlength -= part.length if maxlength != undefined and typeof part is 'string' + append op, (part.length || part) + +type['api'] = + 'provides': {'text':true} + + # The number of characters in the string + 'getLength': -> @snapshot.charLength + + # Flatten a document into a string + 'getText': -> + strings = (elem for elem in @snapshot.data when typeof elem is 'string') + strings.join '' + + 'insert': (pos, text, callback) -> + pos = 0 if pos == undefined + + op = [] + docPos = {index:0, offset:0} + + appendSkipChars op, @snapshot, docPos, pos + append op, {'i':text} + appendSkipChars op, @snapshot, docPos + + @submitOp op, callback + op + + 'del': (pos, length, callback) -> + op = [] + docPos = {index:0, offset:0} + + appendSkipChars op, @snapshot, docPos, pos + + while length > 0 + part = takeDoc @snapshot, docPos, length, true + if typeof part is 'string' + append op, {'d':part.length} + length -= part.length + else + append op, part + + appendSkipChars op, @snapshot, docPos + + @submitOp op, callback + op + + '_register': -> + # Interpret recieved ops + generate more detailed events for them + @on 'remoteop', (op, snapshot) -> + textPos = 0 + docPos = {index:0, offset:0} + + for component in op + if typeof component is 'number' + # Skip + remainder = component + while remainder > 0 + part = takeDoc snapshot, docPos, remainder + if typeof part is 'string' + textPos += part.length + remainder -= part.length || part + else if component.i != undefined + # Insert + if typeof component.i is 'string' + @emit 'insert', textPos, component.i + textPos += component.i.length + else + # Delete + remainder = component.d + while remainder > 0 + part = takeDoc snapshot, docPos, remainder + if typeof part is 'string' + @emit 'delete', textPos, part + remainder -= part.length || part + + return + diff --git a/services/document-updater/app/coffee/sharejs/text-tp2.coffee b/services/document-updater/app/coffee/sharejs/text-tp2.coffee new file mode 100644 index 0000000000..d19cbdcef4 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/text-tp2.coffee @@ -0,0 +1,322 @@ +# A TP2 implementation of text, following this spec: +# http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README +# +# A document is made up of a string and a set of tombstones inserted throughout +# the string. 
For example, 'some ', (2 tombstones), 'string'. +# +# This is encoded in a document as: {s:'some string', t:[5, -2, 6]} +# +# Ops are lists of components which iterate over the whole document. +# Components are either: +# N: Skip N characters in the original document +# {i:'str'}: Insert 'str' at the current position in the document +# {i:N}: Insert N tombstones at the current position in the document +# {d:N}: Delete (tombstone) N characters at the current position in the document +# +# Eg: [3, {i:'hi'}, 5, {d:8}] +# +# Snapshots are lists with characters and tombstones. Characters are stored in strings +# and adjacent tombstones are flattened into numbers. +# +# Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters) +# would be represented by a document snapshot of ['Hello ', 5, 'world'] + +type = + name: 'text-tp2' + tp2: true + create: -> {charLength:0, totalLength:0, positionCache:[], data:[]} + serialize: (doc) -> + throw new Error 'invalid doc snapshot' unless doc.data + doc.data + deserialize: (data) -> + doc = type.create() + doc.data = data + + for component in data + if typeof component is 'string' + doc.charLength += component.length + doc.totalLength += component.length + else + doc.totalLength += component + + doc + + +checkOp = (op) -> + throw new Error('Op must be an array of components') unless Array.isArray(op) + last = null + for c in op + if typeof(c) == 'object' + if c.i != undefined + throw new Error('Inserts must insert a string or a +ive number') unless (typeof(c.i) == 'string' and c.i.length > 0) or (typeof(c.i) == 'number' and c.i > 0) + else if c.d != undefined + throw new Error('Deletes must be a +ive number') unless typeof(c.d) == 'number' and c.d > 0 + else + throw new Error('Operation component must define .i or .d') + else + throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number' + throw new Error('Skip components must be a positive number') unless c > 0 + throw new Error('Adjacent skip components should be combined') if typeof(last) == 'number' + + last = c + +# Take the next part from the specified position in a document snapshot. +# position = {index, offset}. It will be updated. +type._takeDoc = takeDoc = (doc, position, maxlength, tombsIndivisible) -> + throw new Error 'Operation goes past the end of the document' if position.index >= doc.data.length + + part = doc.data[position.index] + # peel off data[0] + result = if typeof(part) == 'string' + if maxlength != undefined + part[position.offset...(position.offset + maxlength)] + else + part[position.offset...] + else + if maxlength == undefined or tombsIndivisible + part - position.offset + else + Math.min(maxlength, part - position.offset) + + resultLen = result.length || result + + if (part.length || part) - position.offset > resultLen + position.offset += resultLen + else + position.index++ + position.offset = 0 + + result + +# Append a part to the end of a document +type._appendDoc = appendDoc = (doc, p) -> + return if p == 0 or p == '' + + if typeof p is 'string' + doc.charLength += p.length + doc.totalLength += p.length + else + doc.totalLength += p + + data = doc.data + if data.length == 0 + data.push p + else if typeof(data[data.length - 1]) == typeof(p) + data[data.length - 1] += p + else + data.push p + return + +# Apply the op to the document. The document is not modified in the process. 
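+# For example (an illustrative sketch using the snapshot shape described in the
+# header above):
+#
+#   doc  = type.deserialize ['Hello ', 5, 'world']
+#   doc2 = type.apply doc, [{d:6}, 5, 5]
+#   doc2.data          # => [11, 'world'] -- the deleted characters become tombstones
+#   doc2.totalLength   # => 16, unchanged; deletes never shrink a TP2 document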
+type.apply = (doc, op) -> + unless doc.totalLength != undefined and doc.charLength != undefined and doc.data.length != undefined + throw new Error('Snapshot is invalid') + + checkOp op + + newDoc = type.create() + position = {index:0, offset:0} + + for component in op + if typeof(component) is 'number' + remainder = component + while remainder > 0 + part = takeDoc doc, position, remainder + + appendDoc newDoc, part + remainder -= part.length || part + + else if component.i != undefined + appendDoc newDoc, component.i + else if component.d != undefined + remainder = component.d + while remainder > 0 + part = takeDoc doc, position, remainder + remainder -= part.length || part + appendDoc newDoc, component.d + + newDoc + +# Append an op component to the end of the specified op. +# Exported for the randomOpGenerator. +type._append = append = (op, component) -> + if component == 0 || component.i == '' || component.i == 0 || component.d == 0 + return + else if op.length == 0 + op.push component + else + last = op[op.length - 1] + if typeof(component) == 'number' && typeof(last) == 'number' + op[op.length - 1] += component + else if component.i != undefined && last.i? && typeof(last.i) == typeof(component.i) + last.i += component.i + else if component.d != undefined && last.d? + last.d += component.d + else + op.push component + +# Makes 2 functions for taking components from the start of an op, and for peeking +# at the next op that could be taken. +makeTake = (op) -> + # The index of the next component to take + index = 0 + # The offset into the component + offset = 0 + + # Take up to length maxlength from the op. If maxlength is not defined, there is no max. + # If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. + # + # Returns null when op is fully consumed. + take = (maxlength, insertsIndivisible) -> + return null if index == op.length + + e = op[index] + if typeof((current = e)) == 'number' or typeof((current = e.i)) == 'number' or (current = e.d) != undefined + if !maxlength? or current - offset <= maxlength or (insertsIndivisible and e.i != undefined) + # Return the rest of the current element. + c = current - offset + ++index; offset = 0 + else + offset += maxlength + c = maxlength + if e.i != undefined then {i:c} else if e.d != undefined then {d:c} else c + else + # Take from the inserted string + if !maxlength? or e.i.length - offset <= maxlength or insertsIndivisible + result = {i:e.i[offset..]} + ++index; offset = 0 + else + result = {i:e.i[offset...offset + maxlength]} + offset += maxlength + result + + peekType = -> op[index] + + [take, peekType] + +# Find and return the length of an op component +componentLength = (component) -> + if typeof(component) == 'number' + component + else if typeof(component.i) == 'string' + component.i.length + else + # This should work because c.d and c.i must be +ive. + component.d or component.i + +# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +# adjacent inserts and deletes. +type.normalize = (op) -> + newOp = [] + append newOp, component for component in op + newOp + +# This is a helper method to transform and prune. goForwards is true for transform, false for prune. 
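+# Roughly, prune undoes a transform (see 'Prune is the inverse of transform'
+# below); illustratively, when the transformed op doesn't touch otherOp's
+# insertions:
+#
+#   type.prune (type.transform op, otherOp, 'left'), otherOp   # recovers op
+#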
+transformer = (op, otherOp, goForwards, side) -> + checkOp op + checkOp otherOp + newOp = [] + + [take, peek] = makeTake op + + for component in otherOp + length = componentLength component + + if component.i != undefined # Insert text or tombs + if goForwards # transform - insert skips over inserted parts + if side == 'left' + # The left insert should go first. + append newOp, take() while peek()?.i != undefined + + # In any case, skip the inserted text. + append newOp, length + + else # Prune. Remove skips for inserts. + while length > 0 + chunk = take length, true + + throw new Error 'The transformed op is invalid' unless chunk != null + throw new Error 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' if chunk.d != undefined + + if typeof chunk is 'number' + length -= chunk + else + append newOp, chunk + + else # Skip or delete + while length > 0 + chunk = take length, true + throw new Error('The op traverses more elements than the document has') unless chunk != null + + append newOp, chunk + length -= componentLength chunk unless chunk.i + + # Append extras from op1 + while (component = take()) + throw new Error "Remaining fragments in the op: #{component}" unless component.i != undefined + append newOp, component + + newOp + +# transform op1 by op2. Return transformed version of op1. +# op1 and op2 are unchanged by transform. +# side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. +type.transform = (op, otherOp, side) -> + throw new Error "side (#{side}) should be 'left' or 'right'" unless side == 'left' or side == 'right' + transformer op, otherOp, true, side + +# Prune is the inverse of transform. +type.prune = (op, otherOp) -> transformer op, otherOp, false + +# Compose 2 ops into 1 op. +type.compose = (op1, op2) -> + return op2 if op1 == null or op1 == undefined + + checkOp op1 + checkOp op2 + + result = [] + + [take, _] = makeTake op1 + + for component in op2 + + if typeof(component) == 'number' # Skip + # Just copy from op1. + length = component + while length > 0 + chunk = take length + throw new Error('The op traverses more elements than the document has') unless chunk != null + + append result, chunk + length -= componentLength chunk + + else if component.i != undefined # Insert + append result, {i:component.i} + + else # Delete + length = component.d + while length > 0 + chunk = take length + throw new Error('The op traverses more elements than the document has') unless chunk != null + + chunkLength = componentLength chunk + if chunk.i != undefined + append result, {i:chunkLength} + else + append result, {d:chunkLength} + + length -= chunkLength + + # Append extras from op1 + while (component = take()) + throw new Error "Remaining fragments in op1: #{component}" unless component.i != undefined + append result, component + + result + +if WEB? + exports.types['text-tp2'] = type +else + module.exports = type + diff --git a/services/document-updater/app/coffee/sharejs/text.coffee b/services/document-updater/app/coffee/sharejs/text.coffee new file mode 100644 index 0000000000..c64b4dfa68 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/text.coffee @@ -0,0 +1,209 @@ +# A simple text implementation +# +# Operations are lists of components. +# Each component either inserts or deletes at a specified position in the document. 
+# +# Components are either: +# {i:'str', p:100}: Insert 'str' at position 100 in the document +# {d:'str', p:100}: Delete 'str' at position 100 in the document +# +# Components in an operation are executed sequentially, so the position of components +# assumes previous components have already executed. +# +# Eg: This op: +# [{i:'abc', p:0}] +# is equivalent to this op: +# [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}] + +# NOTE: The global scope here is shared with other sharejs files when built with closure. +# Be careful what ends up in your namespace. + +text = {} + +text.name = 'text' + +text.create = -> '' + +strInject = (s1, pos, s2) -> s1[...pos] + s2 + s1[pos..] + +checkValidComponent = (c) -> + throw new Error 'component missing position field' if typeof c.p != 'number' + + i_type = typeof c.i + d_type = typeof c.d + throw new Error 'component needs an i or d field' unless (i_type == 'string') ^ (d_type == 'string') + + throw new Error 'position cannot be negative' unless c.p >= 0 + +checkValidOp = (op) -> + checkValidComponent(c) for c in op + true + +text.apply = (snapshot, op) -> + checkValidOp op + for component in op + if component.i? + snapshot = strInject snapshot, component.p, component.i + else + deleted = snapshot[component.p...(component.p + component.d.length)] + throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted + snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..] + + snapshot + + +# Exported for use by the random op generator. +# +# For simplicity, this version of append does not compress adjacent inserts and deletes of +# the same text. It would be nice to change that at some stage. +text._append = append = (newOp, c) -> + return if c.i == '' or c.d == '' + if newOp.length == 0 + newOp.push c + else + last = newOp[newOp.length - 1] + + # Compose the insert into the previous insert if possible + if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length) + newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p} + else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length) + newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p} + else + newOp.push c + +text.compose = (op1, op2) -> + checkValidOp op1 + checkValidOp op2 + + newOp = op1.slice() + append newOp, c for c in op2 + + newOp + +# Attempt to compress the op components together 'as much as possible'. +# This implementation preserves order and preserves create/delete pairs. +text.compress = (op) -> text.compose [], op + +text.normalize = (op) -> + newOp = [] + + # Normalize should allow ops which are a single (unwrapped) component: + # {i:'asdf', p:23}. + # There's no good way to test if something is an array: + # http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/ + # so this is probably the least bad solution. + op = [op] if op.i? or op.p? + + for c in op + c.p ?= 0 + append newOp, c + + newOp + +# This helper method transforms a position by an op component. +# +# If c is an insert, insertAfter specifies whether the transform +# is pushed after the insert (true) or before it (false). +# +# insertAfter is optional for deletes. +transformPosition = (pos, c, insertAfter) -> + if c.i? 
+ if c.p < pos || (c.p == pos && insertAfter) + pos + c.i.length + else + pos + else + # I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) + # but I think its harder to read that way, and it compiles using ternary operators anyway + # so its no slower written like this. + if pos <= c.p + pos + else if pos <= c.p + c.d.length + c.p + else + pos - c.d.length + +# Helper method to transform a cursor position as a result of an op. +# +# Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position +# is pushed after an insert (true) or before it (false). +text.transformCursor = (position, op, side) -> + insertAfter = side == 'right' + position = transformPosition position, c, insertAfter for c in op + position + +# Transform an op component by another op component. Asymmetric. +# The result will be appended to destination. +# +# exported for use in JSON type +text._tc = transformComponent = (dest, c, otherC, side) -> + checkValidOp [c] + checkValidOp [otherC] + + if c.i? + append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')} + + else # Delete + if otherC.i? # delete vs insert + s = c.d + if c.p < otherC.p + append dest, {d:s[...otherC.p - c.p], p:c.p} + s = s[(otherC.p - c.p)..] + if s != '' + append dest, {d:s, p:c.p + otherC.i.length} + + else # Delete vs delete + if c.p >= otherC.p + otherC.d.length + append dest, {d:c.d, p:c.p - otherC.d.length} + else if c.p + c.d.length <= otherC.p + append dest, c + else + # They overlap somewhere. + newC = {d:'', p:c.p} + if c.p < otherC.p + newC.d = c.d[...(otherC.p - c.p)] + if c.p + c.d.length > otherC.p + otherC.d.length + newC.d += c.d[(otherC.p + otherC.d.length - c.p)..] + + # This is entirely optional - just for a check that the deleted + # text in the two ops matches + intersectStart = Math.max c.p, otherC.p + intersectEnd = Math.min c.p + c.d.length, otherC.p + otherC.d.length + cIntersect = c.d[intersectStart - c.p...intersectEnd - c.p] + otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p] + throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect + + if newC.d != '' + # This could be rewritten similarly to insert v delete, above. + newC.p = transformPosition newC.p, otherC + append dest, newC + + dest + +invertComponent = (c) -> + if c.i? + {d:c.i, p:c.p} + else + {i:c.d, p:c.p} + +# No need to use append for invert, because the components won't be able to +# cancel with one another. +text.invert = (op) -> (invertComponent c for c in op.slice().reverse()) + + +if WEB? + exports.types ||= {} + + # This is kind of awful - come up with a better way to hook this helper code up. + bootstrapTransform(text, transformComponent, checkValidOp, append) + + # [] is used to prevent closure from renaming types.text + exports.types.text = text +else + module.exports = text + + # The text type really shouldn't need this - it should be possible to define + # an efficient transform function by making a sort of transform map and passing each + # op component through it. 
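+  # For reference, the transform this bootstraps behaves like so (illustrative
+  # values):
+  #
+  #   text.transform [{i:'a', p:0}], [{i:'b', p:0}], 'left'    # => [{i:'a', p:0}]
+  #   text.transform [{i:'a', p:0}], [{i:'b', p:0}], 'right'   # => [{i:'a', p:1}]
+  #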
+ require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append) + diff --git a/services/document-updater/app/coffee/sharejs/types/count.coffee b/services/document-updater/app/coffee/sharejs/types/count.coffee new file mode 100644 index 0000000000..da28355efb --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/count.coffee @@ -0,0 +1,22 @@ +# This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment] + +exports.name = 'count' +exports.create = -> 1 + +exports.apply = (snapshot, op) -> + [v, inc] = op + throw new Error "Op #{v} != snapshot #{snapshot}" unless snapshot == v + snapshot + inc + +# transform op1 by op2. Return transformed version of op1. +exports.transform = (op1, op2) -> + throw new Error "Op1 #{op1[0]} != op2 #{op2[0]}" unless op1[0] == op2[0] + [op1[0] + op2[1], op1[1]] + +exports.compose = (op1, op2) -> + throw new Error "Op1 #{op1} + 1 != op2 #{op2}" unless op1[0] + op1[1] == op2[0] + [op1[0], op1[1] + op2[1]] + +exports.generateRandomOp = (doc) -> + [[doc, 1], doc + 1] + diff --git a/services/document-updater/app/coffee/sharejs/types/helpers.coffee b/services/document-updater/app/coffee/sharejs/types/helpers.coffee new file mode 100644 index 0000000000..093b32e1bb --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/helpers.coffee @@ -0,0 +1,65 @@ +# These methods let you build a transform function from a transformComponent function +# for OT types like text and JSON in which operations are lists of components +# and transforming them requires N^2 work. + +# Add transform and transformX functions for an OT type which has transformComponent defined. +# transformComponent(destination array, component, other component, side) +exports['_bt'] = bootstrapTransform = (type, transformComponent, checkValidOp, append) -> + transformComponentX = (left, right, destLeft, destRight) -> + transformComponent destLeft, left, right, 'left' + transformComponent destRight, right, left, 'right' + + # Transforms rightOp by leftOp. Returns ['rightOp', clientOp'] + type.transformX = type['transformX'] = transformX = (leftOp, rightOp) -> + checkValidOp leftOp + checkValidOp rightOp + + newRightOp = [] + + for rightComponent in rightOp + # Generate newLeftOp by composing leftOp by rightComponent + newLeftOp = [] + + k = 0 + while k < leftOp.length + nextC = [] + transformComponentX leftOp[k], rightComponent, newLeftOp, nextC + k++ + + if nextC.length == 1 + rightComponent = nextC[0] + else if nextC.length == 0 + append newLeftOp, l for l in leftOp[k..] + rightComponent = null + break + else + # Recurse. + [l_, r_] = transformX leftOp[k..], nextC + append newLeftOp, l for l in l_ + append newRightOp, r for r in r_ + rightComponent = null + break + + append newRightOp, rightComponent if rightComponent? + leftOp = newLeftOp + + [leftOp, newRightOp] + + # Transforms op with specified type ('left' or 'right') by otherOp. + type.transform = type['transform'] = (op, otherOp, type) -> + throw new Error "type must be 'left' or 'right'" unless type == 'left' or type == 'right' + + return op if otherOp.length == 0 + + # TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? 
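+    # (The line below short-circuits the common case of two single-component ops,
+    # avoiding the generic transformX recursion entirely.)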
+ return transformComponent [], op[0], otherOp[0], type if op.length == 1 and otherOp.length == 1 + + if type == 'left' + [left, _] = transformX op, otherOp + left + else + [_, right] = transformX otherOp, op + right + +if typeof WEB is 'undefined' + exports.bootstrapTransform = bootstrapTransform diff --git a/services/document-updater/app/coffee/sharejs/types/index.coffee b/services/document-updater/app/coffee/sharejs/types/index.coffee new file mode 100644 index 0000000000..6f3bb8ec20 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/index.coffee @@ -0,0 +1,15 @@ + +register = (file) -> + type = require file + exports[type.name] = type + try require "#{file}-api" + +# Import all the built-in types. +register './simple' +register './count' + +register './text' +register './text-composable' +register './text-tp2' + +register './json' diff --git a/services/document-updater/app/coffee/sharejs/types/json-api.coffee b/services/document-updater/app/coffee/sharejs/types/json-api.coffee new file mode 100644 index 0000000000..8819dee798 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/json-api.coffee @@ -0,0 +1,180 @@ +# API for JSON OT + +json = require './json' if typeof WEB is 'undefined' + +if WEB? + extendDoc = exports.extendDoc + exports.extendDoc = (name, fn) -> + SubDoc::[name] = fn + extendDoc name, fn + +depath = (path) -> + if path.length == 1 and path[0].constructor == Array + path[0] + else path + +class SubDoc + constructor: (@doc, @path) -> + at: (path...) -> @doc.at @path.concat depath path + get: -> @doc.getAt @path + # for objects and lists + set: (value, cb) -> @doc.setAt @path, value, cb + # for strings and lists. + insert: (pos, value, cb) -> @doc.insertAt @path, pos, value, cb + # for strings + del: (pos, length, cb) -> @doc.deleteTextAt @path, length, pos, cb + # for objects and lists + remove: (cb) -> @doc.removeAt @path, cb + push: (value, cb) -> @insert @get().length, value, cb + move: (from, to, cb) -> @doc.moveAt @path, from, to, cb + add: (amount, cb) -> @doc.addAt @path, amount, cb + on: (event, cb) -> @doc.addListener @path, event, cb + removeListener: (l) -> @doc.removeListener l + + # text API compatibility + getLength: -> @get().length + getText: -> @get() + +traverse = (snapshot, path) -> + container = data:snapshot + key = 'data' + elem = container + for p in path + elem = elem[key] + key = p + throw new Error 'bad path' if typeof elem == 'undefined' + {elem, key} + +pathEquals = (p1, p2) -> + return false if p1.length != p2.length + for e,i in p1 + return false if e != p2[i] + true + +json.api = + provides: {json:true} + + at: (path...) 
-> new SubDoc this, depath path + + get: -> @snapshot + set: (value, cb) -> @setAt [], value, cb + + getAt: (path) -> + {elem, key} = traverse @snapshot, path + return elem[key] + + setAt: (path, value, cb) -> + {elem, key} = traverse @snapshot, path + op = {p:path} + if elem.constructor == Array + op.li = value + op.ld = elem[key] if typeof elem[key] != 'undefined' + else if typeof elem == 'object' + op.oi = value + op.od = elem[key] if typeof elem[key] != 'undefined' + else throw new Error 'bad path' + @submitOp [op], cb + + removeAt: (path, cb) -> + {elem, key} = traverse @snapshot, path + throw new Error 'no element at that path' unless typeof elem[key] != 'undefined' + op = {p:path} + if elem.constructor == Array + op.ld = elem[key] + else if typeof elem == 'object' + op.od = elem[key] + else throw new Error 'bad path' + @submitOp [op], cb + + insertAt: (path, pos, value, cb) -> + {elem, key} = traverse @snapshot, path + op = {p:path.concat pos} + if elem[key].constructor == Array + op.li = value + else if typeof elem[key] == 'string' + op.si = value + @submitOp [op], cb + + moveAt: (path, from, to, cb) -> + op = [{p:path.concat(from), lm:to}] + @submitOp op, cb + + addAt: (path, amount, cb) -> + op = [{p:path, na:amount}] + @submitOp op, cb + + deleteTextAt: (path, length, pos, cb) -> + {elem, key} = traverse @snapshot, path + op = [{p:path.concat(pos), sd:elem[key][pos...(pos + length)]}] + @submitOp op, cb + + addListener: (path, event, cb) -> + l = {path, event, cb} + @_listeners.push l + l + removeListener: (l) -> + i = @_listeners.indexOf l + return false if i < 0 + @_listeners.splice i, 1 + return true + _register: -> + @_listeners = [] + @on 'change', (op) -> + for c in op + if c.na != undefined or c.si != undefined or c.sd != undefined + # no change to structure + continue + to_remove = [] + for l, i in @_listeners + # Transform a dummy op by the incoming op to work out what + # should happen to the listener. + dummy = {p:l.path, na:0} + xformed = @type.transformComponent [], dummy, c, 'left' + if xformed.length == 0 + # The op was transformed to noop, so we should delete the listener. + to_remove.push i + else if xformed.length == 1 + # The op remained, so grab its new path into the listener. + l.path = xformed[0].p + else + throw new Error "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components." + to_remove.sort (a, b) -> b - a + for i in to_remove + @_listeners.splice i, 1 + @on 'remoteop', (op) -> + for c in op + match_path = if c.na == undefined then c.p[...c.p.length-1] else c.p + for {path, event, cb} in @_listeners + if pathEquals path, match_path + switch event + when 'insert' + if c.li != undefined and c.ld == undefined + cb(c.p[c.p.length-1], c.li) + else if c.oi != undefined and c.od == undefined + cb(c.p[c.p.length-1], c.oi) + else if c.si != undefined + cb(c.p[c.p.length-1], c.si) + when 'delete' + if c.li == undefined and c.ld != undefined + cb(c.p[c.p.length-1], c.ld) + else if c.oi == undefined and c.od != undefined + cb(c.p[c.p.length-1], c.od) + else if c.sd != undefined + cb(c.p[c.p.length-1], c.sd) + when 'replace' + if c.li != undefined and c.ld != undefined + cb(c.p[c.p.length-1], c.ld, c.li) + else if c.oi != undefined and c.od != undefined + cb(c.p[c.p.length-1], c.od, c.oi) + when 'move' + if c.lm != undefined + cb(c.p[c.p.length-1], c.lm) + when 'add' + if c.na != undefined + cb(c.na) + else if (common = @type.commonPath match_path, path)? 
+ if event == 'child op' + if match_path.length == path.length == common + throw new Error "paths match length and have commonality, but aren't equal?" + child_path = c.p[common+1..] + cb(child_path, c) diff --git a/services/document-updater/app/coffee/sharejs/types/json.coffee b/services/document-updater/app/coffee/sharejs/types/json.coffee new file mode 100644 index 0000000000..b03b0947ef --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/json.coffee @@ -0,0 +1,441 @@ +# This is the implementation of the JSON OT type. +# +# Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations + +if WEB? + text = exports.types.text +else + text = require './text' + +json = {} + +json.name = 'json' + +json.create = -> null + +json.invertComponent = (c) -> + c_ = {p: c.p} + c_.sd = c.si if c.si != undefined + c_.si = c.sd if c.sd != undefined + c_.od = c.oi if c.oi != undefined + c_.oi = c.od if c.od != undefined + c_.ld = c.li if c.li != undefined + c_.li = c.ld if c.ld != undefined + c_.na = -c.na if c.na != undefined + if c.lm != undefined + c_.lm = c.p[c.p.length-1] + c_.p = c.p[0...c.p.length - 1].concat([c.lm]) + c_ + +json.invert = (op) -> json.invertComponent c for c in op.slice().reverse() + +json.checkValidOp = (op) -> + +isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' +json.checkList = (elem) -> + throw new Error 'Referenced element not a list' unless isArray(elem) + +json.checkObj = (elem) -> + throw new Error "Referenced element not an object (it was #{JSON.stringify elem})" unless elem.constructor is Object + +json.apply = (snapshot, op) -> + json.checkValidOp op + op = clone op + + container = {data: clone snapshot} + + try + for c, i in op + parent = null + parentkey = null + elem = container + key = 'data' + + for p in c.p + parent = elem + parentkey = key + elem = elem[key] + key = p + + throw new Error 'Path invalid' unless parent? + + if c.na != undefined + # Number add + throw new Error 'Referenced element not a number' unless typeof elem[key] is 'number' + elem[key] += c.na + + else if c.si != undefined + # String insert + throw new Error "Referenced element not a string (it was #{JSON.stringify elem})" unless typeof elem is 'string' + parent[parentkey] = elem[...key] + c.si + elem[key..] + else if c.sd != undefined + # String delete + throw new Error 'Referenced element not a string' unless typeof elem is 'string' + throw new Error 'Deleted string does not match' unless elem[key...key + c.sd.length] == c.sd + parent[parentkey] = elem[...key] + elem[key + c.sd.length..] + + else if c.li != undefined && c.ld != undefined + # List replace + json.checkList elem + + # Should check the list element matches c.ld + elem[key] = c.li + else if c.li != undefined + # List insert + json.checkList elem + + elem.splice key, 0, c.li + else if c.ld != undefined + # List delete + json.checkList elem + + # Should check the list element matches c.ld here too. + elem.splice key, 1 + else if c.lm != undefined + # List move + json.checkList elem + if c.lm != key + e = elem[key] + # Remove it... + elem.splice key, 1 + # And insert it back. 
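+          # (For example, applying {p:[1], lm:0} to ['a', 'b', 'c'] removes 'b'
+          # and reinserts it at index 0, giving ['b', 'a', 'c'].)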
+ elem.splice c.lm, 0, e + + else if c.oi != undefined + # Object insert / replace + json.checkObj elem + + # Should check that elem[key] == c.od + elem[key] = c.oi + else if c.od != undefined + # Object delete + json.checkObj elem + + # Should check that elem[key] == c.od + delete elem[key] + else + throw new Error 'invalid / missing instruction in op' + catch error + # TODO: Roll back all already applied changes. Write tests before implementing this code. + throw error + + container.data + +# Checks if two paths, p1 and p2 match. +json.pathMatches = (p1, p2, ignoreLast) -> + return false unless p1.length == p2.length + + for p, i in p1 + return false if p != p2[i] and (!ignoreLast or i != p1.length - 1) + + true + +json.append = (dest, c) -> + c = clone c + if dest.length != 0 and json.pathMatches c.p, (last = dest[dest.length - 1]).p + if last.na != undefined and c.na != undefined + dest[dest.length - 1] = { p: last.p, na: last.na + c.na } + else if last.li != undefined and c.li == undefined and c.ld == last.li + # insert immediately followed by delete becomes a noop. + if last.ld != undefined + # leave the delete part of the replace + delete last.li + else + dest.pop() + else if last.od != undefined and last.oi == undefined and + c.oi != undefined and c.od == undefined + last.oi = c.oi + else if c.lm != undefined and c.p[c.p.length-1] == c.lm + null # don't do anything + else + dest.push c + else + dest.push c + +json.compose = (op1, op2) -> + json.checkValidOp op1 + json.checkValidOp op2 + + newOp = clone op1 + json.append newOp, c for c in op2 + + newOp + +json.normalize = (op) -> + newOp = [] + + op = [op] unless isArray op + + for c in op + c.p ?= [] + json.append newOp, c + + newOp + +# hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming +# we have browser support for JSON. +# http://jsperf.com/cloning-an-object/12 +clone = (o) -> JSON.parse(JSON.stringify o) + +json.commonPath = (p1, p2) -> + p1 = p1.slice() + p2 = p2.slice() + p1.unshift('data') + p2.unshift('data') + p1 = p1[...p1.length-1] + p2 = p2[...p2.length-1] + return -1 if p2.length == 0 + i = 0 + while p1[i] == p2[i] && i < p1.length + i++ + if i == p2.length + return i-1 + return + +# transform c so it applies to a document with otherC applied. +json.transformComponent = (dest, c, otherC, type) -> + c = clone c + c.p.push(0) if c.na != undefined + otherC.p.push(0) if otherC.na != undefined + + common = json.commonPath c.p, otherC.p + common2 = json.commonPath otherC.p, c.p + + cplength = c.p.length + otherCplength = otherC.p.length + + c.p.pop() if c.na != undefined # hax + otherC.p.pop() if otherC.na != undefined + + if otherC.na + if common2? && otherCplength >= cplength && otherC.p[common2] == c.p[common2] + if c.ld != undefined + oc = clone otherC + oc.p = oc.p[cplength..] + c.ld = json.apply clone(c.ld), [oc] + else if c.od != undefined + oc = clone otherC + oc.p = oc.p[cplength..] + c.od = json.apply clone(c.od), [oc] + json.append dest, c + return dest + + if common2? && otherCplength > cplength && c.p[common2] == otherC.p[common2] + # transform based on c + if c.ld != undefined + oc = clone otherC + oc.p = oc.p[cplength..] + c.ld = json.apply clone(c.ld), [oc] + else if c.od != undefined + oc = clone otherC + oc.p = oc.p[cplength..] + c.od = json.apply clone(c.od), [oc] + + + if common? 
+ commonOperand = cplength == otherCplength + # transform based on otherC + if otherC.na != undefined + # this case is handled above due to icky path hax + else if otherC.si != undefined || otherC.sd != undefined + # String op vs string op - pass through to text type + if c.si != undefined || c.sd != undefined + throw new Error("must be a string?") unless commonOperand + + # Convert an op component to a text op component + convert = (component) -> + newC = p:component.p[component.p.length - 1] + if component.si + newC.i = component.si + else + newC.d = component.sd + newC + + tc1 = convert c + tc2 = convert otherC + + res = [] + text._tc res, tc1, tc2, type + for tc in res + jc = { p: c.p[...common] } + jc.p.push(tc.p) + jc.si = tc.i if tc.i? + jc.sd = tc.d if tc.d? + json.append dest, jc + return dest + else if otherC.li != undefined && otherC.ld != undefined + if otherC.p[common] == c.p[common] + # noop + if !commonOperand + # we're below the deleted element, so -> noop + return dest + else if c.ld != undefined + # we're trying to delete the same element, -> noop + if c.li != undefined and type == 'left' + # we're both replacing one element with another. only one can + # survive! + c.ld = clone otherC.li + else + return dest + else if otherC.li != undefined + if c.li != undefined and c.ld == undefined and commonOperand and c.p[common] == otherC.p[common] + # in li vs. li, left wins. + if type == 'right' + c.p[common]++ + else if otherC.p[common] <= c.p[common] + c.p[common]++ + + if c.lm != undefined + if commonOperand + # otherC edits the same list we edit + if otherC.p[common] <= c.lm + c.lm++ + # changing c.from is handled above. + else if otherC.ld != undefined + if c.lm != undefined + if commonOperand + if otherC.p[common] == c.p[common] + # they deleted the thing we're trying to move + return dest + # otherC edits the same list we edit + p = otherC.p[common] + from = c.p[common] + to = c.lm + if p < to || (p == to && from < to) + c.lm-- + + if otherC.p[common] < c.p[common] + c.p[common]-- + else if otherC.p[common] == c.p[common] + if otherCplength < cplength + # we're below the deleted element, so -> noop + return dest + else if c.ld != undefined + if c.li != undefined + # we're replacing, they're deleting. we become an insert. + delete c.ld + else + # we're trying to delete the same element, -> noop + return dest + else if otherC.lm != undefined + if c.lm != undefined and cplength == otherCplength + # lm vs lm, here we go! + from = c.p[common] + to = c.lm + otherFrom = otherC.p[common] + otherTo = otherC.lm + if otherFrom != otherTo + # if otherFrom == otherTo, we don't need to change our op. + + # where did my thing go? + if from == otherFrom + # they moved it! tie break. + if type == 'left' + c.p[common] = otherTo + if from == to # ugh + c.lm = otherTo + else + return dest + else + # they moved around it + if from > otherFrom + c.p[common]-- + if from > otherTo + c.p[common]++ + else if from == otherTo + if otherFrom > otherTo + c.p[common]++ + if from == to # ugh, again + c.lm++ + + # step 2: where am i going to put it? 
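+          # (c.lm needs the same index adjustments as c.p[common] received in
+          # step 1, since both are indices into the list otherC reshuffled)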
+ if to > otherFrom + c.lm-- + else if to == otherFrom + if to > from + c.lm-- + if to > otherTo + c.lm++ + else if to == otherTo + # if we're both moving in the same direction, tie break + if (otherTo > otherFrom and to > from) or + (otherTo < otherFrom and to < from) + if type == 'right' + c.lm++ + else + if to > from + c.lm++ + else if to == otherFrom + c.lm-- + else if c.li != undefined and c.ld == undefined and commonOperand + # li + from = otherC.p[common] + to = otherC.lm + p = c.p[common] + if p > from + c.p[common]-- + if p > to + c.p[common]++ + else + # ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath + # the lm + # + # i.e. things care about where their item is after the move. + from = otherC.p[common] + to = otherC.lm + p = c.p[common] + if p == from + c.p[common] = to + else + if p > from + c.p[common]-- + if p > to + c.p[common]++ + else if p == to + if from > to + c.p[common]++ + else if otherC.oi != undefined && otherC.od != undefined + if c.p[common] == otherC.p[common] + if c.oi != undefined and commonOperand + # we inserted where someone else replaced + if type == 'right' + # left wins + return dest + else + # we win, make our op replace what they inserted + c.od = otherC.oi + else + # -> noop if the other component is deleting the same object (or any + # parent) + return dest + else if otherC.oi != undefined + if c.oi != undefined and c.p[common] == otherC.p[common] + # left wins if we try to insert at the same place + if type == 'left' + json.append dest, {p:c.p, od:otherC.oi} + else + return dest + else if otherC.od != undefined + if c.p[common] == otherC.p[common] + return dest if !commonOperand + if c.oi != undefined + delete c.od + else + return dest + + json.append dest, c + return dest + +if WEB? + exports.types ||= {} + + # This is kind of awful - come up with a better way to hook this helper code up. + exports._bt(json, json.transformComponent, json.checkValidOp, json.append) + + # [] is used to prevent closure from renaming types.text + exports.types.json = json +else + module.exports = json + + require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append) + diff --git a/services/document-updater/app/coffee/sharejs/types/model.coffee b/services/document-updater/app/coffee/sharejs/types/model.coffee new file mode 100644 index 0000000000..284d6fd770 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/model.coffee @@ -0,0 +1,603 @@ +# The model of all the ops. Responsible for applying & transforming remote deltas +# and managing the storage layer. +# +# Actual storage is handled by the database wrappers in db/*, wrapped by DocCache + +{EventEmitter} = require 'events' + +queue = require './syncqueue' +types = require '../types' + +isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' + +# This constructor creates a new Model object. There will be one model object +# per server context. +# +# The model object is responsible for a lot of things: +# +# - It manages the interactions with the database +# - It maintains (in memory) a set of all active documents +# - It calls out to the OT functions when necessary +# +# The model is an event emitter. It emits the following events: +# +# create(docName, data): A document has been created with the specified name & data +module.exports = Model = (db, options) -> + # db can be null if the user doesn't want persistance. 
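+#
+# A minimal usage sketch (values are illustrative only):
+#
+#   model = new Model db, {reapTime: 5000, numCachedOps: 20}
+#   model.create 'doc', 'text', ->
+#     model.applyOp 'doc', {v:0, op:[{i:'hi', p:0}]}, (error, version) ->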
+ + return new Model(db, options) if !(this instanceof Model) + + model = this + + options ?= {} + + # This is a cache of 'live' documents. + # + # The cache is a map from docName -> { + # ops:[{op, meta}] + # snapshot + # type + # v + # meta + # eventEmitter + # reapTimer + # committedVersion: v + # snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant + # dbMeta: database specific data + # opQueue: syncQueue for processing ops + # } + # + # The ops list contains the document's last options.numCachedOps ops. (Or all + # of them if we're using a memory store). + # + # Documents are stored in this set so long as the document has been accessed in + # the last few seconds (options.reapTime) OR at least one client has the document + # open. I don't know if I should keep open (but not being edited) documents live - + # maybe if a client has a document open but the document isn't being edited, I should + # flush it from the cache. + # + # In any case, the API to model is designed such that if we want to change that later + # it should be pretty easy to do so without any external-to-the-model code changes. + docs = {} + + # This is a map from docName -> [callback]. It is used when a document hasn't been + # cached and multiple getSnapshot() / getVersion() requests come in. All requests + # are added to the callback list and called when db.getSnapshot() returns. + # + # callback(error, snapshot data) + awaitingGetSnapshot = {} + + # The time that documents which no clients have open will stay in the cache. + # Should be > 0. + options.reapTime ?= 3000 + + # The number of operations the cache holds before reusing the space + options.numCachedOps ?= 10 + + # This option forces documents to be reaped, even when there's no database backend. + # This is useful when you don't care about persistance and don't want to gradually + # fill memory. + # + # You might want to set reapTime to a day or something. + options.forceReaping ?= false + + # Until I come up with a better strategy, we'll save a copy of the document snapshot + # to the database every ~20 submitted ops. + options.opsBeforeCommit ?= 20 + + # It takes some processing time to transform client ops. The server will punt ops back to the + # client to transform if they're too old. + options.maximumAge ?= 40 + + # **** Cache API methods + + # Its important that all ops are applied in order. This helper method creates the op submission queue + # for a single document. This contains the logic for transforming & applying ops. + makeOpQueue = (docName, doc) -> queue (opData, callback) -> + return callback 'Version missing' unless opData.v >= 0 + return callback 'Op at future version' if opData.v > doc.v + + # Punt the transforming work back to the client if the op is too old. + return callback 'Op too old' if opData.v + options.maximumAge < doc.v + + opData.meta ||= {} + opData.meta.ts = Date.now() + + # We'll need to transform the op to the current version of the document. This + # calls the callback immediately if opVersion == doc.v. + getOps docName, opData.v, doc.v, (error, ops) -> + return callback error if error + + unless doc.v - opData.v == ops.length + # This should never happen. It indicates that we didn't get all the ops we + # asked for. Its important that the submitted op is correctly transformed. 
+ console.error "Could not get old ops in model for document #{docName}" + console.error "Expected ops #{opData.v} to #{doc.v} and got #{ops.length} ops" + return callback 'Internal error' + + if ops.length > 0 + try + # If there's enough ops, it might be worth spinning this out into a webworker thread. + for oldOp in ops + # Dup detection works by sending the id(s) the op has been submitted with previously. + # If the id matches, we reject it. The client can also detect the op has been submitted + # already if it sees its own previous id in the ops it sees when it does catchup. + if oldOp.meta.source and opData.dupIfSource and oldOp.meta.source in opData.dupIfSource + return callback 'Op already submitted' + + opData.op = doc.type.transform opData.op, oldOp.op, 'left' + opData.v++ + catch error + console.error error.stack + return callback error.message + + try + snapshot = doc.type.apply doc.snapshot, opData.op + catch error + console.error error.stack + return callback error.message + + # The op data should be at the current version, and the new document data should be at + # the next version. + # + # This should never happen in practice, but its a nice little check to make sure everything + # is hunky-dory. + unless opData.v == doc.v + # This should never happen. + console.error "Version mismatch detected in model. File a ticket - this is a bug." + console.error "Expecting #{opData.v} == #{doc.v}" + return callback 'Internal error' + + #newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + writeOp = db?.writeOp or (docName, newOpData, callback) -> callback() + + writeOp docName, opData, (error) -> + if error + # The user should probably know about this. + console.warn "Error writing ops to database: #{error}" + return callback error + + options.stats?.writeOp?() + + # This is needed when we emit the 'change' event, below. + oldSnapshot = doc.snapshot + + # All the heavy lifting is now done. Finally, we'll update the cache with the new data + # and (maybe!) save a new document snapshot to the database. + + doc.v = opData.v + 1 + doc.snapshot = snapshot + + doc.ops.push opData + doc.ops.shift() if db and doc.ops.length > options.numCachedOps + + model.emit 'applyOp', docName, opData, snapshot, oldSnapshot + doc.eventEmitter.emit 'op', opData, snapshot, oldSnapshot + + # The callback is called with the version of the document at which the op was applied. + # This is the op.v after transformation, and its doc.v - 1. + callback null, opData.v + + # I need a decent strategy here for deciding whether or not to save the snapshot. + # + # The 'right' strategy looks something like "Store the snapshot whenever the snapshot + # is smaller than the accumulated op data". For now, I'll just store it every 20 + # ops or something. (Configurable with doc.committedVersion) + if !doc.snapshotWriteLock and doc.committedVersion + options.opsBeforeCommit <= doc.v + tryWriteSnapshot docName, (error) -> + console.warn "Error writing snapshot #{error}. This is nonfatal" if error + + # Add the data for the given docName to the cache. The named document shouldn't already + # exist in the doc set. + # + # Returns the new doc. 
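+  # For example, create() below calls it as:
+  #   add docName, null, {snapshot, type, meta, v:0}, 0, [], dbMeta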
+ add = (docName, error, data, committedVersion, ops, dbMeta) -> + callbacks = awaitingGetSnapshot[docName] + delete awaitingGetSnapshot[docName] + + if error + callback error for callback in callbacks if callbacks + else + doc = docs[docName] = + snapshot: data.snapshot + v: data.v + type: data.type + meta: data.meta + + # Cache of ops + ops: ops or [] + + eventEmitter: new EventEmitter + + # Timer before the document will be invalidated from the cache (if the document has no + # listeners) + reapTimer: null + + # Version of the snapshot thats in the database + committedVersion: committedVersion ? data.v + snapshotWriteLock: false + dbMeta: dbMeta + + doc.opQueue = makeOpQueue docName, doc + + refreshReapingTimeout docName + model.emit 'add', docName, data + callback null, doc for callback in callbacks if callbacks + + doc + + # This is a little helper wrapper around db.getOps. It does two things: + # + # - If there's no database set, it returns an error to the callback + # - It adds version numbers to each op returned from the database + # (These can be inferred from context so the DB doesn't store them, but its useful to have them). + getOpsInternal = (docName, start, end, callback) -> + return callback? 'Document does not exist' unless db + + db.getOps docName, start, end, (error, ops) -> + return callback? error if error + + v = start + op.v = v++ for op in ops + + callback? null, ops + + # Load the named document into the cache. This function is re-entrant. + # + # The callback is called with (error, doc) + load = (docName, callback) -> + if docs[docName] + # The document is already loaded. Return immediately. + options.stats?.cacheHit? 'getSnapshot' + return callback null, docs[docName] + + # We're a memory store. If we don't have it, nobody does. + return callback 'Document does not exist' unless db + + callbacks = awaitingGetSnapshot[docName] + + # The document is being loaded already. Add ourselves as a callback. + return callbacks.push callback if callbacks + + options.stats?.cacheMiss? 'getSnapshot' + + # The document isn't loaded and isn't being loaded. Load it. + awaitingGetSnapshot[docName] = [callback] + db.getSnapshot docName, (error, data, dbMeta) -> + return add docName, error if error + + type = types[data.type] + unless type + console.warn "Type '#{data.type}' missing" + return callback "Type not found" + data.type = type + + committedVersion = data.v + + # The server can close without saving the most recent document snapshot. + # In this case, there are extra ops which need to be applied before + # returning the snapshot. + getOpsInternal docName, data.v, null, (error, ops) -> + return callback error if error + + if ops.length > 0 + console.log "Catchup #{docName} #{data.v} -> #{data.v + ops.length}" + + try + for op in ops + data.snapshot = type.apply data.snapshot, op.op + data.v++ + catch e + # This should never happen - it indicates that whats in the + # database is invalid. + console.error "Op data invalid for #{docName}: #{e.stack}" + return callback 'Op data invalid' + + model.emit 'load', docName, data + add docName, error, data, committedVersion, ops, dbMeta + + # This makes sure the cache contains a document. If the doc cache doesn't contain + # a document, it is loaded from the database and stored. 
+  #
+  # Documents are stored so long as either:
+  # - They have been accessed within the past #{PERIOD}
+  # - At least one client has the document open
+  refreshReapingTimeout = (docName) ->
+    doc = docs[docName]
+    return unless doc
+
+    # I want to let the clients list be updated before this is called.
+    process.nextTick ->
+      # This is an awkward way to find out the number of clients on a document. If this
+      # causes performance issues, add a numClients field to the document.
+      #
+      # The first check is because it's possible that between refreshReapingTimeout being called and this
+      # event being fired, someone called delete() on the document and hence the doc is something else now.
+      if doc == docs[docName] and
+          doc.eventEmitter.listeners('op').length == 0 and
+          (db or options.forceReaping) and
+          doc.opQueue.busy is false
+
+        clearTimeout doc.reapTimer
+        doc.reapTimer = reapTimer = setTimeout ->
+            tryWriteSnapshot docName, ->
+              # If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
+              # in the middle of applying an operation, don't reap.
+              delete docs[docName] if docs[docName].reapTimer is reapTimer and doc.opQueue.busy is false
+          , options.reapTime
+
+  tryWriteSnapshot = (docName, callback) ->
+    return callback?() unless db
+
+    doc = docs[docName]
+
+    # The doc is closed
+    return callback?() unless doc
+
+    # The document is already saved.
+    return callback?() if doc.committedVersion is doc.v
+
+    return callback? 'Another snapshot write is in progress' if doc.snapshotWriteLock
+
+    doc.snapshotWriteLock = true
+
+    options.stats?.writeSnapshot?()
+
+    writeSnapshot = db?.writeSnapshot or (docName, docData, dbMeta, callback) -> callback()
+
+    data =
+      v: doc.v
+      meta: doc.meta
+      snapshot: doc.snapshot
+      # The database doesn't know about object types.
+      type: doc.type.name
+
+    # Commit snapshot.
+    writeSnapshot docName, data, doc.dbMeta, (error, dbMeta) ->
+      doc.snapshotWriteLock = false
+
+      # We have to use data.v here because the version in the doc could
+      # have been updated between the call to writeSnapshot() and now.
+      doc.committedVersion = data.v
+      doc.dbMeta = dbMeta
+
+      callback? error
+
+  # *** Model interface methods
+
+  # Create a new document.
+  #
+  # data should be {snapshot, type, [meta]}. The version of a new document is 0.
+  @create = (docName, type, meta, callback) ->
+    [meta, callback] = [{}, meta] if typeof meta is 'function'
+
+    return callback? 'Invalid document name' if docName.match /\//
+    return callback? 'Document already exists' if docs[docName]
+
+    type = types[type] if typeof type == 'string'
+    return callback? 'Type not found' unless type
+
+    data =
+      snapshot:type.create()
+      type:type.name
+      meta:meta or {}
+      v:0
+
+    done = (error, dbMeta) ->
+      # dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
+      return callback? error if error
+
+      # From here on we'll store the object version of the type name.
+      data.type = type
+      add docName, null, data, 0, [], dbMeta
+      model.emit 'create', docName, data
+      callback?()
+
+    if db
+      db.create docName, data, done
+    else
+      done()
+
+  # Permanently deletes the specified document.
+  # If listeners are attached, they are removed.
+  #
+  # The callback is called with (error) if there was an error. If error is null / undefined, the
+  # document was deleted.
+  #
+  # WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
+  # deletion. Subsequent op submissions will fail).
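+  #
+  # Usage sketch (illustrative): model.delete 'some-doc', (error) -> throw error if error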
+  @delete = (docName, callback) ->
+    doc = docs[docName]
+
+    if doc
+      clearTimeout doc.reapTimer
+      delete docs[docName]
+
+    done = (error) ->
+      model.emit 'delete', docName unless error
+      callback? error
+
+    if db
+      db.delete docName, doc?.dbMeta, done
+    else
+      done (if !doc then 'Document does not exist')
+
+  # This gets all operations from [start...end]. (That is, it's not inclusive.)
+  #
+  # end can be null. This means 'get me all ops from start'.
+  #
+  # Each op returned is in the form {op:o, meta:m, v:version}.
+  #
+  # Callback is called with (error, [ops])
+  #
+  # If the document does not exist, getOps doesn't necessarily return an error. This is because
+  # it's awkward to figure out whether or not the document exists for things
+  # like the redis database backend. I guess it's a bit gross having this inconsistent
+  # with the other DB calls, but it's certainly convenient.
+  #
+  # Use getVersion() to determine if a document actually exists, if that's what you're
+  # after.
+  @getOps = getOps = (docName, start, end, callback) ->
+    # getOps will only use the op cache if it's there. It won't fill the op cache in.
+    throw new Error 'start must be 0+' unless start >= 0
+
+    [end, callback] = [null, end] if typeof end is 'function'
+
+    ops = docs[docName]?.ops
+
+    if ops
+      version = docs[docName].v
+
+      # Ops contains an array of ops. The last op in the list is the last op applied.
+      end ?= version
+      start = Math.min start, end
+
+      return callback null, [] if start == end
+
+      # Base is the version number of the oldest op we have cached.
+      base = version - ops.length
+
+      # If the database is null, we'll trim to the ops we do have and hope that's enough.
+      if start >= base or db is null
+        refreshReapingTimeout docName
+        options.stats?.cacheHit 'getOps'
+
+        return callback null, ops[(start - base)...(end - base)]
+
+    options.stats?.cacheMiss 'getOps'
+
+    getOpsInternal docName, start, end, callback
+
+  # Gets the snapshot data for the specified document.
+  # getSnapshot(docName, callback)
+  # Callback is called with (error, {v, type, snapshot, meta})
+  @getSnapshot = (docName, callback) ->
+    load docName, (error, doc) ->
+      callback error, if doc then {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta}
+
+  # Gets the latest version # of the document.
+  # getVersion(docName, callback)
+  # callback is called with (error, version).
+  @getVersion = (docName, callback) ->
+    load docName, (error, doc) -> callback error, doc?.v
+
+  # Apply an op to the specified document.
+  # The callback is passed (error, applied version #).
+  # opData = {op:op, v:v, meta:metadata}
+  #
+  # Ops are queued before being applied so that the following code applies op C before op B:
+  # model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB
+  # model.applyOp 'doc', OPC
+  @applyOp = (docName, opData, callback) ->
+    # All the logic for this is in makeOpQueue, above.
+    load docName, (error, doc) ->
+      return callback error if error
+
+      process.nextTick -> doc.opQueue opData, (error, newVersion) ->
+        refreshReapingTimeout docName
+        callback? error, newVersion
+
+  # TODO: store (some) metadata in DB
+  # TODO: op and meta should be combinable in the op that gets sent
+  @applyMetaOp = (docName, metaOpData, callback) ->
+    {path, value} = metaOpData.meta
+
+    return callback? "path should be an array" unless isArray path
+
+    load docName, (error, doc) ->
+      if error?
+        callback? error
+      else
+        applied = false
+        switch path[0]
+          when 'shout'
+            doc.eventEmitter.emit 'op', metaOpData
+            applied = true
+
+        model.emit 'applyMetaOp', docName, path, value if applied
+        callback? null, doc.v
+
+  # Listen to all ops from the specified version. If version is in the past, all
+  # ops since that version are sent immediately to the listener.
+  #
+  # The callback is called once the listener is attached, but before any ops have been passed
+  # to the listener.
+  #
+  # This will _not_ edit the document metadata.
+  #
+  # If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour
+  # might change in a future version.
+  #
+  # version is the document version at which the document is opened. It can be left out if you want to open
+  # the document at the most recent version.
+  #
+  # listener is called with (opData) each time an op is applied.
+  #
+  # callback(error, openedVersion)
+  @listen = (docName, version, listener, callback) ->
+    [version, listener, callback] = [null, version, listener] if typeof version is 'function'
+
+    load docName, (error, doc) ->
+      return callback? error if error
+
+      clearTimeout doc.reapTimer
+
+      if version?
+        getOps docName, version, null, (error, data) ->
+          return callback? error if error
+
+          doc.eventEmitter.on 'op', listener
+          callback? null, version
+          for op in data
+            listener op
+
+            # The listener may well remove itself during the catchup phase. If this happens, break early.
+            # This is done in a quite inefficient way. (O(n) where n = #listeners on doc)
+            break unless listener in doc.eventEmitter.listeners 'op'
+
+      else # Version is null / undefined. Just add the listener.
+        doc.eventEmitter.on 'op', listener
+        callback? null, doc.v
+
+  # Remove a listener for a particular document.
+  #
+  # removeListener(docName, listener)
+  #
+  # This is synchronous.
+  @removeListener = (docName, listener) ->
+    # The document should already be loaded.
+    doc = docs[docName]
+    throw new Error 'removeListener called but document not loaded' unless doc
+
+    doc.eventEmitter.removeListener 'op', listener
+    refreshReapingTimeout docName
+
+  # Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed -
+  # sharejs will happily replay uncommitted ops when documents are re-opened anyway.
+  @flush = (callback) ->
+    return callback?() unless db
+
+    pendingWrites = 0
+
+    for docName, doc of docs
+      if doc.committedVersion < doc.v
+        pendingWrites++
+        # I'm hoping writeSnapshot will always happen in another thread.
+        tryWriteSnapshot docName, ->
+          process.nextTick ->
+            pendingWrites--
+            callback?() if pendingWrites is 0
+
+    # If nothing was queued, terminate immediately.
+    callback?() if pendingWrites is 0
+
+  # Close the database connection. This is needed so nodejs can shut down cleanly.
+  @closeDb = ->
+    db?.close?()
+    db = null
+
+  return
+
+# Model inherits from EventEmitter.
+Model:: = new EventEmitter
+
diff --git a/services/document-updater/app/coffee/sharejs/types/simple.coffee b/services/document-updater/app/coffee/sharejs/types/simple.coffee
new file mode 100644
index 0000000000..996b1a5ddc
--- /dev/null
+++ b/services/document-updater/app/coffee/sharejs/types/simple.coffee
@@ -0,0 +1,38 @@
+# This is a really simple OT type. It's not compiled with the web client, but it could be.
+#
+# It's mostly included for demonstration purposes and it's used in a lot of unit tests.
+#
+# This defines a really simple text OT type which only allows inserts. (No deletes).
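+# (The full text type, with deletes, is implemented in text.coffee.)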
+# +# Ops look like: +# {position:#, text:"asdf"} +# +# Document snapshots look like: +# {str:string} + +module.exports = + # The name of the OT type. The type is stored in types[type.name]. The name can be + # used in place of the actual type in all the API methods. + name: 'simple' + + # Create a new document snapshot + create: -> {str:""} + + # Apply the given op to the document snapshot. Returns the new snapshot. + # + # The original snapshot should not be modified. + apply: (snapshot, op) -> + throw new Error 'Invalid position' unless 0 <= op.position <= snapshot.str.length + + str = snapshot.str + str = str.slice(0, op.position) + op.text + str.slice(op.position) + {str} + + # transform op1 by op2. Return transformed version of op1. + # sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the + # op being transformed comes from the client or the server. + transform: (op1, op2, sym) -> + pos = op1.position + pos += op2.text.length if op2.position < pos or (op2.position == pos and sym is 'left') + + return {position:pos, text:op1.text} diff --git a/services/document-updater/app/coffee/sharejs/types/syncqueue.coffee b/services/document-updater/app/coffee/sharejs/types/syncqueue.coffee new file mode 100644 index 0000000000..746450b010 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/syncqueue.coffee @@ -0,0 +1,42 @@ +# A synchronous processing queue. The queue calls process on the arguments, +# ensuring that process() is only executing once at a time. +# +# process(data, callback) _MUST_ eventually call its callback. +# +# Example: +# +# queue = require 'syncqueue' +# +# fn = queue (data, callback) -> +# asyncthing data, -> +# callback(321) +# +# fn(1) +# fn(2) +# fn(3, (result) -> console.log(result)) +# +# ^--- async thing will only be running once at any time. + +module.exports = (process) -> + throw new Error('process is not a function') unless typeof process == 'function' + queue = [] + + enqueue = (data, callback) -> + queue.push [data, callback] + flush() + + enqueue.busy = false + + flush = -> + return if enqueue.busy or queue.length == 0 + + enqueue.busy = true + [data, callback] = queue.shift() + process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false + # This is called after busy = false so a user can check if enqueue.busy is set in the callback. 
+ callback.apply null, result if callback + flush() + + enqueue + diff --git a/services/document-updater/app/coffee/sharejs/types/text-api.coffee b/services/document-updater/app/coffee/sharejs/types/text-api.coffee new file mode 100644 index 0000000000..96243ceffb --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/text-api.coffee @@ -0,0 +1,32 @@ +# Text document API for text + +text = require './text' if typeof WEB is 'undefined' + +text.api = + provides: {text:true} + + # The number of characters in the string + getLength: -> @snapshot.length + + # Get the text contents of a document + getText: -> @snapshot + + insert: (pos, text, callback) -> + op = [{p:pos, i:text}] + + @submitOp op, callback + op + + del: (pos, length, callback) -> + op = [{p:pos, d:@snapshot[pos...(pos + length)]}] + + @submitOp op, callback + op + + _register: -> + @on 'remoteop', (op) -> + for component in op + if component.i != undefined + @emit 'insert', component.p, component.i + else + @emit 'delete', component.p, component.d diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable-api.coffee b/services/document-updater/app/coffee/sharejs/types/text-composable-api.coffee new file mode 100644 index 0000000000..7b27ac163a --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/text-composable-api.coffee @@ -0,0 +1,43 @@ +# Text document API for text + +if WEB? + type = exports.types['text-composable'] +else + type = require './text-composable' + +type.api = + provides: {'text':true} + + # The number of characters in the string + 'getLength': -> @snapshot.length + + # Get the text contents of a document + 'getText': -> @snapshot + + 'insert': (pos, text, callback) -> + op = type.normalize [pos, 'i':text, (@snapshot.length - pos)] + + @submitOp op, callback + op + + 'del': (pos, length, callback) -> + op = type.normalize [pos, 'd':@snapshot[pos...(pos + length)], (@snapshot.length - pos - length)] + + @submitOp op, callback + op + + _register: -> + @on 'remoteop', (op) -> + pos = 0 + for component in op + if typeof component is 'number' + pos += component + else if component.i != undefined + @emit 'insert', pos, component.i + pos += component.i.length + else + # delete + @emit 'delete', pos, component.d + # We don't increment pos, because the position + # specified is after the delete has happened. + diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable.coffee b/services/document-updater/app/coffee/sharejs/types/text-composable.coffee new file mode 100644 index 0000000000..992b567bf0 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/text-composable.coffee @@ -0,0 +1,261 @@ +# An alternate composable implementation for text. This is much closer +# to the implementation used by google wave. +# +# Ops are lists of components which iterate over the whole document. +# Components are either: +# A number N: Skip N characters in the original document +# {i:'str'}: Insert 'str' at the current position in the document +# {d:'str'}: Delete 'str', which appears at the current position in the document +# +# Eg: [3, {i:'hi'}, 5, {d:'internet'}] +# +# Snapshots are strings. + +p = -> #require('util').debug +i = -> #require('util').inspect + +exports = if WEB? 
then {} else module.exports + +exports.name = 'text-composable' + +exports.create = -> '' + +# -------- Utility methods + +checkOp = (op) -> + throw new Error('Op must be an array of components') unless Array.isArray(op) + last = null + for c in op + if typeof(c) == 'object' + throw new Error("Invalid op component: #{i c}") unless (c.i? && c.i.length > 0) or (c.d? && c.d.length > 0) + else + throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number' + throw new Error('Skip components must be a positive number') unless c > 0 + throw new Error('Adjacent skip components should be added') if typeof(last) == 'number' + + last = c + +# Makes a function for appending components to a given op. +# Exported for the randomOpGenerator. +exports._makeAppend = makeAppend = (op) -> (component) -> + if component == 0 || component.i == '' || component.d == '' + return + else if op.length == 0 + op.push component + else if typeof(component) == 'number' && typeof(op[op.length - 1]) == 'number' + op[op.length - 1] += component + else if component.i? && op[op.length - 1].i? + op[op.length - 1].i += component.i + else if component.d? && op[op.length - 1].d? + op[op.length - 1].d += component.d + else + op.push component + +# checkOp op + +# Makes 2 functions for taking components from the start of an op, and for peeking +# at the next op that could be taken. +makeTake = (op) -> + # The index of the next component to take + idx = 0 + # The offset into the component + offset = 0 + + # Take up to length n from the front of op. If n is null, take the next + # op component. If indivisableField == 'd', delete components won't be separated. + # If indivisableField == 'i', insert components won't be separated. + take = (n, indivisableField) -> + return null if idx == op.length + #assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' + + if typeof(op[idx]) == 'number' + if !n? or op[idx] - offset <= n + c = op[idx] - offset + ++idx; offset = 0 + c + else + offset += n + n + else + # Take from the string + field = if op[idx].i then 'i' else 'd' + c = {} + if !n? or op[idx][field].length - offset <= n or field == indivisableField + c[field] = op[idx][field][offset..] + ++idx; offset = 0 + else + c[field] = op[idx][field][offset...(offset + n)] + offset += n + c + + peekType = () -> + op[idx] + + [take, peekType] + +# Find and return the length of an op component +componentLength = (component) -> + if typeof(component) == 'number' + component + else if component.i? + component.i.length + else + component.d.length + +# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +# adjacent inserts and deletes. +exports.normalize = (op) -> + newOp = [] + append = makeAppend newOp + append component for component in op + newOp + +# Apply the op to the string. Returns the new string. +exports.apply = (str, op) -> + p "Applying #{i op} to '#{str}'" + throw new Error('Snapshot should be a string') unless typeof(str) == 'string' + checkOp op + + pos = 0 + newDoc = [] + + for component in op + if typeof(component) == 'number' + throw new Error('The op is too long for this document') if component > str.length + newDoc.push str[...component] + str = str[component..] + else if component.i? + newDoc.push component.i + else + throw new Error("The deleted text '#{component.d}' doesn't match the next characters in the document '#{str[...component.d.length]}'") unless component.d == str[...component.d.length] + str = str[component.d.length..] 
+ + throw new Error("The applied op doesn't traverse the entire document") unless '' == str + + newDoc.join '' + +# transform op1 by op2. Return transformed version of op1. +# op1 and op2 are unchanged by transform. +exports.transform = (op, otherOp, side) -> + throw new Error "side (#{side} must be 'left' or 'right'" unless side == 'left' or side == 'right' + + checkOp op + checkOp otherOp + newOp = [] + + append = makeAppend newOp + [take, peek] = makeTake op + + for component in otherOp + if typeof(component) == 'number' # Skip + length = component + while length > 0 + chunk = take(length, 'i') + throw new Error('The op traverses more elements than the document has') unless chunk != null + + append chunk + length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.i? + else if component.i? # Insert + if side == 'left' + # The left insert should go first. + o = peek() + append take() if o?.i + + # Otherwise, skip the inserted text. + append(component.i.length) + else # Delete. + #assert.ok component.d + length = component.d.length + while length > 0 + chunk = take(length, 'i') + throw new Error('The op traverses more elements than the document has') unless chunk != null + + if typeof(chunk) == 'number' + length -= chunk + else if chunk.i? + append(chunk) + else + #assert.ok chunk.d + # The delete is unnecessary now. + length -= chunk.d.length + + # Append extras from op1 + while (component = take()) + throw new Error "Remaining fragments in the op: #{i component}" unless component?.i? + append component + + newOp + + +# Compose 2 ops into 1 op. +exports.compose = (op1, op2) -> + p "COMPOSE #{i op1} + #{i op2}" + checkOp op1 + checkOp op2 + + result = [] + + append = makeAppend result + [take, _] = makeTake op1 + + for component in op2 + if typeof(component) == 'number' # Skip + length = component + while length > 0 + chunk = take(length, 'd') + throw new Error('The op traverses more elements than the document has') unless chunk != null + + append chunk + length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.d? + + else if component.i? # Insert + append {i:component.i} + + else # Delete + offset = 0 + while offset < component.d.length + chunk = take(component.d.length - offset, 'd') + throw new Error('The op traverses more elements than the document has') unless chunk != null + + # If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. + if typeof(chunk) == 'number' + append {d:component.d[offset...(offset + chunk)]} + offset += chunk + else if chunk.i? + throw new Error("The deleted text doesn't match the inserted text") unless component.d[offset...(offset + chunk.i.length)] == chunk.i + offset += chunk.i.length + # The ops cancel each other out. + else + # Delete + append chunk + + # Append extras from op1 + while (component = take()) + throw new Error "Trailing stuff in op1 #{i component}" unless component?.d? + append component + + result + + +invertComponent = (c) -> + if typeof(c) == 'number' + c + else if c.i? + {d:c.i} + else + {i:c.d} + +# Invert an op +exports.invert = (op) -> + result = [] + append = makeAppend result + + append(invertComponent component) for component in op + + result + +if window? 
+ window.ot ||= {} + window.ot.types ||= {} + window.ot.types.text = exports + diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2-api.coffee b/services/document-updater/app/coffee/sharejs/types/text-tp2-api.coffee new file mode 100644 index 0000000000..d661b5ae37 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/text-tp2-api.coffee @@ -0,0 +1,89 @@ +# Text document API for text-tp2 + +if WEB? + type = exports.types['text-tp2'] +else + type = require './text-tp2' + +{_takeDoc:takeDoc, _append:append} = type + +appendSkipChars = (op, doc, pos, maxlength) -> + while (maxlength == undefined || maxlength > 0) and pos.index < doc.data.length + part = takeDoc doc, pos, maxlength, true + maxlength -= part.length if maxlength != undefined and typeof part is 'string' + append op, (part.length || part) + +type['api'] = + 'provides': {'text':true} + + # The number of characters in the string + 'getLength': -> @snapshot.charLength + + # Flatten a document into a string + 'getText': -> + strings = (elem for elem in @snapshot.data when typeof elem is 'string') + strings.join '' + + 'insert': (pos, text, callback) -> + pos = 0 if pos == undefined + + op = [] + docPos = {index:0, offset:0} + + appendSkipChars op, @snapshot, docPos, pos + append op, {'i':text} + appendSkipChars op, @snapshot, docPos + + @submitOp op, callback + op + + 'del': (pos, length, callback) -> + op = [] + docPos = {index:0, offset:0} + + appendSkipChars op, @snapshot, docPos, pos + + while length > 0 + part = takeDoc @snapshot, docPos, length, true + if typeof part is 'string' + append op, {'d':part.length} + length -= part.length + else + append op, part + + appendSkipChars op, @snapshot, docPos + + @submitOp op, callback + op + + '_register': -> + # Interpret recieved ops + generate more detailed events for them + @on 'remoteop', (op, snapshot) -> + textPos = 0 + docPos = {index:0, offset:0} + + for component in op + if typeof component is 'number' + # Skip + remainder = component + while remainder > 0 + part = takeDoc snapshot, docPos, remainder + if typeof part is 'string' + textPos += part.length + remainder -= part.length || part + else if component.i != undefined + # Insert + if typeof component.i is 'string' + @emit 'insert', textPos, component.i + textPos += component.i.length + else + # Delete + remainder = component.d + while remainder > 0 + part = takeDoc snapshot, docPos, remainder + if typeof part is 'string' + @emit 'delete', textPos, part + remainder -= part.length || part + + return + diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2.coffee b/services/document-updater/app/coffee/sharejs/types/text-tp2.coffee new file mode 100644 index 0000000000..d19cbdcef4 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/text-tp2.coffee @@ -0,0 +1,322 @@ +# A TP2 implementation of text, following this spec: +# http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README +# +# A document is made up of a string and a set of tombstones inserted throughout +# the string. For example, 'some ', (2 tombstones), 'string'. +# +# This is encoded in a document as: {s:'some string', t:[5, -2, 6]} +# +# Ops are lists of components which iterate over the whole document. 
+# Components are either: +# N: Skip N characters in the original document +# {i:'str'}: Insert 'str' at the current position in the document +# {i:N}: Insert N tombstones at the current position in the document +# {d:N}: Delete (tombstone) N characters at the current position in the document +# +# Eg: [3, {i:'hi'}, 5, {d:8}] +# +# Snapshots are lists with characters and tombstones. Characters are stored in strings +# and adjacent tombstones are flattened into numbers. +# +# Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters) +# would be represented by a document snapshot of ['Hello ', 5, 'world'] + +type = + name: 'text-tp2' + tp2: true + create: -> {charLength:0, totalLength:0, positionCache:[], data:[]} + serialize: (doc) -> + throw new Error 'invalid doc snapshot' unless doc.data + doc.data + deserialize: (data) -> + doc = type.create() + doc.data = data + + for component in data + if typeof component is 'string' + doc.charLength += component.length + doc.totalLength += component.length + else + doc.totalLength += component + + doc + + +checkOp = (op) -> + throw new Error('Op must be an array of components') unless Array.isArray(op) + last = null + for c in op + if typeof(c) == 'object' + if c.i != undefined + throw new Error('Inserts must insert a string or a +ive number') unless (typeof(c.i) == 'string' and c.i.length > 0) or (typeof(c.i) == 'number' and c.i > 0) + else if c.d != undefined + throw new Error('Deletes must be a +ive number') unless typeof(c.d) == 'number' and c.d > 0 + else + throw new Error('Operation component must define .i or .d') + else + throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number' + throw new Error('Skip components must be a positive number') unless c > 0 + throw new Error('Adjacent skip components should be combined') if typeof(last) == 'number' + + last = c + +# Take the next part from the specified position in a document snapshot. +# position = {index, offset}. It will be updated. +type._takeDoc = takeDoc = (doc, position, maxlength, tombsIndivisible) -> + throw new Error 'Operation goes past the end of the document' if position.index >= doc.data.length + + part = doc.data[position.index] + # peel off data[0] + result = if typeof(part) == 'string' + if maxlength != undefined + part[position.offset...(position.offset + maxlength)] + else + part[position.offset...] + else + if maxlength == undefined or tombsIndivisible + part - position.offset + else + Math.min(maxlength, part - position.offset) + + resultLen = result.length || result + + if (part.length || part) - position.offset > resultLen + position.offset += resultLen + else + position.index++ + position.offset = 0 + + result + +# Append a part to the end of a document +type._appendDoc = appendDoc = (doc, p) -> + return if p == 0 or p == '' + + if typeof p is 'string' + doc.charLength += p.length + doc.totalLength += p.length + else + doc.totalLength += p + + data = doc.data + if data.length == 0 + data.push p + else if typeof(data[data.length - 1]) == typeof(p) + data[data.length - 1] += p + else + data.push p + return + +# Apply the op to the document. The document is not modified in the process. 
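+#
+# e.g. applying [6, {d:5}] to a doc whose data is ['Hello world'] tombstones
+# 'world', leaving data ['Hello ', 5] (charLength 6, totalLength 11).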
+type.apply = (doc, op) -> + unless doc.totalLength != undefined and doc.charLength != undefined and doc.data.length != undefined + throw new Error('Snapshot is invalid') + + checkOp op + + newDoc = type.create() + position = {index:0, offset:0} + + for component in op + if typeof(component) is 'number' + remainder = component + while remainder > 0 + part = takeDoc doc, position, remainder + + appendDoc newDoc, part + remainder -= part.length || part + + else if component.i != undefined + appendDoc newDoc, component.i + else if component.d != undefined + remainder = component.d + while remainder > 0 + part = takeDoc doc, position, remainder + remainder -= part.length || part + appendDoc newDoc, component.d + + newDoc + +# Append an op component to the end of the specified op. +# Exported for the randomOpGenerator. +type._append = append = (op, component) -> + if component == 0 || component.i == '' || component.i == 0 || component.d == 0 + return + else if op.length == 0 + op.push component + else + last = op[op.length - 1] + if typeof(component) == 'number' && typeof(last) == 'number' + op[op.length - 1] += component + else if component.i != undefined && last.i? && typeof(last.i) == typeof(component.i) + last.i += component.i + else if component.d != undefined && last.d? + last.d += component.d + else + op.push component + +# Makes 2 functions for taking components from the start of an op, and for peeking +# at the next op that could be taken. +makeTake = (op) -> + # The index of the next component to take + index = 0 + # The offset into the component + offset = 0 + + # Take up to length maxlength from the op. If maxlength is not defined, there is no max. + # If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. + # + # Returns null when op is fully consumed. + take = (maxlength, insertsIndivisible) -> + return null if index == op.length + + e = op[index] + if typeof((current = e)) == 'number' or typeof((current = e.i)) == 'number' or (current = e.d) != undefined + if !maxlength? or current - offset <= maxlength or (insertsIndivisible and e.i != undefined) + # Return the rest of the current element. + c = current - offset + ++index; offset = 0 + else + offset += maxlength + c = maxlength + if e.i != undefined then {i:c} else if e.d != undefined then {d:c} else c + else + # Take from the inserted string + if !maxlength? or e.i.length - offset <= maxlength or insertsIndivisible + result = {i:e.i[offset..]} + ++index; offset = 0 + else + result = {i:e.i[offset...offset + maxlength]} + offset += maxlength + result + + peekType = -> op[index] + + [take, peekType] + +# Find and return the length of an op component +componentLength = (component) -> + if typeof(component) == 'number' + component + else if typeof(component.i) == 'string' + component.i.length + else + # This should work because c.d and c.i must be +ive. + component.d or component.i + +# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +# adjacent inserts and deletes. +type.normalize = (op) -> + newOp = [] + append newOp, component for component in op + newOp + +# This is a helper method to transform and prune. goForwards is true for transform, false for prune. 
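+# Roughly, prune undoes a transform: prune(transform(op, otherOp, side), otherOp)
+# should recover the original op.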
+transformer = (op, otherOp, goForwards, side) -> + checkOp op + checkOp otherOp + newOp = [] + + [take, peek] = makeTake op + + for component in otherOp + length = componentLength component + + if component.i != undefined # Insert text or tombs + if goForwards # transform - insert skips over inserted parts + if side == 'left' + # The left insert should go first. + append newOp, take() while peek()?.i != undefined + + # In any case, skip the inserted text. + append newOp, length + + else # Prune. Remove skips for inserts. + while length > 0 + chunk = take length, true + + throw new Error 'The transformed op is invalid' unless chunk != null + throw new Error 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' if chunk.d != undefined + + if typeof chunk is 'number' + length -= chunk + else + append newOp, chunk + + else # Skip or delete + while length > 0 + chunk = take length, true + throw new Error('The op traverses more elements than the document has') unless chunk != null + + append newOp, chunk + length -= componentLength chunk unless chunk.i + + # Append extras from op1 + while (component = take()) + throw new Error "Remaining fragments in the op: #{component}" unless component.i != undefined + append newOp, component + + newOp + +# transform op1 by op2. Return transformed version of op1. +# op1 and op2 are unchanged by transform. +# side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. +type.transform = (op, otherOp, side) -> + throw new Error "side (#{side}) should be 'left' or 'right'" unless side == 'left' or side == 'right' + transformer op, otherOp, true, side + +# Prune is the inverse of transform. +type.prune = (op, otherOp) -> transformer op, otherOp, false + +# Compose 2 ops into 1 op. +type.compose = (op1, op2) -> + return op2 if op1 == null or op1 == undefined + + checkOp op1 + checkOp op2 + + result = [] + + [take, _] = makeTake op1 + + for component in op2 + + if typeof(component) == 'number' # Skip + # Just copy from op1. + length = component + while length > 0 + chunk = take length + throw new Error('The op traverses more elements than the document has') unless chunk != null + + append result, chunk + length -= componentLength chunk + + else if component.i != undefined # Insert + append result, {i:component.i} + + else # Delete + length = component.d + while length > 0 + chunk = take length + throw new Error('The op traverses more elements than the document has') unless chunk != null + + chunkLength = componentLength chunk + if chunk.i != undefined + append result, {i:chunkLength} + else + append result, {d:chunkLength} + + length -= chunkLength + + # Append extras from op1 + while (component = take()) + throw new Error "Remaining fragments in op1: #{component}" unless component.i != undefined + append result, component + + result + +if WEB? + exports.types['text-tp2'] = type +else + module.exports = type + diff --git a/services/document-updater/app/coffee/sharejs/types/text.coffee b/services/document-updater/app/coffee/sharejs/types/text.coffee new file mode 100644 index 0000000000..c64b4dfa68 --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/text.coffee @@ -0,0 +1,209 @@ +# A simple text implementation +# +# Operations are lists of components. +# Each component either inserts or deletes at a specified position in the document. 
+# +# Components are either: +# {i:'str', p:100}: Insert 'str' at position 100 in the document +# {d:'str', p:100}: Delete 'str' at position 100 in the document +# +# Components in an operation are executed sequentially, so the position of components +# assumes previous components have already executed. +# +# Eg: This op: +# [{i:'abc', p:0}] +# is equivalent to this op: +# [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}] + +# NOTE: The global scope here is shared with other sharejs files when built with closure. +# Be careful what ends up in your namespace. + +text = {} + +text.name = 'text' + +text.create = -> '' + +strInject = (s1, pos, s2) -> s1[...pos] + s2 + s1[pos..] + +checkValidComponent = (c) -> + throw new Error 'component missing position field' if typeof c.p != 'number' + + i_type = typeof c.i + d_type = typeof c.d + throw new Error 'component needs an i or d field' unless (i_type == 'string') ^ (d_type == 'string') + + throw new Error 'position cannot be negative' unless c.p >= 0 + +checkValidOp = (op) -> + checkValidComponent(c) for c in op + true + +text.apply = (snapshot, op) -> + checkValidOp op + for component in op + if component.i? + snapshot = strInject snapshot, component.p, component.i + else + deleted = snapshot[component.p...(component.p + component.d.length)] + throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted + snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..] + + snapshot + + +# Exported for use by the random op generator. +# +# For simplicity, this version of append does not compress adjacent inserts and deletes of +# the same text. It would be nice to change that at some stage. +text._append = append = (newOp, c) -> + return if c.i == '' or c.d == '' + if newOp.length == 0 + newOp.push c + else + last = newOp[newOp.length - 1] + + # Compose the insert into the previous insert if possible + if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length) + newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p} + else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length) + newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p} + else + newOp.push c + +text.compose = (op1, op2) -> + checkValidOp op1 + checkValidOp op2 + + newOp = op1.slice() + append newOp, c for c in op2 + + newOp + +# Attempt to compress the op components together 'as much as possible'. +# This implementation preserves order and preserves create/delete pairs. +text.compress = (op) -> text.compose [], op + +text.normalize = (op) -> + newOp = [] + + # Normalize should allow ops which are a single (unwrapped) component: + # {i:'asdf', p:23}. + # There's no good way to test if something is an array: + # http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/ + # so this is probably the least bad solution. + op = [op] if op.i? or op.p? + + for c in op + c.p ?= 0 + append newOp, c + + newOp + +# This helper method transforms a position by an op component. +# +# If c is an insert, insertAfter specifies whether the transform +# is pushed after the insert (true) or before it (false). +# +# insertAfter is optional for deletes. +transformPosition = (pos, c, insertAfter) -> + if c.i? 
+ if c.p < pos || (c.p == pos && insertAfter) + pos + c.i.length + else + pos + else + # I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) + # but I think its harder to read that way, and it compiles using ternary operators anyway + # so its no slower written like this. + if pos <= c.p + pos + else if pos <= c.p + c.d.length + c.p + else + pos - c.d.length + +# Helper method to transform a cursor position as a result of an op. +# +# Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position +# is pushed after an insert (true) or before it (false). +text.transformCursor = (position, op, side) -> + insertAfter = side == 'right' + position = transformPosition position, c, insertAfter for c in op + position + +# Transform an op component by another op component. Asymmetric. +# The result will be appended to destination. +# +# exported for use in JSON type +text._tc = transformComponent = (dest, c, otherC, side) -> + checkValidOp [c] + checkValidOp [otherC] + + if c.i? + append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')} + + else # Delete + if otherC.i? # delete vs insert + s = c.d + if c.p < otherC.p + append dest, {d:s[...otherC.p - c.p], p:c.p} + s = s[(otherC.p - c.p)..] + if s != '' + append dest, {d:s, p:c.p + otherC.i.length} + + else # Delete vs delete + if c.p >= otherC.p + otherC.d.length + append dest, {d:c.d, p:c.p - otherC.d.length} + else if c.p + c.d.length <= otherC.p + append dest, c + else + # They overlap somewhere. + newC = {d:'', p:c.p} + if c.p < otherC.p + newC.d = c.d[...(otherC.p - c.p)] + if c.p + c.d.length > otherC.p + otherC.d.length + newC.d += c.d[(otherC.p + otherC.d.length - c.p)..] + + # This is entirely optional - just for a check that the deleted + # text in the two ops matches + intersectStart = Math.max c.p, otherC.p + intersectEnd = Math.min c.p + c.d.length, otherC.p + otherC.d.length + cIntersect = c.d[intersectStart - c.p...intersectEnd - c.p] + otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p] + throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect + + if newC.d != '' + # This could be rewritten similarly to insert v delete, above. + newC.p = transformPosition newC.p, otherC + append dest, newC + + dest + +invertComponent = (c) -> + if c.i? + {d:c.i, p:c.p} + else + {i:c.d, p:c.p} + +# No need to use append for invert, because the components won't be able to +# cancel with one another. +text.invert = (op) -> (invertComponent c for c in op.slice().reverse()) + + +if WEB? + exports.types ||= {} + + # This is kind of awful - come up with a better way to hook this helper code up. + bootstrapTransform(text, transformComponent, checkValidOp, append) + + # [] is used to prevent closure from renaming types.text + exports.types.text = text +else + module.exports = text + + # The text type really shouldn't need this - it should be possible to define + # an efficient transform function by making a sort of transform map and passing each + # op component through it. 
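+  # (bootstrapTransform derives the type's full transform() from the
+  # single-component transformComponent; see helpers.coffee.)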
+ require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append) + diff --git a/services/document-updater/app/coffee/sharejs/types/web-prelude.coffee b/services/document-updater/app/coffee/sharejs/types/web-prelude.coffee new file mode 100644 index 0000000000..3c045532dc --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/types/web-prelude.coffee @@ -0,0 +1,11 @@ +# This is included at the top of each compiled type file for the web. + +`/** + @const + @type {boolean} +*/ +var WEB = true; +` + +exports = window['sharejs'] + diff --git a/services/document-updater/app/coffee/sharejs/web-prelude.coffee b/services/document-updater/app/coffee/sharejs/web-prelude.coffee new file mode 100644 index 0000000000..3c045532dc --- /dev/null +++ b/services/document-updater/app/coffee/sharejs/web-prelude.coffee @@ -0,0 +1,11 @@ +# This is included at the top of each compiled type file for the web. + +`/** + @const + @type {boolean} +*/ +var WEB = true; +` + +exports = window['sharejs'] + diff --git a/services/document-updater/app/lib/diff_match_patch.js b/services/document-updater/app/lib/diff_match_patch.js new file mode 100644 index 0000000000..112130e097 --- /dev/null +++ b/services/document-updater/app/lib/diff_match_patch.js @@ -0,0 +1,2193 @@ +/** + * Diff Match and Patch + * + * Copyright 2006 Google Inc. + * http://code.google.com/p/google-diff-match-patch/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * @fileoverview Computes the difference between two texts to create a patch. + * Applies the patch onto another text, allowing for errors. + * @author fraser@google.com (Neil Fraser) + */ + +/** + * Class containing the diff, match and patch methods. + * @constructor + */ +function diff_match_patch() { + + // Defaults. + // Redefine these in your program to override the defaults. + + // Number of seconds to map a diff before giving up (0 for infinity). + this.Diff_Timeout = 1.0; + // Cost of an empty edit operation in terms of edit characters. + this.Diff_EditCost = 4; + // At what point is no match declared (0.0 = perfection, 1.0 = very loose). + this.Match_Threshold = 0.5; + // How far to search for a match (0 = exact location, 1000+ = broad match). + // A match this many characters away from the expected location will add + // 1.0 to the score (0.0 is a perfect match). + this.Match_Distance = 1000; + // When deleting a large block of text (over ~64 characters), how close do + // the contents have to be to match the expected contents. (0.0 = perfection, + // 1.0 = very loose). Note that Match_Threshold controls how closely the + // end points of a delete need to match. + this.Patch_DeleteThreshold = 0.5; + // Chunk size for context length. + this.Patch_Margin = 4; + + // The number of bits in an int. 
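+  // (JavaScript bitwise operators work on 32-bit integers, which is what
+  // caps the bitap match patterns below at Match_MaxBits characters.)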
+ this.Match_MaxBits = 32; +} + + +// DIFF FUNCTIONS + + +/** + * The data structure representing a diff is an array of tuples: + * [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']] + * which means: delete 'Hello', add 'Goodbye' and keep ' world.' + */ +var DIFF_DELETE = -1; +var DIFF_INSERT = 1; +var DIFF_EQUAL = 0; + +/** @typedef {{0: number, 1: string}} */ +diff_match_patch.Diff; + + +/** + * Find the differences between two texts. Simplifies the problem by stripping + * any common prefix or suffix off the texts before diffing. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. + * @param {boolean=} opt_checklines Optional speedup flag. If present and false, + * then don't run a line-level diff first to identify the changed areas. + * Defaults to true, which does a faster, slightly less optimal diff. + * @param {number} opt_deadline Optional time when the diff should be complete + * by. Used internally for recursive calls. Users should set DiffTimeout + * instead. + * @return {!Array.} Array of diff tuples. + */ +diff_match_patch.prototype.diff_main = function(text1, text2, opt_checklines, + opt_deadline) { + // Set a deadline by which time the diff must be complete. + if (typeof opt_deadline == 'undefined') { + if (this.Diff_Timeout <= 0) { + opt_deadline = Number.MAX_VALUE; + } else { + opt_deadline = (new Date).getTime() + this.Diff_Timeout * 1000; + } + } + var deadline = opt_deadline; + + // Check for null inputs. + if (text1 == null || text2 == null) { + throw new Error('Null input. (diff_main)'); + } + + // Check for equality (speedup). + if (text1 == text2) { + if (text1) { + return [[DIFF_EQUAL, text1]]; + } + return []; + } + + if (typeof opt_checklines == 'undefined') { + opt_checklines = true; + } + var checklines = opt_checklines; + + // Trim off common prefix (speedup). + var commonlength = this.diff_commonPrefix(text1, text2); + var commonprefix = text1.substring(0, commonlength); + text1 = text1.substring(commonlength); + text2 = text2.substring(commonlength); + + // Trim off common suffix (speedup). + commonlength = this.diff_commonSuffix(text1, text2); + var commonsuffix = text1.substring(text1.length - commonlength); + text1 = text1.substring(0, text1.length - commonlength); + text2 = text2.substring(0, text2.length - commonlength); + + // Compute the diff on the middle block. + var diffs = this.diff_compute_(text1, text2, checklines, deadline); + + // Restore the prefix and suffix. + if (commonprefix) { + diffs.unshift([DIFF_EQUAL, commonprefix]); + } + if (commonsuffix) { + diffs.push([DIFF_EQUAL, commonsuffix]); + } + this.diff_cleanupMerge(diffs); + return diffs; +}; + + +/** + * Find the differences between two texts. Assumes that the texts do not + * have any common prefix or suffix. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. + * @param {boolean} checklines Speedup flag. If false, then don't run a + * line-level diff first to identify the changed areas. + * If true, then run a faster, slightly less optimal diff. + * @param {number} deadline Time when the diff should be complete by. + * @return {!Array.} Array of diff tuples. + * @private + */ +diff_match_patch.prototype.diff_compute_ = function(text1, text2, checklines, + deadline) { + var diffs; + + if (!text1) { + // Just add some text (speedup). + return [[DIFF_INSERT, text2]]; + } + + if (!text2) { + // Just delete some text (speedup). 
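+    // e.g. (illustrative) diff_main('cat', '') -> [[DIFF_DELETE, 'cat']].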
+ return [[DIFF_DELETE, text1]]; + } + + var longtext = text1.length > text2.length ? text1 : text2; + var shorttext = text1.length > text2.length ? text2 : text1; + var i = longtext.indexOf(shorttext); + if (i != -1) { + // Shorter text is inside the longer text (speedup). + diffs = [[DIFF_INSERT, longtext.substring(0, i)], + [DIFF_EQUAL, shorttext], + [DIFF_INSERT, longtext.substring(i + shorttext.length)]]; + // Swap insertions for deletions if diff is reversed. + if (text1.length > text2.length) { + diffs[0][0] = diffs[2][0] = DIFF_DELETE; + } + return diffs; + } + + if (shorttext.length == 1) { + // Single character string. + // After the previous speedup, the character can't be an equality. + return [[DIFF_DELETE, text1], [DIFF_INSERT, text2]]; + } + + // Check to see if the problem can be split in two. + var hm = this.diff_halfMatch_(text1, text2); + if (hm) { + // A half-match was found, sort out the return data. + var text1_a = hm[0]; + var text1_b = hm[1]; + var text2_a = hm[2]; + var text2_b = hm[3]; + var mid_common = hm[4]; + // Send both pairs off for separate processing. + var diffs_a = this.diff_main(text1_a, text2_a, checklines, deadline); + var diffs_b = this.diff_main(text1_b, text2_b, checklines, deadline); + // Merge the results. + return diffs_a.concat([[DIFF_EQUAL, mid_common]], diffs_b); + } + + if (checklines && text1.length > 100 && text2.length > 100) { + return this.diff_lineMode_(text1, text2, deadline); + } + + return this.diff_bisect_(text1, text2, deadline); +}; + + +/** + * Do a quick line-level diff on both strings, then rediff the parts for + * greater accuracy. + * This speedup can produce non-minimal diffs. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. + * @param {number} deadline Time when the diff should be complete by. + * @return {!Array.} Array of diff tuples. + * @private + */ +diff_match_patch.prototype.diff_lineMode_ = function(text1, text2, deadline) { + // Scan the text on a line-by-line basis first. + var a = this.diff_linesToChars_(text1, text2); + text1 = a.chars1; + text2 = a.chars2; + var linearray = a.lineArray; + + var diffs = this.diff_main(text1, text2, false, deadline); + + // Convert the diff back to original text. + this.diff_charsToLines_(diffs, linearray); + // Eliminate freak matches (e.g. blank lines) + this.diff_cleanupSemantic(diffs); + + // Rediff any replacement blocks, this time character-by-character. + // Add a dummy entry at the end. + diffs.push([DIFF_EQUAL, '']); + var pointer = 0; + var count_delete = 0; + var count_insert = 0; + var text_delete = ''; + var text_insert = ''; + while (pointer < diffs.length) { + switch (diffs[pointer][0]) { + case DIFF_INSERT: + count_insert++; + text_insert += diffs[pointer][1]; + break; + case DIFF_DELETE: + count_delete++; + text_delete += diffs[pointer][1]; + break; + case DIFF_EQUAL: + // Upon reaching an equality, check for prior redundancies. + if (count_delete >= 1 && count_insert >= 1) { + // Delete the offending records and add the merged ones. 
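+          // (The coarse line-level delete/insert pair is replaced by a
+          // character-level rediff of the same text.)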
+ diffs.splice(pointer - count_delete - count_insert, + count_delete + count_insert); + pointer = pointer - count_delete - count_insert; + var a = this.diff_main(text_delete, text_insert, false, deadline); + for (var j = a.length - 1; j >= 0; j--) { + diffs.splice(pointer, 0, a[j]); + } + pointer = pointer + a.length; + } + count_insert = 0; + count_delete = 0; + text_delete = ''; + text_insert = ''; + break; + } + pointer++; + } + diffs.pop(); // Remove the dummy entry at the end. + + return diffs; +}; + + +/** + * Find the 'middle snake' of a diff, split the problem in two + * and return the recursively constructed diff. + * See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. + * @param {number} deadline Time at which to bail if not yet complete. + * @return {!Array.} Array of diff tuples. + * @private + */ +diff_match_patch.prototype.diff_bisect_ = function(text1, text2, deadline) { + // Cache the text lengths to prevent multiple calls. + var text1_length = text1.length; + var text2_length = text2.length; + var max_d = Math.ceil((text1_length + text2_length) / 2); + var v_offset = max_d; + var v_length = 2 * max_d; + var v1 = new Array(v_length); + var v2 = new Array(v_length); + // Setting all elements to -1 is faster in Chrome & Firefox than mixing + // integers and undefined. + for (var x = 0; x < v_length; x++) { + v1[x] = -1; + v2[x] = -1; + } + v1[v_offset + 1] = 0; + v2[v_offset + 1] = 0; + var delta = text1_length - text2_length; + // If the total number of characters is odd, then the front path will collide + // with the reverse path. + var front = (delta % 2 != 0); + // Offsets for start and end of k loop. + // Prevents mapping of space beyond the grid. + var k1start = 0; + var k1end = 0; + var k2start = 0; + var k2end = 0; + for (var d = 0; d < max_d; d++) { + // Bail out if deadline is reached. + if ((new Date()).getTime() > deadline) { + break; + } + + // Walk the front path one step. + for (var k1 = -d + k1start; k1 <= d - k1end; k1 += 2) { + var k1_offset = v_offset + k1; + var x1; + if (k1 == -d || (k1 != d && v1[k1_offset - 1] < v1[k1_offset + 1])) { + x1 = v1[k1_offset + 1]; + } else { + x1 = v1[k1_offset - 1] + 1; + } + var y1 = x1 - k1; + while (x1 < text1_length && y1 < text2_length && + text1.charAt(x1) == text2.charAt(y1)) { + x1++; + y1++; + } + v1[k1_offset] = x1; + if (x1 > text1_length) { + // Ran off the right of the graph. + k1end += 2; + } else if (y1 > text2_length) { + // Ran off the bottom of the graph. + k1start += 2; + } else if (front) { + var k2_offset = v_offset + delta - k1; + if (k2_offset >= 0 && k2_offset < v_length && v2[k2_offset] != -1) { + // Mirror x2 onto top-left coordinate system. + var x2 = text1_length - v2[k2_offset]; + if (x1 >= x2) { + // Overlap detected. + return this.diff_bisectSplit_(text1, text2, x1, y1, deadline); + } + } + } + } + + // Walk the reverse path one step. + for (var k2 = -d + k2start; k2 <= d - k2end; k2 += 2) { + var k2_offset = v_offset + k2; + var x2; + if (k2 == -d || (k2 != d && v2[k2_offset - 1] < v2[k2_offset + 1])) { + x2 = v2[k2_offset + 1]; + } else { + x2 = v2[k2_offset - 1] + 1; + } + var y2 = x2 - k2; + while (x2 < text1_length && y2 < text2_length && + text1.charAt(text1_length - x2 - 1) == + text2.charAt(text2_length - y2 - 1)) { + x2++; + y2++; + } + v2[k2_offset] = x2; + if (x2 > text1_length) { + // Ran off the left of the graph. 
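+        // (Trim this diagonal from future iterations; the reverse path
+        // cannot extend past the edge of the edit graph.)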
+ k2end += 2; + } else if (y2 > text2_length) { + // Ran off the top of the graph. + k2start += 2; + } else if (!front) { + var k1_offset = v_offset + delta - k2; + if (k1_offset >= 0 && k1_offset < v_length && v1[k1_offset] != -1) { + var x1 = v1[k1_offset]; + var y1 = v_offset + x1 - k1_offset; + // Mirror x2 onto top-left coordinate system. + x2 = text1_length - x2; + if (x1 >= x2) { + // Overlap detected. + return this.diff_bisectSplit_(text1, text2, x1, y1, deadline); + } + } + } + } + } + // Diff took too long and hit the deadline or + // number of diffs equals number of characters, no commonality at all. + return [[DIFF_DELETE, text1], [DIFF_INSERT, text2]]; +}; + + +/** + * Given the location of the 'middle snake', split the diff in two parts + * and recurse. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. + * @param {number} x Index of split point in text1. + * @param {number} y Index of split point in text2. + * @param {number} deadline Time at which to bail if not yet complete. + * @return {!Array.} Array of diff tuples. + * @private + */ +diff_match_patch.prototype.diff_bisectSplit_ = function(text1, text2, x, y, + deadline) { + var text1a = text1.substring(0, x); + var text2a = text2.substring(0, y); + var text1b = text1.substring(x); + var text2b = text2.substring(y); + + // Compute both diffs serially. + var diffs = this.diff_main(text1a, text2a, false, deadline); + var diffsb = this.diff_main(text1b, text2b, false, deadline); + + return diffs.concat(diffsb); +}; + + +/** + * Split two texts into an array of strings. Reduce the texts to a string of + * hashes where each Unicode character represents one line. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {{chars1: string, chars2: string, lineArray: !Array.}} + * An object containing the encoded text1, the encoded text2 and + * the array of unique strings. + * The zeroth element of the array of unique strings is intentionally blank. + * @private + */ +diff_match_patch.prototype.diff_linesToChars_ = function(text1, text2) { + var lineArray = []; // e.g. lineArray[4] == 'Hello\n' + var lineHash = {}; // e.g. lineHash['Hello\n'] == 4 + + // '\x00' is a valid character, but various debuggers don't like it. + // So we'll insert a junk entry to avoid generating a null character. + lineArray[0] = ''; + + /** + * Split a text into an array of strings. Reduce the texts to a string of + * hashes where each Unicode character represents one line. + * Modifies linearray and linehash through being a closure. + * @param {string} text String to encode. + * @return {string} Encoded string. + * @private + */ + function diff_linesToCharsMunge_(text) { + var chars = ''; + // Walk the text, pulling out a substring for each line. + // text.split('\n') would would temporarily double our memory footprint. + // Modifying text would create many large strings to garbage collect. + var lineStart = 0; + var lineEnd = -1; + // Keeping our own length variable is faster than looking it up. + var lineArrayLength = lineArray.length; + while (lineEnd < text.length - 1) { + lineEnd = text.indexOf('\n', lineStart); + if (lineEnd == -1) { + lineEnd = text.length - 1; + } + var line = text.substring(lineStart, lineEnd + 1); + lineStart = lineEnd + 1; + + if (lineHash.hasOwnProperty ? 
lineHash.hasOwnProperty(line) : + (lineHash[line] !== undefined)) { + chars += String.fromCharCode(lineHash[line]); + } else { + chars += String.fromCharCode(lineArrayLength); + lineHash[line] = lineArrayLength; + lineArray[lineArrayLength++] = line; + } + } + return chars; + } + + var chars1 = diff_linesToCharsMunge_(text1); + var chars2 = diff_linesToCharsMunge_(text2); + return {chars1: chars1, chars2: chars2, lineArray: lineArray}; +}; + + +/** + * Rehydrate the text in a diff from a string of line hashes to real lines of + * text. + * @param {!Array.} diffs Array of diff tuples. + * @param {!Array.} lineArray Array of unique strings. + * @private + */ +diff_match_patch.prototype.diff_charsToLines_ = function(diffs, lineArray) { + for (var x = 0; x < diffs.length; x++) { + var chars = diffs[x][1]; + var text = []; + for (var y = 0; y < chars.length; y++) { + text[y] = lineArray[chars.charCodeAt(y)]; + } + diffs[x][1] = text.join(''); + } +}; + + +/** + * Determine the common prefix of two strings. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {number} The number of characters common to the start of each + * string. + */ +diff_match_patch.prototype.diff_commonPrefix = function(text1, text2) { + // Quick check for common null cases. + if (!text1 || !text2 || text1.charAt(0) != text2.charAt(0)) { + return 0; + } + // Binary search. + // Performance analysis: http://neil.fraser.name/news/2007/10/09/ + var pointermin = 0; + var pointermax = Math.min(text1.length, text2.length); + var pointermid = pointermax; + var pointerstart = 0; + while (pointermin < pointermid) { + if (text1.substring(pointerstart, pointermid) == + text2.substring(pointerstart, pointermid)) { + pointermin = pointermid; + pointerstart = pointermin; + } else { + pointermax = pointermid; + } + pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin); + } + return pointermid; +}; + + +/** + * Determine the common suffix of two strings. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {number} The number of characters common to the end of each string. + */ +diff_match_patch.prototype.diff_commonSuffix = function(text1, text2) { + // Quick check for common null cases. + if (!text1 || !text2 || + text1.charAt(text1.length - 1) != text2.charAt(text2.length - 1)) { + return 0; + } + // Binary search. + // Performance analysis: http://neil.fraser.name/news/2007/10/09/ + var pointermin = 0; + var pointermax = Math.min(text1.length, text2.length); + var pointermid = pointermax; + var pointerend = 0; + while (pointermin < pointermid) { + if (text1.substring(text1.length - pointermid, text1.length - pointerend) == + text2.substring(text2.length - pointermid, text2.length - pointerend)) { + pointermin = pointermid; + pointerend = pointermin; + } else { + pointermax = pointermid; + } + pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin); + } + return pointermid; +}; + + +/** + * Determine if the suffix of one string is the prefix of another. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {number} The number of characters common to the end of the first + * string and the start of the second string. + * @private + */ +diff_match_patch.prototype.diff_commonOverlap_ = function(text1, text2) { + // Cache the text lengths to prevent multiple calls. + var text1_length = text1.length; + var text2_length = text2.length; + // Eliminate the null case. 
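+  // e.g. (illustrative) diff_commonOverlap_('abcxxx', 'xxxdef') returns 3,
+  // since the suffix 'xxx' of the first string prefixes the second.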
+ if (text1_length == 0 || text2_length == 0) { + return 0; + } + // Truncate the longer string. + if (text1_length > text2_length) { + text1 = text1.substring(text1_length - text2_length); + } else if (text1_length < text2_length) { + text2 = text2.substring(0, text1_length); + } + var text_length = Math.min(text1_length, text2_length); + // Quick check for the worst case. + if (text1 == text2) { + return text_length; + } + + // Start by looking for a single character match + // and increase length until no match is found. + // Performance analysis: http://neil.fraser.name/news/2010/11/04/ + var best = 0; + var length = 1; + while (true) { + var pattern = text1.substring(text_length - length); + var found = text2.indexOf(pattern); + if (found == -1) { + return best; + } + length += found; + if (found == 0 || text1.substring(text_length - length) == + text2.substring(0, length)) { + best = length; + length++; + } + } +}; + + +/** + * Do the two texts share a substring which is at least half the length of the + * longer text? + * This speedup can produce non-minimal diffs. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {Array.} Five element Array, containing the prefix of + * text1, the suffix of text1, the prefix of text2, the suffix of + * text2 and the common middle. Or null if there was no match. + * @private + */ +diff_match_patch.prototype.diff_halfMatch_ = function(text1, text2) { + if (this.Diff_Timeout <= 0) { + // Don't risk returning a non-optimal diff if we have unlimited time. + return null; + } + var longtext = text1.length > text2.length ? text1 : text2; + var shorttext = text1.length > text2.length ? text2 : text1; + if (longtext.length < 4 || shorttext.length * 2 < longtext.length) { + return null; // Pointless. + } + var dmp = this; // 'this' becomes 'window' in a closure. + + /** + * Does a substring of shorttext exist within longtext such that the substring + * is at least half the length of longtext? + * Closure, but does not reference any external variables. + * @param {string} longtext Longer string. + * @param {string} shorttext Shorter string. + * @param {number} i Start index of quarter length substring within longtext. + * @return {Array.} Five element Array, containing the prefix of + * longtext, the suffix of longtext, the prefix of shorttext, the suffix + * of shorttext and the common middle. Or null if there was no match. + * @private + */ + function diff_halfMatchI_(longtext, shorttext, i) { + // Start with a 1/4 length substring at position i as a seed. 
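+    // (A common substring covering half of longtext must wholly contain one
+    // of the two quarter-length seeds tried by the callers below, which is
+    // why only the second and third quarters are used as starting points.)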
+ var seed = longtext.substring(i, i + Math.floor(longtext.length / 4)); + var j = -1; + var best_common = ''; + var best_longtext_a, best_longtext_b, best_shorttext_a, best_shorttext_b; + while ((j = shorttext.indexOf(seed, j + 1)) != -1) { + var prefixLength = dmp.diff_commonPrefix(longtext.substring(i), + shorttext.substring(j)); + var suffixLength = dmp.diff_commonSuffix(longtext.substring(0, i), + shorttext.substring(0, j)); + if (best_common.length < suffixLength + prefixLength) { + best_common = shorttext.substring(j - suffixLength, j) + + shorttext.substring(j, j + prefixLength); + best_longtext_a = longtext.substring(0, i - suffixLength); + best_longtext_b = longtext.substring(i + prefixLength); + best_shorttext_a = shorttext.substring(0, j - suffixLength); + best_shorttext_b = shorttext.substring(j + prefixLength); + } + } + if (best_common.length * 2 >= longtext.length) { + return [best_longtext_a, best_longtext_b, + best_shorttext_a, best_shorttext_b, best_common]; + } else { + return null; + } + } + + // First check if the second quarter is the seed for a half-match. + var hm1 = diff_halfMatchI_(longtext, shorttext, + Math.ceil(longtext.length / 4)); + // Check again based on the third quarter. + var hm2 = diff_halfMatchI_(longtext, shorttext, + Math.ceil(longtext.length / 2)); + var hm; + if (!hm1 && !hm2) { + return null; + } else if (!hm2) { + hm = hm1; + } else if (!hm1) { + hm = hm2; + } else { + // Both matched. Select the longest. + hm = hm1[4].length > hm2[4].length ? hm1 : hm2; + } + + // A half-match was found, sort out the return data. + var text1_a, text1_b, text2_a, text2_b; + if (text1.length > text2.length) { + text1_a = hm[0]; + text1_b = hm[1]; + text2_a = hm[2]; + text2_b = hm[3]; + } else { + text2_a = hm[0]; + text2_b = hm[1]; + text1_a = hm[2]; + text1_b = hm[3]; + } + var mid_common = hm[4]; + return [text1_a, text1_b, text2_a, text2_b, mid_common]; +}; + + +/** + * Reduce the number of edits by eliminating semantically trivial equalities. + * @param {!Array.} diffs Array of diff tuples. + */ +diff_match_patch.prototype.diff_cleanupSemantic = function(diffs) { + var changes = false; + var equalities = []; // Stack of indices where equalities are found. + var equalitiesLength = 0; // Keeping our own length var is faster in JS. + /** @type {?string} */ + var lastequality = null; + // Always equal to diffs[equalities[equalitiesLength - 1]][1] + var pointer = 0; // Index of current position. + // Number of characters that changed prior to the equality. + var length_insertions1 = 0; + var length_deletions1 = 0; + // Number of characters that changed after the equality. + var length_insertions2 = 0; + var length_deletions2 = 0; + while (pointer < diffs.length) { + if (diffs[pointer][0] == DIFF_EQUAL) { // Equality found. + equalities[equalitiesLength++] = pointer; + length_insertions1 = length_insertions2; + length_deletions1 = length_deletions2; + length_insertions2 = 0; + length_deletions2 = 0; + lastequality = diffs[pointer][1]; + } else { // An insertion or deletion. + if (diffs[pointer][0] == DIFF_INSERT) { + length_insertions2 += diffs[pointer][1].length; + } else { + length_deletions2 += diffs[pointer][1].length; + } + // Eliminate an equality that is smaller or equal to the edits on both + // sides of it. + if (lastequality && (lastequality.length <= + Math.max(length_insertions1, length_deletions1)) && + (lastequality.length <= Math.max(length_insertions2, + length_deletions2))) { + // Duplicate record. 
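+        // (The small equality is re-typed: it becomes a delete plus an
+        // insert of the same text, letting the neighbouring edits merge.)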
+        diffs.splice(equalities[equalitiesLength - 1], 0,
+                     [DIFF_DELETE, lastequality]);
+        // Change second copy to insert.
+        diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT;
+        // Throw away the equality we just deleted.
+        equalitiesLength--;
+        // Throw away the previous equality (it needs to be reevaluated).
+        equalitiesLength--;
+        pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1;
+        length_insertions1 = 0;  // Reset the counters.
+        length_deletions1 = 0;
+        length_insertions2 = 0;
+        length_deletions2 = 0;
+        lastequality = null;
+        changes = true;
+      }
+    }
+    pointer++;
+  }
+
+  // Normalize the diff.
+  if (changes) {
+    this.diff_cleanupMerge(diffs);
+  }
+  this.diff_cleanupSemanticLossless(diffs);
+
+  // Find any overlaps between deletions and insertions.
+  // e.g: <del>abcxxx</del><ins>xxxdef</ins>
+  //   -> <del>abc</del>xxx<ins>def</ins>
+  // e.g: <del>xxxabc</del><ins>defxxx</ins>
+  //   -> <ins>def</ins>xxx<del>abc</del>
+  // Only extract an overlap if it is as big as the edit ahead or behind it.
+  pointer = 1;
+  while (pointer < diffs.length) {
+    if (diffs[pointer - 1][0] == DIFF_DELETE &&
+        diffs[pointer][0] == DIFF_INSERT) {
+      var deletion = diffs[pointer - 1][1];
+      var insertion = diffs[pointer][1];
+      var overlap_length1 = this.diff_commonOverlap_(deletion, insertion);
+      var overlap_length2 = this.diff_commonOverlap_(insertion, deletion);
+      if (overlap_length1 >= overlap_length2) {
+        if (overlap_length1 >= deletion.length / 2 ||
+            overlap_length1 >= insertion.length / 2) {
+          // Overlap found. Insert an equality and trim the surrounding edits.
+          diffs.splice(pointer, 0,
+              [DIFF_EQUAL, insertion.substring(0, overlap_length1)]);
+          diffs[pointer - 1][1] =
+              deletion.substring(0, deletion.length - overlap_length1);
+          diffs[pointer + 1][1] = insertion.substring(overlap_length1);
+          pointer++;
+        }
+      } else {
+        if (overlap_length2 >= deletion.length / 2 ||
+            overlap_length2 >= insertion.length / 2) {
+          // Reverse overlap found.
+          // Insert an equality and swap and trim the surrounding edits.
+          diffs.splice(pointer, 0,
+              [DIFF_EQUAL, deletion.substring(0, overlap_length2)]);
+          diffs[pointer - 1][0] = DIFF_INSERT;
+          diffs[pointer - 1][1] =
+              insertion.substring(0, insertion.length - overlap_length2);
+          diffs[pointer + 1][0] = DIFF_DELETE;
+          diffs[pointer + 1][1] =
+              deletion.substring(overlap_length2);
+          pointer++;
+        }
+      }
+      pointer++;
+    }
+    pointer++;
+  }
+};
+
+
+/**
+ * Look for single edits surrounded on both sides by equalities
+ * which can be shifted sideways to align the edit to a word boundary.
+ * e.g: The c<ins>at c</ins>ame. -> The <ins>cat c</ins>ame.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ */
+diff_match_patch.prototype.diff_cleanupSemanticLossless = function(diffs) {
+  /**
+   * Given two strings, compute a score representing whether the internal
+   * boundary falls on logical boundaries.
+   * Scores range from 6 (best) to 0 (worst).
+   * Closure, but does not reference any external variables.
+   * @param {string} one First string.
+   * @param {string} two Second string.
+   * @return {number} The score.
+   * @private
+   */
+  function diff_cleanupSemanticScore_(one, two) {
+    if (!one || !two) {
+      // Edges are the best.
+      return 6;
+    }
+
+    // Each port of this function behaves slightly differently due to
+    // subtle differences in each language's definition of things like
+    // 'whitespace'. Since this function's purpose is largely cosmetic,
+    // the choice has been made to use each language's native features
+    // rather than force total conformity.
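+    // e.g. (illustrative) diff_cleanupSemanticScore_('Hello ', 'world')
+    // returns 2: the boundary sits after plain whitespace.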
+ var char1 = one.charAt(one.length - 1); + var char2 = two.charAt(0); + var nonAlphaNumeric1 = char1.match(diff_match_patch.nonAlphaNumericRegex_); + var nonAlphaNumeric2 = char2.match(diff_match_patch.nonAlphaNumericRegex_); + var whitespace1 = nonAlphaNumeric1 && + char1.match(diff_match_patch.whitespaceRegex_); + var whitespace2 = nonAlphaNumeric2 && + char2.match(diff_match_patch.whitespaceRegex_); + var lineBreak1 = whitespace1 && + char1.match(diff_match_patch.linebreakRegex_); + var lineBreak2 = whitespace2 && + char2.match(diff_match_patch.linebreakRegex_); + var blankLine1 = lineBreak1 && + one.match(diff_match_patch.blanklineEndRegex_); + var blankLine2 = lineBreak2 && + two.match(diff_match_patch.blanklineStartRegex_); + + if (blankLine1 || blankLine2) { + // Five points for blank lines. + return 5; + } else if (lineBreak1 || lineBreak2) { + // Four points for line breaks. + return 4; + } else if (nonAlphaNumeric1 && !whitespace1 && whitespace2) { + // Three points for end of sentences. + return 3; + } else if (whitespace1 || whitespace2) { + // Two points for whitespace. + return 2; + } else if (nonAlphaNumeric1 || nonAlphaNumeric2) { + // One point for non-alphanumeric. + return 1; + } + return 0; + } + + var pointer = 1; + // Intentionally ignore the first and last element (don't need checking). + while (pointer < diffs.length - 1) { + if (diffs[pointer - 1][0] == DIFF_EQUAL && + diffs[pointer + 1][0] == DIFF_EQUAL) { + // This is a single edit surrounded by equalities. + var equality1 = diffs[pointer - 1][1]; + var edit = diffs[pointer][1]; + var equality2 = diffs[pointer + 1][1]; + + // First, shift the edit as far left as possible. + var commonOffset = this.diff_commonSuffix(equality1, edit); + if (commonOffset) { + var commonString = edit.substring(edit.length - commonOffset); + equality1 = equality1.substring(0, equality1.length - commonOffset); + edit = commonString + edit.substring(0, edit.length - commonOffset); + equality2 = commonString + equality2; + } + + // Second, step character by character right, looking for the best fit. + var bestEquality1 = equality1; + var bestEdit = edit; + var bestEquality2 = equality2; + var bestScore = diff_cleanupSemanticScore_(equality1, edit) + + diff_cleanupSemanticScore_(edit, equality2); + while (edit.charAt(0) === equality2.charAt(0)) { + equality1 += edit.charAt(0); + edit = edit.substring(1) + equality2.charAt(0); + equality2 = equality2.substring(1); + var score = diff_cleanupSemanticScore_(equality1, edit) + + diff_cleanupSemanticScore_(edit, equality2); + // The >= encourages trailing rather than leading whitespace on edits. + if (score >= bestScore) { + bestScore = score; + bestEquality1 = equality1; + bestEdit = edit; + bestEquality2 = equality2; + } + } + + if (diffs[pointer - 1][1] != bestEquality1) { + // We have an improvement, save it back to the diff. + if (bestEquality1) { + diffs[pointer - 1][1] = bestEquality1; + } else { + diffs.splice(pointer - 1, 1); + pointer--; + } + diffs[pointer][1] = bestEdit; + if (bestEquality2) { + diffs[pointer + 1][1] = bestEquality2; + } else { + diffs.splice(pointer + 1, 1); + pointer--; + } + } + } + pointer++; + } +}; + +// Define some regex patterns for matching boundaries. 
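+// e.g. 'foo\n\n' matches blanklineEndRegex_ below, marking a blank-line
+// boundary worth five points in the score above.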
+diff_match_patch.nonAlphaNumericRegex_ = /[^a-zA-Z0-9]/; +diff_match_patch.whitespaceRegex_ = /\s/; +diff_match_patch.linebreakRegex_ = /[\r\n]/; +diff_match_patch.blanklineEndRegex_ = /\n\r?\n$/; +diff_match_patch.blanklineStartRegex_ = /^\r?\n\r?\n/; + +/** + * Reduce the number of edits by eliminating operationally trivial equalities. + * @param {!Array.} diffs Array of diff tuples. + */ +diff_match_patch.prototype.diff_cleanupEfficiency = function(diffs) { + var changes = false; + var equalities = []; // Stack of indices where equalities are found. + var equalitiesLength = 0; // Keeping our own length var is faster in JS. + /** @type {?string} */ + var lastequality = null; + // Always equal to diffs[equalities[equalitiesLength - 1]][1] + var pointer = 0; // Index of current position. + // Is there an insertion operation before the last equality. + var pre_ins = false; + // Is there a deletion operation before the last equality. + var pre_del = false; + // Is there an insertion operation after the last equality. + var post_ins = false; + // Is there a deletion operation after the last equality. + var post_del = false; + while (pointer < diffs.length) { + if (diffs[pointer][0] == DIFF_EQUAL) { // Equality found. + if (diffs[pointer][1].length < this.Diff_EditCost && + (post_ins || post_del)) { + // Candidate found. + equalities[equalitiesLength++] = pointer; + pre_ins = post_ins; + pre_del = post_del; + lastequality = diffs[pointer][1]; + } else { + // Not a candidate, and can never become one. + equalitiesLength = 0; + lastequality = null; + } + post_ins = post_del = false; + } else { // An insertion or deletion. + if (diffs[pointer][0] == DIFF_DELETE) { + post_del = true; + } else { + post_ins = true; + } + /* + * Five types to be split: + * ABXYCD + * AXCD + * ABXC + * AXCD + * ABXC + */ + if (lastequality && ((pre_ins && pre_del && post_ins && post_del) || + ((lastequality.length < this.Diff_EditCost / 2) && + (pre_ins + pre_del + post_ins + post_del) == 3))) { + // Duplicate record. + diffs.splice(equalities[equalitiesLength - 1], 0, + [DIFF_DELETE, lastequality]); + // Change second copy to insert. + diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT; + equalitiesLength--; // Throw away the equality we just deleted; + lastequality = null; + if (pre_ins && pre_del) { + // No changes made which could affect previous entry, keep going. + post_ins = post_del = true; + equalitiesLength = 0; + } else { + equalitiesLength--; // Throw away the previous equality. + pointer = equalitiesLength > 0 ? + equalities[equalitiesLength - 1] : -1; + post_ins = post_del = false; + } + changes = true; + } + } + pointer++; + } + + if (changes) { + this.diff_cleanupMerge(diffs); + } +}; + + +/** + * Reorder and merge like edit sections. Merge equalities. + * Any edit section can move as long as it doesn't cross an equality. + * @param {!Array.} diffs Array of diff tuples. + */ +diff_match_patch.prototype.diff_cleanupMerge = function(diffs) { + diffs.push([DIFF_EQUAL, '']); // Add a dummy entry at the end. + var pointer = 0; + var count_delete = 0; + var count_insert = 0; + var text_delete = ''; + var text_insert = ''; + var commonlength; + while (pointer < diffs.length) { + switch (diffs[pointer][0]) { + case DIFF_INSERT: + count_insert++; + text_insert += diffs[pointer][1]; + pointer++; + break; + case DIFF_DELETE: + count_delete++; + text_delete += diffs[pointer][1]; + pointer++; + break; + case DIFF_EQUAL: + // Upon reaching an equality, check for prior redundancies. 
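+        // (Everything accumulated since the last equality collapses to at
+        // most one delete plus one insert, minus any shared affixes.)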
+        if (count_delete + count_insert > 1) {
+          if (count_delete !== 0 && count_insert !== 0) {
+            // Factor out any common prefixes.
+            commonlength = this.diff_commonPrefix(text_insert, text_delete);
+            if (commonlength !== 0) {
+              if ((pointer - count_delete - count_insert) > 0 &&
+                  diffs[pointer - count_delete - count_insert - 1][0] ==
+                  DIFF_EQUAL) {
+                diffs[pointer - count_delete - count_insert - 1][1] +=
+                    text_insert.substring(0, commonlength);
+              } else {
+                diffs.splice(0, 0, [DIFF_EQUAL,
+                    text_insert.substring(0, commonlength)]);
+                pointer++;
+              }
+              text_insert = text_insert.substring(commonlength);
+              text_delete = text_delete.substring(commonlength);
+            }
+            // Factor out any common suffixes.
+            commonlength = this.diff_commonSuffix(text_insert, text_delete);
+            if (commonlength !== 0) {
+              diffs[pointer][1] = text_insert.substring(text_insert.length -
+                  commonlength) + diffs[pointer][1];
+              text_insert = text_insert.substring(0, text_insert.length -
+                  commonlength);
+              text_delete = text_delete.substring(0, text_delete.length -
+                  commonlength);
+            }
+          }
+          // Delete the offending records and add the merged ones.
+          if (count_delete === 0) {
+            diffs.splice(pointer - count_insert,
+                count_delete + count_insert, [DIFF_INSERT, text_insert]);
+          } else if (count_insert === 0) {
+            diffs.splice(pointer - count_delete,
+                count_delete + count_insert, [DIFF_DELETE, text_delete]);
+          } else {
+            diffs.splice(pointer - count_delete - count_insert,
+                count_delete + count_insert, [DIFF_DELETE, text_delete],
+                [DIFF_INSERT, text_insert]);
+          }
+          pointer = pointer - count_delete - count_insert +
+                    (count_delete ? 1 : 0) + (count_insert ? 1 : 0) + 1;
+        } else if (pointer !== 0 && diffs[pointer - 1][0] == DIFF_EQUAL) {
+          // Merge this equality with the previous one.
+          diffs[pointer - 1][1] += diffs[pointer][1];
+          diffs.splice(pointer, 1);
+        } else {
+          pointer++;
+        }
+        count_insert = 0;
+        count_delete = 0;
+        text_delete = '';
+        text_insert = '';
+        break;
+    }
+  }
+  if (diffs[diffs.length - 1][1] === '') {
+    diffs.pop();  // Remove the dummy entry at the end.
+  }
+
+  // Second pass: look for single edits surrounded on both sides by equalities
+  // which can be shifted sideways to eliminate an equality.
+  // e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
+  var changes = false;
+  pointer = 1;
+  // Intentionally ignore the first and last element (don't need checking).
+  while (pointer < diffs.length - 1) {
+    if (diffs[pointer - 1][0] == DIFF_EQUAL &&
+        diffs[pointer + 1][0] == DIFF_EQUAL) {
+      // This is a single edit surrounded by equalities.
+      if (diffs[pointer][1].substring(diffs[pointer][1].length -
+          diffs[pointer - 1][1].length) == diffs[pointer - 1][1]) {
+        // Shift the edit over the previous equality.
+        diffs[pointer][1] = diffs[pointer - 1][1] +
+            diffs[pointer][1].substring(0, diffs[pointer][1].length -
+                                        diffs[pointer - 1][1].length);
+        diffs[pointer + 1][1] = diffs[pointer - 1][1] + diffs[pointer + 1][1];
+        diffs.splice(pointer - 1, 1);
+        changes = true;
+      } else if (diffs[pointer][1].substring(0, diffs[pointer + 1][1].length) ==
+          diffs[pointer + 1][1]) {
+        // Shift the edit over the next equality.
+        diffs[pointer - 1][1] += diffs[pointer + 1][1];
+        diffs[pointer][1] =
+            diffs[pointer][1].substring(diffs[pointer + 1][1].length) +
+            diffs[pointer + 1][1];
+        diffs.splice(pointer + 1, 1);
+        changes = true;
+      }
+    }
+    pointer++;
+  }
+  // If shifts were made, the diff needs reordering and another shift sweep.
+  if (changes) {
+    this.diff_cleanupMerge(diffs);
+  }
+};
+
+
+/**
+ * loc is a location in text1, compute and return the equivalent location in
+ * text2.
+ * e.g. 'The cat' vs 'The big cat', 1->1, 5->8
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @param {number} loc Location within text1.
+ * @return {number} Location within text2.
+ */
+diff_match_patch.prototype.diff_xIndex = function(diffs, loc) {
+  var chars1 = 0;
+  var chars2 = 0;
+  var last_chars1 = 0;
+  var last_chars2 = 0;
+  var x;
+  for (x = 0; x < diffs.length; x++) {
+    if (diffs[x][0] !== DIFF_INSERT) {  // Equality or deletion.
+      chars1 += diffs[x][1].length;
+    }
+    if (diffs[x][0] !== DIFF_DELETE) {  // Equality or insertion.
+      chars2 += diffs[x][1].length;
+    }
+    if (chars1 > loc) {  // Overshot the location.
+      break;
+    }
+    last_chars1 = chars1;
+    last_chars2 = chars2;
+  }
+  // Was the location deleted?
+  if (diffs.length != x && diffs[x][0] === DIFF_DELETE) {
+    return last_chars2;
+  }
+  // Add the remaining character length.
+  return last_chars2 + (loc - last_chars1);
+};
+
+
+/**
+ * Convert a diff array into a pretty HTML report.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {string} HTML representation.
+ */
+diff_match_patch.prototype.diff_prettyHtml = function(diffs) {
+  var html = [];
+  var pattern_amp = /&/g;
+  var pattern_lt = /</g;
+  var pattern_gt = />/g;
+  var pattern_para = /\n/g;
+  for (var x = 0; x < diffs.length; x++) {
+    var op = diffs[x][0];    // Operation (insert, delete, equal)
+    var data = diffs[x][1];  // Text of change.
+    var text = data.replace(pattern_amp, '&amp;').replace(pattern_lt, '&lt;')
+        .replace(pattern_gt, '&gt;').replace(pattern_para, '&para;<br>');
+    switch (op) {
+      case DIFF_INSERT:
+        html[x] = '<ins style="background:#e6ffe6;">' + text + '</ins>';
+        break;
+      case DIFF_DELETE:
+        html[x] = '<del style="background:#ffe6e6;">' + text + '</del>';
+        break;
+      case DIFF_EQUAL:
+        html[x] = '<span>' + text + '</span>';
+        break;
+    }
+  }
+  return html.join('');
+};
+
+
+/**
+ * Compute and return the source text (all equalities and deletions).
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {string} Source text.
+ */
+diff_match_patch.prototype.diff_text1 = function(diffs) {
+  var text = [];
+  for (var x = 0; x < diffs.length; x++) {
+    if (diffs[x][0] !== DIFF_INSERT) {
+      text[x] = diffs[x][1];
+    }
+  }
+  return text.join('');
+};
+
+
+/**
+ * Compute and return the destination text (all equalities and insertions).
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {string} Destination text.
+ */
+diff_match_patch.prototype.diff_text2 = function(diffs) {
+  var text = [];
+  for (var x = 0; x < diffs.length; x++) {
+    if (diffs[x][0] !== DIFF_DELETE) {
+      text[x] = diffs[x][1];
+    }
+  }
+  return text.join('');
+};
+
+
+/**
+ * Compute the Levenshtein distance; the number of inserted, deleted or
+ * substituted characters.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {number} Number of changes.
+ */
+diff_match_patch.prototype.diff_levenshtein = function(diffs) {
+  var levenshtein = 0;
+  var insertions = 0;
+  var deletions = 0;
+  for (var x = 0; x < diffs.length; x++) {
+    var op = diffs[x][0];
+    var data = diffs[x][1];
+    switch (op) {
+      case DIFF_INSERT:
+        insertions += data.length;
+        break;
+      case DIFF_DELETE:
+        deletions += data.length;
+        break;
+      case DIFF_EQUAL:
+        // A deletion and an insertion is one substitution.
+        levenshtein += Math.max(insertions, deletions);
+        insertions = 0;
+        deletions = 0;
+        break;
+    }
+  }
+  levenshtein += Math.max(insertions, deletions);
+  return levenshtein;
+};
+
+
+/**
+ * Crush the diff into an encoded string which describes the operations
+ * required to transform text1 into text2.
+ * E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'.
+ * Operations are tab-separated. Inserted text is escaped using %xx notation.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {string} Delta text.
+ */
+diff_match_patch.prototype.diff_toDelta = function(diffs) {
+  var text = [];
+  for (var x = 0; x < diffs.length; x++) {
+    switch (diffs[x][0]) {
+      case DIFF_INSERT:
+        text[x] = '+' + encodeURI(diffs[x][1]);
+        break;
+      case DIFF_DELETE:
+        text[x] = '-' + diffs[x][1].length;
+        break;
+      case DIFF_EQUAL:
+        text[x] = '=' + diffs[x][1].length;
+        break;
+    }
+  }
+  return text.join('\t').replace(/%20/g, ' ');
+};
+
+
+/**
+ * Given the original text1, and an encoded string which describes the
+ * operations required to transform text1 into text2, compute the full diff.
+ * @param {string} text1 Source string for the diff.
+ * @param {string} delta Delta text.
+ * @return {!Array.<!diff_match_patch.Diff>} Array of diff tuples.
+ * @throws {!Error} If invalid input.
+ */
+diff_match_patch.prototype.diff_fromDelta = function(text1, delta) {
+  var diffs = [];
+  var diffsLength = 0;  // Keeping our own length var is faster in JS.
+  var pointer = 0;  // Cursor in text1
+  var tokens = delta.split(/\t/g);
+  for (var x = 0; x < tokens.length; x++) {
+    // Each token begins with a one character parameter which specifies the
+    // operation of this token (delete, insert, equality).
+    var param = tokens[x].substring(1);
+    switch (tokens[x].charAt(0)) {
+      case '+':
+        try {
+          diffs[diffsLength++] = [DIFF_INSERT, decodeURI(param)];
+        } catch (ex) {
+          // Malformed URI sequence.
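+          // (decodeURI throws a URIError on malformed %-sequences; rewrap
+          // it with context.)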
+ throw new Error('Illegal escape in diff_fromDelta: ' + param); + } + break; + case '-': + // Fall through. + case '=': + var n = parseInt(param, 10); + if (isNaN(n) || n < 0) { + throw new Error('Invalid number in diff_fromDelta: ' + param); + } + var text = text1.substring(pointer, pointer += n); + if (tokens[x].charAt(0) == '=') { + diffs[diffsLength++] = [DIFF_EQUAL, text]; + } else { + diffs[diffsLength++] = [DIFF_DELETE, text]; + } + break; + default: + // Blank tokens are ok (from a trailing \t). + // Anything else is an error. + if (tokens[x]) { + throw new Error('Invalid diff operation in diff_fromDelta: ' + + tokens[x]); + } + } + } + if (pointer != text1.length) { + throw new Error('Delta length (' + pointer + + ') does not equal source text length (' + text1.length + ').'); + } + return diffs; +}; + + +// MATCH FUNCTIONS + + +/** + * Locate the best instance of 'pattern' in 'text' near 'loc'. + * @param {string} text The text to search. + * @param {string} pattern The pattern to search for. + * @param {number} loc The location to search around. + * @return {number} Best match index or -1. + */ +diff_match_patch.prototype.match_main = function(text, pattern, loc) { + // Check for null inputs. + if (text == null || pattern == null || loc == null) { + throw new Error('Null input. (match_main)'); + } + + loc = Math.max(0, Math.min(loc, text.length)); + if (text == pattern) { + // Shortcut (potentially not guaranteed by the algorithm) + return 0; + } else if (!text.length) { + // Nothing to match. + return -1; + } else if (text.substring(loc, loc + pattern.length) == pattern) { + // Perfect match at the perfect spot! (Includes case of null pattern) + return loc; + } else { + // Do a fuzzy compare. + return this.match_bitap_(text, pattern, loc); + } +}; + + +/** + * Locate the best instance of 'pattern' in 'text' near 'loc' using the + * Bitap algorithm. + * @param {string} text The text to search. + * @param {string} pattern The pattern to search for. + * @param {number} loc The location to search around. + * @return {number} Best match index or -1. + * @private + */ +diff_match_patch.prototype.match_bitap_ = function(text, pattern, loc) { + if (pattern.length > this.Match_MaxBits) { + throw new Error('Pattern too long for this browser.'); + } + + // Initialise the alphabet. + var s = this.match_alphabet_(pattern); + + var dmp = this; // 'this' becomes 'window' in a closure. + + /** + * Compute and return the score for a match with e errors and x location. + * Accesses loc and pattern through being a closure. + * @param {number} e Number of errors in match. + * @param {number} x Location of match. + * @return {number} Overall score for match (0.0 = good, 1.0 = bad). + * @private + */ + function match_bitapScore_(e, x) { + var accuracy = e / pattern.length; + var proximity = Math.abs(loc - x); + if (!dmp.Match_Distance) { + // Dodge divide by zero error. + return proximity ? 1.0 : accuracy; + } + return accuracy + (proximity / dmp.Match_Distance); + } + + // Highest score beyond which we give up. + var score_threshold = this.Match_Threshold; + // Is there a nearby exact match? (speedup) + var best_loc = text.indexOf(pattern, loc); + if (best_loc != -1) { + score_threshold = Math.min(match_bitapScore_(0, best_loc), score_threshold); + // What about in the other direction? 
(speedup) + best_loc = text.lastIndexOf(pattern, loc + pattern.length); + if (best_loc != -1) { + score_threshold = + Math.min(match_bitapScore_(0, best_loc), score_threshold); + } + } + + // Initialise the bit arrays. + var matchmask = 1 << (pattern.length - 1); + best_loc = -1; + + var bin_min, bin_mid; + var bin_max = pattern.length + text.length; + var last_rd; + for (var d = 0; d < pattern.length; d++) { + // Scan for the best match; each iteration allows for one more error. + // Run a binary search to determine how far from 'loc' we can stray at this + // error level. + bin_min = 0; + bin_mid = bin_max; + while (bin_min < bin_mid) { + if (match_bitapScore_(d, loc + bin_mid) <= score_threshold) { + bin_min = bin_mid; + } else { + bin_max = bin_mid; + } + bin_mid = Math.floor((bin_max - bin_min) / 2 + bin_min); + } + // Use the result from this iteration as the maximum for the next. + bin_max = bin_mid; + var start = Math.max(1, loc - bin_mid + 1); + var finish = Math.min(loc + bin_mid, text.length) + pattern.length; + + var rd = Array(finish + 2); + rd[finish + 1] = (1 << d) - 1; + for (var j = finish; j >= start; j--) { + // The alphabet (s) is a sparse hash, so the following line generates + // warnings. + var charMatch = s[text.charAt(j - 1)]; + if (d === 0) { // First pass: exact match. + rd[j] = ((rd[j + 1] << 1) | 1) & charMatch; + } else { // Subsequent passes: fuzzy match. + rd[j] = (((rd[j + 1] << 1) | 1) & charMatch) | + (((last_rd[j + 1] | last_rd[j]) << 1) | 1) | + last_rd[j + 1]; + } + if (rd[j] & matchmask) { + var score = match_bitapScore_(d, j - 1); + // This match will almost certainly be better than any existing match. + // But check anyway. + if (score <= score_threshold) { + // Told you so. + score_threshold = score; + best_loc = j - 1; + if (best_loc > loc) { + // When passing loc, don't exceed our current distance from loc. + start = Math.max(1, 2 * loc - best_loc); + } else { + // Already passed loc, downhill from here on in. + break; + } + } + } + } + // No hope for a (better) match at greater error levels. + if (match_bitapScore_(d + 1, loc) > score_threshold) { + break; + } + last_rd = rd; + } + return best_loc; +}; + + +/** + * Initialise the alphabet for the Bitap algorithm. + * @param {string} pattern The text to encode. + * @return {!Object} Hash of character locations. + * @private + */ +diff_match_patch.prototype.match_alphabet_ = function(pattern) { + var s = {}; + for (var i = 0; i < pattern.length; i++) { + s[pattern.charAt(i)] = 0; + } + for (var i = 0; i < pattern.length; i++) { + s[pattern.charAt(i)] |= 1 << (pattern.length - i - 1); + } + return s; +}; + + +// PATCH FUNCTIONS + + +/** + * Increase the context until it is unique, + * but don't let the pattern expand beyond Match_MaxBits. + * @param {!diff_match_patch.patch_obj} patch The patch to grow. + * @param {string} text Source text. + * @private + */ +diff_match_patch.prototype.patch_addContext_ = function(patch, text) { + if (text.length == 0) { + return; + } + var pattern = text.substring(patch.start2, patch.start2 + patch.length1); + var padding = 0; + + // Look for the first and last matches of pattern in text. If two different + // matches are found, increase the pattern length. + while (text.indexOf(pattern) != text.lastIndexOf(pattern) && + pattern.length < this.Match_MaxBits - this.Patch_Margin - + this.Patch_Margin) { + padding += this.Patch_Margin; + pattern = text.substring(patch.start2 - padding, + patch.start2 + patch.length1 + padding); + } + // Add one chunk for good luck. 
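+  // (An extra margin beyond bare uniqueness, so the pattern stays unique
+  // even if the surrounding text drifts slightly.)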
+ padding += this.Patch_Margin; + + // Add the prefix. + var prefix = text.substring(patch.start2 - padding, patch.start2); + if (prefix) { + patch.diffs.unshift([DIFF_EQUAL, prefix]); + } + // Add the suffix. + var suffix = text.substring(patch.start2 + patch.length1, + patch.start2 + patch.length1 + padding); + if (suffix) { + patch.diffs.push([DIFF_EQUAL, suffix]); + } + + // Roll back the start points. + patch.start1 -= prefix.length; + patch.start2 -= prefix.length; + // Extend the lengths. + patch.length1 += prefix.length + suffix.length; + patch.length2 += prefix.length + suffix.length; +}; + + +/** + * Compute a list of patches to turn text1 into text2. + * Use diffs if provided, otherwise compute it ourselves. + * There are four ways to call this function, depending on what data is + * available to the caller: + * Method 1: + * a = text1, b = text2 + * Method 2: + * a = diffs + * Method 3 (optimal): + * a = text1, b = diffs + * Method 4 (deprecated, use method 3): + * a = text1, b = text2, c = diffs + * + * @param {string|!Array.} a text1 (methods 1,3,4) or + * Array of diff tuples for text1 to text2 (method 2). + * @param {string|!Array.} opt_b text2 (methods 1,4) or + * Array of diff tuples for text1 to text2 (method 3) or undefined (method 2). + * @param {string|!Array.} opt_c Array of diff tuples + * for text1 to text2 (method 4) or undefined (methods 1,2,3). + * @return {!Array.} Array of Patch objects. + */ +diff_match_patch.prototype.patch_make = function(a, opt_b, opt_c) { + var text1, diffs; + if (typeof a == 'string' && typeof opt_b == 'string' && + typeof opt_c == 'undefined') { + // Method 1: text1, text2 + // Compute diffs from text1 and text2. + text1 = /** @type {string} */(a); + diffs = this.diff_main(text1, /** @type {string} */(opt_b), true); + if (diffs.length > 2) { + this.diff_cleanupSemantic(diffs); + this.diff_cleanupEfficiency(diffs); + } + } else if (a && typeof a == 'object' && typeof opt_b == 'undefined' && + typeof opt_c == 'undefined') { + // Method 2: diffs + // Compute text1 from diffs. + diffs = /** @type {!Array.} */(a); + text1 = this.diff_text1(diffs); + } else if (typeof a == 'string' && opt_b && typeof opt_b == 'object' && + typeof opt_c == 'undefined') { + // Method 3: text1, diffs + text1 = /** @type {string} */(a); + diffs = /** @type {!Array.} */(opt_b); + } else if (typeof a == 'string' && typeof opt_b == 'string' && + opt_c && typeof opt_c == 'object') { + // Method 4: text1, text2, diffs + // text2 is not used. + text1 = /** @type {string} */(a); + diffs = /** @type {!Array.} */(opt_c); + } else { + throw new Error('Unknown call format to patch_make.'); + } + + if (diffs.length === 0) { + return []; // Get rid of the null case. + } + var patches = []; + var patch = new diff_match_patch.patch_obj(); + var patchDiffLength = 0; // Keeping our own length var is faster in JS. + var char_count1 = 0; // Number of characters into the text1 string. + var char_count2 = 0; // Number of characters into the text2 string. + // Start with text1 (prepatch_text) and apply the diffs until we arrive at + // text2 (postpatch_text). We recreate the patches one by one to determine + // context info. + var prepatch_text = text1; + var postpatch_text = text1; + for (var x = 0; x < diffs.length; x++) { + var diff_type = diffs[x][0]; + var diff_text = diffs[x][1]; + + if (!patchDiffLength && diff_type !== DIFF_EQUAL) { + // A new patch starts here. 
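+      // (Record its anchor in both coordinate frames: char_count1 indexes
+      // the pre-patch text, char_count2 the post-patch text.)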
+ patch.start1 = char_count1; + patch.start2 = char_count2; + } + + switch (diff_type) { + case DIFF_INSERT: + patch.diffs[patchDiffLength++] = diffs[x]; + patch.length2 += diff_text.length; + postpatch_text = postpatch_text.substring(0, char_count2) + diff_text + + postpatch_text.substring(char_count2); + break; + case DIFF_DELETE: + patch.length1 += diff_text.length; + patch.diffs[patchDiffLength++] = diffs[x]; + postpatch_text = postpatch_text.substring(0, char_count2) + + postpatch_text.substring(char_count2 + + diff_text.length); + break; + case DIFF_EQUAL: + if (diff_text.length <= 2 * this.Patch_Margin && + patchDiffLength && diffs.length != x + 1) { + // Small equality inside a patch. + patch.diffs[patchDiffLength++] = diffs[x]; + patch.length1 += diff_text.length; + patch.length2 += diff_text.length; + } else if (diff_text.length >= 2 * this.Patch_Margin) { + // Time for a new patch. + if (patchDiffLength) { + this.patch_addContext_(patch, prepatch_text); + patches.push(patch); + patch = new diff_match_patch.patch_obj(); + patchDiffLength = 0; + // Unlike Unidiff, our patch lists have a rolling context. + // http://code.google.com/p/google-diff-match-patch/wiki/Unidiff + // Update prepatch text & pos to reflect the application of the + // just completed patch. + prepatch_text = postpatch_text; + char_count1 = char_count2; + } + } + break; + } + + // Update the current character count. + if (diff_type !== DIFF_INSERT) { + char_count1 += diff_text.length; + } + if (diff_type !== DIFF_DELETE) { + char_count2 += diff_text.length; + } + } + // Pick up the leftover patch if not empty. + if (patchDiffLength) { + this.patch_addContext_(patch, prepatch_text); + patches.push(patch); + } + + return patches; +}; + + +/** + * Given an array of patches, return another array that is identical. + * @param {!Array.} patches Array of Patch objects. + * @return {!Array.} Array of Patch objects. + */ +diff_match_patch.prototype.patch_deepCopy = function(patches) { + // Making deep copies is hard in JavaScript. + var patchesCopy = []; + for (var x = 0; x < patches.length; x++) { + var patch = patches[x]; + var patchCopy = new diff_match_patch.patch_obj(); + patchCopy.diffs = []; + for (var y = 0; y < patch.diffs.length; y++) { + patchCopy.diffs[y] = patch.diffs[y].slice(); + } + patchCopy.start1 = patch.start1; + patchCopy.start2 = patch.start2; + patchCopy.length1 = patch.length1; + patchCopy.length2 = patch.length2; + patchesCopy[x] = patchCopy; + } + return patchesCopy; +}; + + +/** + * Merge a set of patches onto the text. Return a patched text, as well + * as a list of true/false values indicating which patches were applied. + * @param {!Array.} patches Array of Patch objects. + * @param {string} text Old text. + * @return {!Array.>} Two element Array, containing the + * new text and an array of boolean values. + */ +diff_match_patch.prototype.patch_apply = function(patches, text) { + if (patches.length == 0) { + return [text, []]; + } + + // Deep copy the patches so that no changes are made to originals. + patches = this.patch_deepCopy(patches); + + var nullPadding = this.patch_addPadding(patches); + text = nullPadding + text + nullPadding; + + this.patch_splitMax(patches); + // delta keeps track of the offset between the expected and actual location + // of the previous patch. If there are patches expected at positions 10 and + // 20, but the first patch was found at 12, delta is 2 and the second patch + // has an effective expected position of 22. 
+  var delta = 0;
+  var results = [];
+  for (var x = 0; x < patches.length; x++) {
+    var expected_loc = patches[x].start2 + delta;
+    var text1 = this.diff_text1(patches[x].diffs);
+    var start_loc;
+    var end_loc = -1;
+    if (text1.length > this.Match_MaxBits) {
+      // patch_splitMax will only provide an oversized pattern in the case of
+      // a monster delete.
+      start_loc = this.match_main(text, text1.substring(0, this.Match_MaxBits),
+                                  expected_loc);
+      if (start_loc != -1) {
+        end_loc = this.match_main(text,
+            text1.substring(text1.length - this.Match_MaxBits),
+            expected_loc + text1.length - this.Match_MaxBits);
+        if (end_loc == -1 || start_loc >= end_loc) {
+          // Can't find valid trailing context. Drop this patch.
+          start_loc = -1;
+        }
+      }
+    } else {
+      start_loc = this.match_main(text, text1, expected_loc);
+    }
+    if (start_loc == -1) {
+      // No match found. :(
+      results[x] = false;
+      // Subtract the delta for this failed patch from subsequent patches.
+      delta -= patches[x].length2 - patches[x].length1;
+    } else {
+      // Found a match. :)
+      results[x] = true;
+      delta = start_loc - expected_loc;
+      var text2;
+      if (end_loc == -1) {
+        text2 = text.substring(start_loc, start_loc + text1.length);
+      } else {
+        text2 = text.substring(start_loc, end_loc + this.Match_MaxBits);
+      }
+      if (text1 == text2) {
+        // Perfect match, just shove the replacement text in.
+        text = text.substring(0, start_loc) +
+               this.diff_text2(patches[x].diffs) +
+               text.substring(start_loc + text1.length);
+      } else {
+        // Imperfect match. Run a diff to get a framework of equivalent
+        // indices.
+        var diffs = this.diff_main(text1, text2, false);
+        if (text1.length > this.Match_MaxBits &&
+            this.diff_levenshtein(diffs) / text1.length >
+            this.Patch_DeleteThreshold) {
+          // The end points match, but the content is unacceptably bad.
+          results[x] = false;
+        } else {
+          this.diff_cleanupSemanticLossless(diffs);
+          var index1 = 0;
+          var index2;
+          for (var y = 0; y < patches[x].diffs.length; y++) {
+            var mod = patches[x].diffs[y];
+            if (mod[0] !== DIFF_EQUAL) {
+              index2 = this.diff_xIndex(diffs, index1);
+            }
+            if (mod[0] === DIFF_INSERT) {  // Insertion
+              text = text.substring(0, start_loc + index2) + mod[1] +
+                     text.substring(start_loc + index2);
+            } else if (mod[0] === DIFF_DELETE) {  // Deletion
+              text = text.substring(0, start_loc + index2) +
+                     text.substring(start_loc + this.diff_xIndex(diffs,
+                         index1 + mod[1].length));
+            }
+            if (mod[0] !== DIFF_DELETE) {
+              index1 += mod[1].length;
+            }
+          }
+        }
+      }
+    }
+  }
+  // Strip the padding off.
+  text = text.substring(nullPadding.length, text.length - nullPadding.length);
+  return [text, results];
+};
+
+
+/**
+ * Add some padding on text start and end so that edges can match something.
+ * Intended to be called only from within patch_apply.
+ * @param {!Array.<!diff_match_patch.patch_obj>} patches Array of Patch objects.
+ * @return {string} The padding string added to each side.
+ */
+diff_match_patch.prototype.patch_addPadding = function(patches) {
+  var paddingLength = this.Patch_Margin;
+  var nullPadding = '';
+  for (var x = 1; x <= paddingLength; x++) {
+    nullPadding += String.fromCharCode(x);
+  }
+
+  // Bump all the patches forward.
+  for (var x = 0; x < patches.length; x++) {
+    patches[x].start1 += paddingLength;
+    patches[x].start2 += paddingLength;
+  }
+
+  // Add some padding on start of first diff.
+  var patch = patches[0];
+  var diffs = patch.diffs;
+  if (diffs.length == 0 || diffs[0][0] != DIFF_EQUAL) {
+    // Add nullPadding equality.
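+    // The first patch starts with an insert/delete, i.e. it has no leading
+    // context of its own, so prepend an equality made of the padding
+    // characters (\x01..\x04 with the default Patch_Margin of 4), which the
+    // padded text is guaranteed to start with.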
+    diffs.unshift([DIFF_EQUAL, nullPadding]);
+    patch.start1 -= paddingLength;  // Should be 0.
+    patch.start2 -= paddingLength;  // Should be 0.
+    patch.length1 += paddingLength;
+    patch.length2 += paddingLength;
+  } else if (paddingLength > diffs[0][1].length) {
+    // Grow first equality.
+    var extraLength = paddingLength - diffs[0][1].length;
+    diffs[0][1] = nullPadding.substring(diffs[0][1].length) + diffs[0][1];
+    patch.start1 -= extraLength;
+    patch.start2 -= extraLength;
+    patch.length1 += extraLength;
+    patch.length2 += extraLength;
+  }
+
+  // Add some padding on end of last diff.
+  patch = patches[patches.length - 1];
+  diffs = patch.diffs;
+  if (diffs.length == 0 || diffs[diffs.length - 1][0] != DIFF_EQUAL) {
+    // Add nullPadding equality.
+    diffs.push([DIFF_EQUAL, nullPadding]);
+    patch.length1 += paddingLength;
+    patch.length2 += paddingLength;
+  } else if (paddingLength > diffs[diffs.length - 1][1].length) {
+    // Grow last equality.
+    var extraLength = paddingLength - diffs[diffs.length - 1][1].length;
+    diffs[diffs.length - 1][1] += nullPadding.substring(0, extraLength);
+    patch.length1 += extraLength;
+    patch.length2 += extraLength;
+  }
+
+  return nullPadding;
+};
+
+
+/**
+ * Look through the patches and break up any which are longer than the maximum
+ * limit of the match algorithm.
+ * Intended to be called only from within patch_apply.
+ * @param {!Array.<!diff_match_patch.patch_obj>} patches Array of Patch objects.
+ */
+diff_match_patch.prototype.patch_splitMax = function(patches) {
+  var patch_size = this.Match_MaxBits;
+  for (var x = 0; x < patches.length; x++) {
+    if (patches[x].length1 <= patch_size) {
+      continue;
+    }
+    var bigpatch = patches[x];
+    // Remove the big old patch.
+    patches.splice(x--, 1);
+    var start1 = bigpatch.start1;
+    var start2 = bigpatch.start2;
+    var precontext = '';
+    while (bigpatch.diffs.length !== 0) {
+      // Create one of several smaller patches.
+      var patch = new diff_match_patch.patch_obj();
+      var empty = true;
+      patch.start1 = start1 - precontext.length;
+      patch.start2 = start2 - precontext.length;
+      if (precontext !== '') {
+        patch.length1 = patch.length2 = precontext.length;
+        patch.diffs.push([DIFF_EQUAL, precontext]);
+      }
+      while (bigpatch.diffs.length !== 0 &&
+             patch.length1 < patch_size - this.Patch_Margin) {
+        var diff_type = bigpatch.diffs[0][0];
+        var diff_text = bigpatch.diffs[0][1];
+        if (diff_type === DIFF_INSERT) {
+          // Insertions are harmless.
+          patch.length2 += diff_text.length;
+          start2 += diff_text.length;
+          patch.diffs.push(bigpatch.diffs.shift());
+          empty = false;
+        } else if (diff_type === DIFF_DELETE && patch.diffs.length == 1 &&
+                   patch.diffs[0][0] == DIFF_EQUAL &&
+                   diff_text.length > 2 * patch_size) {
+          // This is a large deletion. Let it pass in one chunk.
+          patch.length1 += diff_text.length;
+          start1 += diff_text.length;
+          empty = false;
+          patch.diffs.push([diff_type, diff_text]);
+          bigpatch.diffs.shift();
+        } else {
+          // Deletion or equality. Only take as much as we can stomach.
+          diff_text = diff_text.substring(0,
+              patch_size - patch.length1 - this.Patch_Margin);
+          patch.length1 += diff_text.length;
+          start1 += diff_text.length;
+          if (diff_type === DIFF_EQUAL) {
+            patch.length2 += diff_text.length;
+            start2 += diff_text.length;
+          } else {
+            empty = false;
+          }
+          patch.diffs.push([diff_type, diff_text]);
+          if (diff_text == bigpatch.diffs[0][1]) {
+            bigpatch.diffs.shift();
+          } else {
+            bigpatch.diffs[0][1] =
+                bigpatch.diffs[0][1].substring(diff_text.length);
+          }
+        }
+      }
+      // Compute the head context for the next patch.
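+      // Carry the tail of this chunk's output forward as leading context for
+      // the next split patch, so each piece can still be located on its own.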
+      precontext = this.diff_text2(patch.diffs);
+      precontext =
+          precontext.substring(precontext.length - this.Patch_Margin);
+      // Append the end context for this patch.
+      var postcontext = this.diff_text1(bigpatch.diffs)
+                            .substring(0, this.Patch_Margin);
+      if (postcontext !== '') {
+        patch.length1 += postcontext.length;
+        patch.length2 += postcontext.length;
+        if (patch.diffs.length !== 0 &&
+            patch.diffs[patch.diffs.length - 1][0] === DIFF_EQUAL) {
+          patch.diffs[patch.diffs.length - 1][1] += postcontext;
+        } else {
+          patch.diffs.push([DIFF_EQUAL, postcontext]);
+        }
+      }
+      if (!empty) {
+        patches.splice(++x, 0, patch);
+      }
+    }
+  }
+};
+
+
+/**
+ * Take a list of patches and return a textual representation.
+ * @param {!Array.<!diff_match_patch.patch_obj>} patches Array of Patch objects.
+ * @return {string} Text representation of patches.
+ */
+diff_match_patch.prototype.patch_toText = function(patches) {
+  var text = [];
+  for (var x = 0; x < patches.length; x++) {
+    text[x] = patches[x];
+  }
+  return text.join('');
+};
+
+
+/**
+ * Parse a textual representation of patches and return a list of Patch objects.
+ * @param {string} textline Text representation of patches.
+ * @return {!Array.<!diff_match_patch.patch_obj>} Array of Patch objects.
+ * @throws {!Error} If invalid input.
+ */
+diff_match_patch.prototype.patch_fromText = function(textline) {
+  var patches = [];
+  if (!textline) {
+    return patches;
+  }
+  var text = textline.split('\n');
+  var textPointer = 0;
+  var patchHeader = /^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$/;
+  while (textPointer < text.length) {
+    var m = text[textPointer].match(patchHeader);
+    if (!m) {
+      throw new Error('Invalid patch string: ' + text[textPointer]);
+    }
+    var patch = new diff_match_patch.patch_obj();
+    patches.push(patch);
+    patch.start1 = parseInt(m[1], 10);
+    if (m[2] === '') {
+      patch.start1--;
+      patch.length1 = 1;
+    } else if (m[2] == '0') {
+      patch.length1 = 0;
+    } else {
+      patch.start1--;
+      patch.length1 = parseInt(m[2], 10);
+    }
+
+    patch.start2 = parseInt(m[3], 10);
+    if (m[4] === '') {
+      patch.start2--;
+      patch.length2 = 1;
+    } else if (m[4] == '0') {
+      patch.length2 = 0;
+    } else {
+      patch.start2--;
+      patch.length2 = parseInt(m[4], 10);
+    }
+    textPointer++;
+
+    while (textPointer < text.length) {
+      var sign = text[textPointer].charAt(0);
+      try {
+        var line = decodeURI(text[textPointer].substring(1));
+      } catch (ex) {
+        // Malformed URI sequence.
+        throw new Error('Illegal escape in patch_fromText: ' + line);
+      }
+      if (sign == '-') {
+        // Deletion.
+        patch.diffs.push([DIFF_DELETE, line]);
+      } else if (sign == '+') {
+        // Insertion.
+        patch.diffs.push([DIFF_INSERT, line]);
+      } else if (sign == ' ') {
+        // Minor equality.
+        patch.diffs.push([DIFF_EQUAL, line]);
+      } else if (sign == '@') {
+        // Start of next patch.
+        break;
+      } else if (sign === '') {
+        // Blank line?  Whatever.
+      } else {
+        // WTF?
+        throw new Error('Invalid patch mode "' + sign + '" in: ' + line);
+      }
+      textPointer++;
+    }
+  }
+  return patches;
+};
+
+
+/**
+ * Class representing one patch operation.
+ * @constructor
+ */
+diff_match_patch.patch_obj = function() {
+  /** @type {!Array.<!diff_match_patch.Diff>} */
+  this.diffs = [];
+  /** @type {?number} */
+  this.start1 = null;
+  /** @type {?number} */
+  this.start2 = null;
+  /** @type {number} */
+  this.length1 = 0;
+  /** @type {number} */
+  this.length2 = 0;
+};
+
+
+/**
+ * Emulate GNU diff's format.
+ * Header: @@ -382,8 +481,9 @@
+ * Indices are printed as 1-based, not 0-based.
+ * @return {string} The GNU diff string.
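+ * For example, a single patch turning "abcdef" into "abCdef" serialises as
+ * "@@ -1,6 +1,6 @@\n ab\n-c\n+C\n def\n" (illustrative example).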
+ */
+diff_match_patch.patch_obj.prototype.toString = function() {
+  var coords1, coords2;
+  if (this.length1 === 0) {
+    coords1 = this.start1 + ',0';
+  } else if (this.length1 == 1) {
+    coords1 = this.start1 + 1;
+  } else {
+    coords1 = (this.start1 + 1) + ',' + this.length1;
+  }
+  if (this.length2 === 0) {
+    coords2 = this.start2 + ',0';
+  } else if (this.length2 == 1) {
+    coords2 = this.start2 + 1;
+  } else {
+    coords2 = (this.start2 + 1) + ',' + this.length2;
+  }
+  var text = ['@@ -' + coords1 + ' +' + coords2 + ' @@\n'];
+  var op;
+  // Escape the body of the patch with %xx notation.
+  for (var x = 0; x < this.diffs.length; x++) {
+    switch (this.diffs[x][0]) {
+      case DIFF_INSERT:
+        op = '+';
+        break;
+      case DIFF_DELETE:
+        op = '-';
+        break;
+      case DIFF_EQUAL:
+        op = ' ';
+        break;
+    }
+    text[x + 1] = op + encodeURI(this.diffs[x][1]) + '\n';
+  }
+  return text.join('').replace(/%20/g, ' ');
+};
+
+
+// Export these global variables so that they survive Google's JS compiler.
+// In a browser, 'this' will be 'window'.
+// Users of node.js should 'require' the uncompressed version since Google's
+// JS compiler may break the following exports for non-browser environments.
+this['diff_match_patch'] = diff_match_patch;
+this['DIFF_DELETE'] = DIFF_DELETE;
+this['DIFF_INSERT'] = DIFF_INSERT;
+this['DIFF_EQUAL'] = DIFF_EQUAL;
diff --git a/services/document-updater/config/settings.development.coffee b/services/document-updater/config/settings.development.coffee
new file mode 100755
index 0000000000..d730bb0f2d
--- /dev/null
+++ b/services/document-updater/config/settings.development.coffee
@@ -0,0 +1,23 @@
+Path = require('path')
+http = require('http')
+http.globalAgent.maxSockets = 300
+
+module.exports =
+  internal:
+    documentupdater:
+      port: 3003
+
+  apis:
+    web:
+      url: "http://localhost:3000"
+      user: "sharelatex"
+      pass: "password"
+
+  redis:
+    web:
+      port: "6379"
+      host: "localhost"
+      password: ""
+
+  mongo:
+    url: 'mongodb://127.0.0.1/sharelatex'
diff --git a/services/document-updater/package.json b/services/document-updater/package.json
new file mode 100644
index 0000000000..ff65e225fe
--- /dev/null
+++ b/services/document-updater/package.json
@@ -0,0 +1,30 @@
+{
+  "name": "document-updater-sharelatex",
+  "version": "0.0.1",
+  "dependencies": {
+    "express": "3.3.4",
+    "underscore": "1.2.2",
+    "redis": "0.7.2",
+    "chai": "",
+    "request": "2.25.0",
+    "sandboxed-module": "~0.2.0",
+    "chai-spies": "",
+    "async": "",
+    "lynx": "0.0.11",
+    "coffee-script": "1.4.0",
+    "settings-sharelatex": "git+ssh://git@bitbucket.org:sharelatex/settings-sharelatex.git#master",
+    "logger-sharelatex": "git+ssh://git@bitbucket.org:sharelatex/logger-sharelatex.git#bunyan",
+    "sinon": "~1.5.2",
+    "mongojs": "0.9.11"
+  },
+  "devDependencies": {
+    "grunt-execute": "~0.1.5",
+    "grunt-contrib-clean": "~0.5.0",
+    "grunt-mocha-test": "~0.9.0",
+    "grunt": "~0.4.2",
+    "grunt-available-tasks": "~0.4.1",
+    "grunt-contrib-coffee": "~0.10.0",
+    "bunyan": "~0.22.1",
+    "grunt-bunyan": "~0.5.0"
+  }
+}
diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
new file mode 100644
index 0000000000..5108a4c2cc
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
@@ -0,0 +1,215 @@
+sinon = require "sinon"
+chai = require("chai")
+chai.should()
+async = require "async"
+mongojs = require "../../../app/js/mongojs"
+db = mongojs.db
+ObjectId = mongojs.ObjectId
+
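+# These acceptance tests drive a running document-updater through its public
+# interfaces: updates are pushed in via redis and documents are read back over
+# HTTP on port 3003, e.g. (sketch using the helpers required below):
+#
+#   DocUpdaterClient.sendUpdate project_id, doc_id, {doc: doc_id, op: [{i: "x", p: 0}], v: 0}, ->
+#     DocUpdaterClient.getDoc project_id, doc_id, (error, res, doc) ->
+#       console.log doc.lines, doc.version
+#
+# MockWebApi (required below) stands in for the main web service on port 3000.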
+MockWebApi = require "./helpers/MockWebApi"
+DocUpdaterClient = require "./helpers/DocUpdaterClient"
+
+describe "Applying updates to a doc", ->
+  before ->
+    @lines = ["one", "two", "three"]
+    @update =
+      doc: @doc_id
+      op: [{
+        i: "one and a half\n"
+        p: 4
+      }]
+      v: 0
+    @result = ["one", "one and a half", "two", "three"]
+
+  describe "when the document is not loaded", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines
+      }
+      sinon.spy MockWebApi, "getDocument"
+      DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) ->
+        throw error if error?
+        setTimeout done, 200
+
+    after ->
+      MockWebApi.getDocument.restore()
+
+    it "should load the document from the web API", ->
+      MockWebApi.getDocument
+        .calledWith(@project_id, @doc_id)
+        .should.equal true
+
+    it "should update the doc", (done) ->
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        doc.lines.should.deep.equal @result
+        done()
+
+  describe "when the document is loaded", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines
+      }
+      DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
+        throw error if error?
+        sinon.spy MockWebApi, "getDocument"
+        DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) ->
+          throw error if error?
+          setTimeout done, 200
+
+    after ->
+      MockWebApi.getDocument.restore()
+
+    it "should not need to call the web api", ->
+      MockWebApi.getDocument.called.should.equal false
+
+    it "should update the doc", (done) ->
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        doc.lines.should.deep.equal @result
+        done()
+
+  describe "when the document has been deleted", ->
+    describe "when the ops come in a single linear order", ->
+      before ->
+        [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+        @lines = ["", "", ""]
+        MockWebApi.insertDoc @project_id, @doc_id, {
+          lines: @lines
+        }
+
+        @updates = [
+          { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] }
+          { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] }
+          { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] }
+          { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] }
+          { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] }
+          { doc_id: @doc_id, v: 5, op: [i: " ", p: 5 ] }
+          { doc_id: @doc_id, v: 6, op: [i: "w", p: 6 ] }
+          { doc_id: @doc_id, v: 7, op: [i: "o", p: 7 ] }
+          { doc_id: @doc_id, v: 8, op: [i: "r", p: 8 ] }
+          { doc_id: @doc_id, v: 9, op: [i: "l", p: 9 ] }
+          { doc_id: @doc_id, v: 10, op: [i: "d", p: 10] }
+        ]
+        @result = ["hello world", "", ""]
+
+      it "should be able to continue applying updates when the document has been deleted", (done) ->
+        actions = []
+        for update in @updates.slice(0,6)
+          do (update) =>
+            actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback
+        actions.push (callback) => DocUpdaterClient.deleteDoc @project_id, @doc_id, callback
+        for update in @updates.slice(6)
+          do (update) =>
+            actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback
+
+        async.series actions, (error) =>
+          throw error if error?
+          DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+            doc.lines.should.deep.equal @result
+            done()
+
+    describe "when older ops come in after the delete", ->
+      before ->
+        [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+        @lines = ["", "", ""]
+        MockWebApi.insertDoc @project_id, @doc_id, {
+          lines: @lines
+        }
+
+        @updates = [
+          { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] }
+          { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] }
+          { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] }
+          { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] }
+          { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] }
+          { doc_id: @doc_id, v: 0, op: [i: "world", p: 1 ] }
+        ]
+        @result = ["hello", "world", ""]
+
+      it "should be able to continue applying updates when the document has been deleted", (done) ->
+        actions = []
+        for update in @updates.slice(0,5)
+          do (update) =>
+            actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback
+        actions.push (callback) => DocUpdaterClient.deleteDoc @project_id, @doc_id, callback
+        for update in @updates.slice(5)
+          do (update) =>
+            actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback
+
+        async.series actions, (error) =>
+          throw error if error?
+          DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+            doc.lines.should.deep.equal @result
+            done()
+
+    describe "when the mongo array has been trimmed", ->
+      before ->
+        [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+        @lines = ["", "", ""]
+        MockWebApi.insertDoc @project_id, @doc_id, {
+          lines: @lines
+        }
+
+        @updates = [
+          { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] }
+          { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] }
+          { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] }
+          { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] }
+          { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] }
+          { doc_id: @doc_id, v: 3, op: [i: "world", p: 4 ] }
+        ]
+        @result = ["hello", "world", ""]
+
+      it "should be able to reload the required ops from the trimmed mongo array", (done) ->
+        actions = []
+        # Apply first set of ops
+        for update in @updates.slice(0,5)
+          do (update) =>
+            actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback
+        # Delete doc from redis and trim ops back to version 3
+        actions.push (callback) => DocUpdaterClient.deleteDoc @project_id, @doc_id, callback
+        actions.push (callback) =>
+          db.docOps.update({doc_id: ObjectId(@doc_id)}, {$push: docOps: { $each: [], $slice: -2 }}, callback)
+        # Apply older update back from version 3
+        for update in @updates.slice(5)
+          do (update) =>
+            actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback
+        # Flush ops to mongo
+        actions.push (callback) => DocUpdaterClient.flushDoc @project_id, @doc_id, callback
+
+        async.series actions, (error) =>
+          throw error if error?
+          DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+            db.docOps.find {doc_id: ObjectId(@doc_id)}, (error, docOps) =>
+              # Check mongo array has been trimmed
+              docOps = docOps[0]
+              docOps.docOps.length.should.equal 3
+              # Check ops have all been applied properly
+              doc.lines.should.deep.equal @result
+              done()
+
+  describe "with a broken update", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines
+      }
+      DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) ->
+        throw error if error?
+        setTimeout done, 200
+
+    it "should not update the doc", (done) ->
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        doc.lines.should.deep.equal @lines
+        done()
+
diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee
new file mode 100644
index 0000000000..d28f37cd6d
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee
@@ -0,0 +1,89 @@
+sinon = require "sinon"
+chai = require("chai")
+chai.should()
+
+MockWebApi = require "./helpers/MockWebApi"
+DocUpdaterClient = require "./helpers/DocUpdaterClient"
+
+describe "Deleting a document", ->
+  before ->
+    @lines = ["one", "two", "three"]
+    @update =
+      doc: @doc_id
+      op: [{
+        i: "one and a half\n"
+        p: 4
+      }]
+      v: 0
+    @result = ["one", "one and a half", "two", "three"]
+
+  describe "when the updated doc exists in the doc updater", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines
+      }
+      sinon.spy MockWebApi, "setDocumentLines"
+      sinon.spy MockWebApi, "getDocument"
+      DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
+        throw error if error?
+        DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) =>
+          throw error if error?
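+          # Updates are applied asynchronously off the redis queue, so give
+          # the service ~200ms to process the op before deleting the doc.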
+          setTimeout () =>
+            DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) =>
+              @statusCode = res.statusCode
+              done()
+          , 200
+
+    after ->
+      MockWebApi.setDocumentLines.restore()
+      MockWebApi.getDocument.restore()
+
+    it "should return a 204 status code", ->
+      @statusCode.should.equal 204
+
+    it "should send the updated document to the web api", ->
+      MockWebApi.setDocumentLines
+        .calledWith(@project_id, @doc_id, @result)
+        .should.equal true
+
+    it "should need to reload the doc if read again", (done) ->
+      MockWebApi.getDocument.called.should.equal false
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        MockWebApi.getDocument
+          .calledWith(@project_id, @doc_id)
+          .should.equal true
+        done()
+
+  describe "when the doc is not in the doc updater", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines
+      }
+      sinon.spy MockWebApi, "setDocumentLines"
+      sinon.spy MockWebApi, "getDocument"
+      DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) =>
+        @statusCode = res.statusCode
+        done()
+
+    after ->
+      MockWebApi.setDocumentLines.restore()
+      MockWebApi.getDocument.restore()
+
+    it "should return a 204 status code", ->
+      @statusCode.should.equal 204
+
+    it "should not need to send the updated document to the web api", ->
+      MockWebApi.setDocumentLines.called.should.equal false
+
+    it "should need to reload the doc if read again", (done) ->
+      MockWebApi.getDocument.called.should.equal false
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        MockWebApi.getDocument
+          .calledWith(@project_id, @doc_id)
+          .should.equal true
+        done()
+
+
diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee
new file mode 100644
index 0000000000..7b07ed6a25
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee
@@ -0,0 +1,81 @@
+sinon = require "sinon"
+chai = require("chai")
+chai.should()
+async = require "async"
+
+MockWebApi = require "./helpers/MockWebApi"
+DocUpdaterClient = require "./helpers/DocUpdaterClient"
+
+describe "Deleting a project", ->
+  before ->
+    @project_id = DocUpdaterClient.randomId()
+    @docs = [{
+      id: doc_id0 = DocUpdaterClient.randomId()
+      lines: ["one", "two", "three"]
+      update:
+        doc: doc_id0
+        op: [{
+          i: "one and a half\n"
+          p: 4
+        }]
+        v: 0
+      updatedLines: ["one", "one and a half", "two", "three"]
+    }, {
+      id: doc_id1 = DocUpdaterClient.randomId()
+      lines: ["four", "five", "six"]
+      update:
+        doc: doc_id1
+        op: [{
+          i: "four and a half\n"
+          p: 5
+        }]
+        v: 0
+      updatedLines: ["four", "four and a half", "five", "six"]
+    }]
+    for doc in @docs
+      MockWebApi.insertDoc @project_id, doc.id, {
+        lines: doc.lines
+      }
+
+  describe "with documents which have been updated", ->
+    before (done) ->
+      sinon.spy MockWebApi, "setDocumentLines"
+      async.series @docs.map((doc) =>
+        (callback) =>
+          DocUpdaterClient.preloadDoc @project_id, doc.id, (error) =>
+            return callback(error) if error?
+            DocUpdaterClient.sendUpdate @project_id, doc.id, doc.update, (error) =>
+              callback(error)
+      ), (error) =>
+        throw error if error?
+        setTimeout () =>
+          DocUpdaterClient.deleteProject @project_id, (error, res, body) =>
+            @statusCode = res.statusCode
+            done()
+        , 200
+
+    after ->
+      MockWebApi.setDocumentLines.restore()
+
+    it "should return a 204 status code", ->
+      @statusCode.should.equal 204
+
+    it "should send each document to the web api", ->
+      for doc in @docs
+        MockWebApi.setDocumentLines
+          .calledWith(@project_id, doc.id, doc.updatedLines)
+          .should.equal true
+
+    it "should need to reload the docs if read again", (done) ->
+      sinon.spy MockWebApi, "getDocument"
+      async.series @docs.map((doc) =>
+        (callback) =>
+          MockWebApi.getDocument.calledWith(@project_id, doc.id).should.equal false
+          DocUpdaterClient.getDoc @project_id, doc.id, (error, res, returnedDoc) =>
+            MockWebApi.getDocument.calledWith(@project_id, doc.id).should.equal true
+            callback()
+      ), () ->
+        MockWebApi.getDocument.restore()
+        done()
+
+
diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee
new file mode 100644
index 0000000000..02b44e3fd6
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee
@@ -0,0 +1,76 @@
+sinon = require "sinon"
+chai = require("chai")
+chai.should()
+async = require "async"
+
+MockWebApi = require "./helpers/MockWebApi"
+DocUpdaterClient = require "./helpers/DocUpdaterClient"
+
+describe "Flushing a project", ->
+  before ->
+    @project_id = DocUpdaterClient.randomId()
+    @docs = [{
+      id: doc_id0 = DocUpdaterClient.randomId()
+      lines: ["one", "two", "three"]
+      update:
+        doc: doc_id0
+        op: [{
+          i: "one and a half\n"
+          p: 4
+        }]
+        v: 0
+      updatedLines: ["one", "one and a half", "two", "three"]
+    }, {
+      id: doc_id1 = DocUpdaterClient.randomId()
+      lines: ["four", "five", "six"]
+      update:
+        doc: doc_id1
+        op: [{
+          i: "four and a half\n"
+          p: 5
+        }]
+        v: 0
+      updatedLines: ["four", "four and a half", "five", "six"]
+    }]
+    for doc in @docs
+      MockWebApi.insertDoc @project_id, doc.id, {
+        lines: doc.lines
+      }
+
+  describe "with documents which have been updated", ->
+    before (done) ->
+      sinon.spy MockWebApi, "setDocumentLines"
+      async.series @docs.map((doc) =>
+        (callback) =>
+          DocUpdaterClient.preloadDoc @project_id, doc.id, (error) =>
+            return callback(error) if error?
+            DocUpdaterClient.sendUpdate @project_id, doc.id, doc.update, (error) =>
+              callback(error)
+      ), (error) =>
+        throw error if error?
+        setTimeout () =>
+          DocUpdaterClient.flushProject @project_id, (error, res, body) =>
+            @statusCode = res.statusCode
+            done()
+        , 200
+
+    after ->
+      MockWebApi.setDocumentLines.restore()
+
+    it "should return a 204 status code", ->
+      @statusCode.should.equal 204
+
+    it "should send each document to the web api", ->
+      for doc in @docs
+        MockWebApi.setDocumentLines
+          .calledWith(@project_id, doc.id, doc.updatedLines)
+          .should.equal true
+
+    it "should update the lines in the doc updater", (done) ->
+      async.series @docs.map((doc) =>
+        (callback) =>
+          DocUpdaterClient.getDoc @project_id, doc.id, (error, res, returnedDoc) =>
+            returnedDoc.lines.should.deep.equal doc.updatedLines
+            callback()
+      ), done
+
diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee
new file mode 100644
index 0000000000..aaaef99936
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee
@@ -0,0 +1,97 @@
+sinon = require "sinon"
+chai = require("chai")
+chai.should()
+async = require "async"
+
+MockWebApi = require "./helpers/MockWebApi"
+DocUpdaterClient = require "./helpers/DocUpdaterClient"
+mongojs = require "../../../app/js/mongojs"
+db = mongojs.db
+ObjectId = mongojs.ObjectId
+
+describe "Flushing a doc to Mongo", ->
+  before ->
+    @lines = ["one", "two", "three"]
+    @update =
+      doc: @doc_id
+      op: [{
+        i: "one and a half\n"
+        p: 4
+      }]
+      v: 0
+    @result = ["one", "one and a half", "two", "three"]
+    MockWebApi.insertDoc @project_id, @doc_id, {
+      lines: @lines
+    }
+
+  describe "when the updated doc exists in the doc updater", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines
+      }
+      sinon.spy MockWebApi, "setDocumentLines"
+
+      DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) =>
+        throw error if error?
+        setTimeout () =>
+          DocUpdaterClient.flushDoc @project_id, @doc_id, done
+        , 200
+
+    after ->
+      MockWebApi.setDocumentLines.restore()
+
+    it "should flush the updated document to the web api", ->
+      MockWebApi.setDocumentLines
+        .calledWith(@project_id, @doc_id, @result)
+        .should.equal true
+
+    it "should flush the doc ops to Mongo", (done) ->
+      db.docOps.find doc_id: ObjectId(@doc_id), (error, docs) =>
+        doc = docs[0]
+        doc.docOps[0].op.should.deep.equal @update.op
+        done()
+
+  describe "when the doc has a large number of ops to be flushed", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines
+      }
+      @updates = []
+      for v in [0..999]
+        @updates.push
+          doc_id: @doc_id,
+          op: [i: v.toString(), p: 0]
+          v: v
+
+      DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) =>
+        throw error if error?
+        setTimeout () =>
+          DocUpdaterClient.flushDoc @project_id, @doc_id, done
+        , 200
+
+    it "should flush the doc ops to Mongo in order", (done) ->
+      db.docOps.find doc_id: ObjectId(@doc_id), (error, docs) =>
+        doc = docs[0]
+        updates = @updates.slice(-100)
+        for update, i in doc.docOps
+          update.op.should.deep.equal updates[i].op
+        done()
+
+  describe "when the doc does not exist in the doc updater", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines
+      }
+      sinon.spy MockWebApi, "setDocumentLines"
+      DocUpdaterClient.flushDoc @project_id, @doc_id, done
+
+    after ->
+      MockWebApi.setDocumentLines.restore()
+
+    it "should not flush the doc to the web api", ->
+      MockWebApi.setDocumentLines.called.should.equal false
+
+
diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee
new file mode 100644
index 0000000000..0e8456e45f
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee
@@ -0,0 +1,107 @@
+sinon = require "sinon"
+chai = require("chai")
+chai.should()
+
+MockWebApi = require "./helpers/MockWebApi"
+DocUpdaterClient = require "./helpers/DocUpdaterClient"
+
+describe "Getting a document", ->
+  describe "when the document is not loaded", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines = ["one", "two", "three"]
+      }
+      sinon.spy MockWebApi, "getDocument"
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done()
+
+    after ->
+      MockWebApi.getDocument.restore()
+
+    it "should load the document from the web API", ->
+      MockWebApi.getDocument
+        .calledWith(@project_id, @doc_id)
+        .should.equal true
+
+    it "should return the document lines", ->
+      @returnedDoc.lines.should.deep.equal @lines
+
+    it "should return the document at version 0", ->
+      @returnedDoc.version.should.equal 0
+
+  describe "when the document is already loaded", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines = ["one", "two", "three"]
+      }
+
+      DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
+        throw error if error?
+        sinon.spy MockWebApi, "getDocument"
+        DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done()
+
+    after ->
+      MockWebApi.getDocument.restore()
+
+    it "should not load the document from the web API", ->
+      MockWebApi.getDocument.called.should.equal false
+
+    it "should return the document lines", ->
+      @returnedDoc.lines.should.deep.equal @lines
+
+  describe "when the request asks for some recent ops", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      MockWebApi.insertDoc @project_id, @doc_id, {
+        lines: @lines = ["one", "two", "three"]
+      }
+
+      @updates = for v in [0..99]
+        doc_id: @doc_id,
+        op: [i: v.toString(), p: 0]
+        v: v
+
+      DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) =>
+        throw error if error?
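+        # 100 updates (v0 to v99) have now been applied; requesting ops from
+        # version 90 below should return only the last ten.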
+        sinon.spy MockWebApi, "getDocument"
+        DocUpdaterClient.getDocAndRecentOps @project_id, @doc_id, 90, (error, res, @returnedDoc) => done()
+
+    after ->
+      MockWebApi.getDocument.restore()
+
+    it "should return the recent ops", ->
+      @returnedDoc.ops.length.should.equal 10
+      for update, i in @updates.slice(90, -1)
+        @returnedDoc.ops[i].op.should.deep.equal update.op
+
+  describe "when the document does not exist", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        @statusCode = res.statusCode
+        done()
+
+    it "should return 404", ->
+      @statusCode.should.equal 404
+
+  describe "when the web api returns an error", ->
+    before (done) ->
+      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+      sinon.stub MockWebApi, "getDocument", (project_id, doc_id, callback = (error, doc) ->) ->
+        callback new Error("oops")
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        @statusCode = res.statusCode
+        done()
+
+    after ->
+      MockWebApi.getDocument.restore()
+
+    it "should return 500", ->
+      @statusCode.should.equal 500
+
+
diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee
new file mode 100644
index 0000000000..cc0f30834a
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee
@@ -0,0 +1,58 @@
+sinon = require "sinon"
+chai = require("chai")
+chai.should()
+
+MockWebApi = require "./helpers/MockWebApi"
+DocUpdaterClient = require "./helpers/DocUpdaterClient"
+
+describe "Setting a document", ->
+  before ->
+    [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
+    @lines = ["one", "two", "three"]
+    @update =
+      doc: @doc_id
+      op: [{
+        i: "one and a half\n"
+        p: 4
+      }]
+      v: 0
+    @result = ["one", "one and a half", "two", "three"]
+    @newLines = ["these", "are", "the", "new", "lines"]
+    MockWebApi.insertDoc @project_id, @doc_id, {
+      lines: @lines
+    }
+
+  describe "when the updated doc exists in the doc updater", ->
+    before (done) ->
+      sinon.spy MockWebApi, "setDocumentLines"
+      DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
+        throw error if error?
+        DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) =>
+          throw error if error?
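+          # setDocLines (below) is expected to replace the content by diffing
+          # it against the current lines and applying the result as one more
+          # op, which is why the version assertion further down expects 2
+          # (one op from @update plus one from the set).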
+          setTimeout () =>
+            DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, (error, res, body) =>
+              @statusCode = res.statusCode
+              done()
+          , 200
+
+    after ->
+      MockWebApi.setDocumentLines.restore()
+
+    it "should return a 204 status code", ->
+      @statusCode.should.equal 204
+
+    it "should send the updated document to the web api", ->
+      MockWebApi.setDocumentLines
+        .calledWith(@project_id, @doc_id, @newLines)
+        .should.equal true
+
+    it "should update the lines in the doc updater", (done) ->
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        doc.lines.should.deep.equal @newLines
+        done()
+
+    it "should bump the version in the doc updater", (done) ->
+      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+        doc.version.should.equal 2
+        done()
+
diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee
new file mode 100644
index 0000000000..4ddef90d26
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee
@@ -0,0 +1,66 @@
+rclient = require("redis").createClient()
+request = require("request").defaults(jar: false)
+async = require "async"
+
+module.exports = DocUpdaterClient =
+  randomId: () ->
+    return require("../../../../app/js/mongojs").ObjectId().toString()
+
+  sendUpdate: (project_id, doc_id, update, callback = (error) ->) ->
+    rclient.rpush "PendingUpdates:#{doc_id}", JSON.stringify(update), (error) ->
+      return callback(error) if error?
+      doc_key = "#{project_id}:#{doc_id}"
+      rclient.sadd "DocsWithPendingUpdates", doc_key, (error) ->
+        return callback(error) if error?
+        rclient.publish "pending-updates", doc_key, callback
+
+  sendUpdates: (project_id, doc_id, updates, callback = (error) ->) ->
+    DocUpdaterClient.preloadDoc project_id, doc_id, (error) ->
+      return callback(error) if error?
+      jobs = []
+      for update in updates
+        do (update) ->
+          jobs.push (callback) ->
+            DocUpdaterClient.sendUpdate project_id, doc_id, update, callback
+      async.series jobs, callback
+
+  getDoc: (project_id, doc_id, callback = (error, res, body) ->) ->
+    request.get "http://localhost:3003/project/#{project_id}/doc/#{doc_id}", (error, res, body) ->
+      if body? and res.statusCode >= 200 and res.statusCode < 300
+        body = JSON.parse(body)
+      callback error, res, body
+
+  getDocAndRecentOps: (project_id, doc_id, fromVersion, callback = (error, res, body) ->) ->
+    request.get "http://localhost:3003/project/#{project_id}/doc/#{doc_id}?fromVersion=#{fromVersion}", (error, res, body) ->
+      if body? and res.statusCode >= 200 and res.statusCode < 300
+        body = JSON.parse(body)
+      callback error, res, body
+
+  preloadDoc: (project_id, doc_id, callback = (error) ->) ->
+    DocUpdaterClient.getDoc project_id, doc_id, callback
+
+  flushDoc: (project_id, doc_id, callback = (error) ->) ->
+    request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/flush", (error, res, body) ->
+      callback error, res, body
+
+  setDocLines: (project_id, doc_id, lines, callback = (error) ->) ->
+    request.post {
+      url: "http://localhost:3003/project/#{project_id}/doc/#{doc_id}"
+      json:
+        lines: lines
+    }, (error, res, body) ->
+      callback error, res, body
+
+  deleteDoc: (project_id, doc_id, callback = (error) ->) ->
+    request.del "http://localhost:3003/project/#{project_id}/doc/#{doc_id}", (error, res, body) ->
+      callback error, res, body
+
+  flushProject: (project_id, callback = () ->) ->
+    request.post "http://localhost:3003/project/#{project_id}/flush", callback
+
+  deleteProject: (project_id, callback = () ->) ->
+    request.del "http://localhost:3003/project/#{project_id}", callback
+
+
diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee
new file mode 100644
index 0000000000..7d50eb8377
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee
@@ -0,0 +1,40 @@
+express = require("express")
+app = express()
+
+module.exports = MockWebApi =
+  docs: {}
+
+  clearDocs: () -> @docs = {}
+
+  insertDoc: (project_id, doc_id, doc) ->
+    @docs["#{project_id}:#{doc_id}"] = doc
+
+  setDocumentLines: (project_id, doc_id, lines, callback = (error) ->) ->
+    @docs["#{project_id}:#{doc_id}"] ||= {}
+    @docs["#{project_id}:#{doc_id}"].lines = lines
+    callback null
+
+  getDocument: (project_id, doc_id, callback = (error, doc) ->) ->
+    callback null, @docs["#{project_id}:#{doc_id}"]
+
+  run: () ->
+    app.get "/project/:project_id/doc/:doc_id", (req, res, next) =>
+      @getDocument req.params.project_id, req.params.doc_id, (error, doc) ->
+        if error?
+          res.send 500
+        else if doc?
+          res.send JSON.stringify doc
+        else
+          res.send 404
+
+    app.post "/project/:project_id/doc/:doc_id", express.bodyParser(), (req, res, next) =>
+      @setDocumentLines req.params.project_id, req.params.doc_id, req.body.lines, (error) ->
+        if error?
+          res.send 500
+        else
+          res.send 204
+
+    app.listen(3000)
+
+MockWebApi.run()
+
diff --git a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee
new file mode 100644
index 0000000000..1ca00bb305
--- /dev/null
+++ b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee
@@ -0,0 +1,58 @@
+require('coffee-script')
+assert = require('assert')
+path = require('path')
+modulePath = path.join __dirname, '../../../app/js/RedisManager.js'
+keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js')
+project_id = 1234
+doc_id = 5678
+loadModule = require('./module-loader').loadModule
+
+describe 'putting a doc into memory', () ->
+  lines = ["this is one line", "and another line"]
+  version = 42
+
+  potentialSets = {}
+  potentialSets[keys.docLines(doc_id: doc_id)] = lines
+  potentialSets[keys.projectKey(doc_id: doc_id)] = project_id
+  potentialSets[keys.docVersion(doc_id: doc_id)] = version
+
+  potentialSAdds = {}
+  potentialSAdds[keys.allDocs] = doc_id
+  potentialSAdds[keys.docsInProject(project_id: project_id)] = doc_id
+
+  potentialDels = {}
+  potentialDels[keys.docOps(doc_id: doc_id)] = true
+
+  mocks =
+    "logger-sharelatex": log: ->
+    redis:
+      createClient: () ->
+        auth: ->
+        multi: () ->
+          set: (key, value) ->
+            result = potentialSets[key]
+            delete potentialSets[key]
+            if key == keys.docLines(doc_id: doc_id)
+              value = JSON.parse(value)
+            assert.deepEqual result, value
+          incr: () ->
+          sadd: (key, value) ->
+            result = potentialSAdds[key]
+            delete potentialSAdds[key]
+            assert.equal result, value
+          del: (key) ->
+            result = potentialDels[key]
+            delete potentialDels[key]
+            assert.equal result, true
+          exec: (callback) ->
+            callback()
+
+  redisManager = loadModule(modulePath, mocks).module.exports
+
+  it 'should put all the doc data into memory', (done) ->
+    redisManager.putDocInMemory project_id, doc_id, lines, version, () ->
+      assert.deepEqual potentialSets, {}
+      assert.deepEqual potentialSAdds, {}
+      assert.deepEqual potentialDels, {}
+      done()
+
diff --git a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee b/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee
new file mode 100644
index 0000000000..4f76c48ae7
--- /dev/null
+++ b/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee
@@ -0,0 +1,27 @@
+assert = require('chai').assert
+sinon = require('sinon')
+chai = require('chai')
+should = chai.should()
+modulePath = "../../../app/js/RedisManager.js"
+SandboxedModule = require('sandboxed-module')
+
+doc_id = "1234"
+
+describe 'Document Manager - getUpdatesLength', ->
+
+  beforeEach ->
+    @llenStub = sinon.stub()
+    @redisManager = SandboxedModule.require modulePath, requires:
+      redis:
+        createClient: =>
+          auth: ->
+          llen: @llenStub
+
+  it "should return the number of things to process in the queue", (done) ->
+    @llenStub.callsArgWith(1, null, 3)
+    @redisManager.getUpdatesLength doc_id, (err, len) =>
+      @llenStub.calledWith("PendingUpdates:#{doc_id}").should.equal true
+      len.should.equal 3
+      done()
diff --git a/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.coffee b/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.coffee
new file mode 100644
index 0000000000..bcd07c0479
--- /dev/null
+++ b/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.coffee
@@ -0,0 +1,56 @@
+sinon = require('sinon')
+chai = require('chai')
+should = chai.should()
+expect = chai.expect
+modulePath = "../../../../app/js/DiffCodec.js"
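+# SandboxedModule lets each unit test inject stubbed versions of a module's
+# requires (redis, logger, mongo) in place of the real dependencies.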
+SandboxedModule = require('sandboxed-module')
+
+describe "DiffCodec", ->
+  beforeEach ->
+    @callback = sinon.stub()
+    @DiffCodec = SandboxedModule.require modulePath
+
+  describe "diffAsShareJsOp", ->
+    it "should insert new text correctly", (done) ->
+      @before = ["hello world"]
+      @after = ["hello beautiful world"]
+      @DiffCodec.diffAsShareJsOp @before, @after, (error, ops) ->
+        expect(ops).to.deep.equal [
+          i: "beautiful "
+          p: 6
+        ]
+        done()
+
+    it "should shift later inserts by previous inserts", (done) ->
+      @before = ["the boy played with the ball"]
+      @after = ["the tall boy played with the red ball"]
+      @DiffCodec.diffAsShareJsOp @before, @after, (error, ops) ->
+        expect(ops).to.deep.equal [
+          { i: "tall ", p: 4 }
+          { i: "red ", p: 29 }
+        ]
+        done()
+
+    it "should delete text correctly", (done) ->
+      @before = ["hello beautiful world"]
+      @after = ["hello world"]
+      @DiffCodec.diffAsShareJsOp @before, @after, (error, ops) ->
+        expect(ops).to.deep.equal [
+          d: "beautiful "
+          p: 6
+        ]
+        done()
+
+    it "should shift later deletes by the first deletes", (done) ->
+      @before = ["the tall boy played with the red ball"]
+      @after = ["the boy played with the ball"]
+      @DiffCodec.diffAsShareJsOp @before, @after, (error, ops) ->
+        expect(ops).to.deep.equal [
+          { d: "tall ", p: 4 }
+          { d: "red ", p: 24 }
+        ]
+        done()
+
+
diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee
new file mode 100644
index 0000000000..83e0ff48cf
--- /dev/null
+++ b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee
@@ -0,0 +1,309 @@
+sinon = require('sinon')
+chai = require('chai')
+should = chai.should()
+modulePath = "../../../../app/js/DocOpsManager.js"
+SandboxedModule = require('sandboxed-module')
+ObjectId = require("../../../../app/js/mongojs").ObjectId
+
+describe "DocOpsManager", ->
+  beforeEach ->
+    @doc_id = ObjectId().toString()
+    @project_id = "project-id"
+    @callback = sinon.stub()
+    @DocOpsManager = SandboxedModule.require modulePath, requires:
+      "./RedisManager": @RedisManager = {}
+      "./mongojs":
+        db: @db = { docOps: {} }
+        ObjectId: ObjectId
+      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+      "./Metrics": @Metrics =
+        Timer: class Timer
+          done: sinon.stub()
+
+  describe "flushDocOpsToMongo", ->
+    describe "when versions are consistent", ->
+      beforeEach ->
+        @mongo_version = 40
+        @redis_version = 42
+        @ops = [ "mock-op-1", "mock-op-2" ]
+        @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @mongo_version)
+        @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redis_version)
+        @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops)
+        @DocOpsManager._appendDocOpsInMongo = sinon.stub().callsArg(3)
+        @DocOpsManager.flushDocOpsToMongo @project_id, @doc_id, @callback
+
+      it "should get the version from Mongo", ->
+        @DocOpsManager.getDocVersionInMongo
+          .calledWith(@doc_id)
+          .should.equal true
+
+      it "should get the version from Redis", ->
+        @RedisManager.getDocVersion
+          .calledWith(@doc_id)
+          .should.equal true
+
+      it "should get all doc ops since the version in Mongo", ->
+        @RedisManager.getPreviousDocOps
+          .calledWith(@doc_id, @mongo_version, -1)
+          .should.equal true
+
+      it "should update Mongo with the new ops", ->
+        @DocOpsManager._appendDocOpsInMongo
+          .calledWith(@doc_id, @ops, @redis_version)
+          .should.equal true
+
+      it "should call the callback", ->
+        @callback.called.should.equal true
+
+      it "should time the execution", ->
+        @Metrics.Timer::done.called.should.equal true
+
+    describe "when the number of ops does not match the difference in versions", ->
+      beforeEach ->
+        @mongo_version = 40
+        @redis_version = 45
+        @ops = [ "mock-op-1", "mock-op-2" ]
+        @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @mongo_version)
+        @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redis_version)
+        @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops)
+        @DocOpsManager._appendDocOpsInMongo = sinon.stub().callsArg(3)
+        @DocOpsManager.flushDocOpsToMongo @project_id, @doc_id, @callback
+
+      it "should call the callback with an error", ->
+        @callback.calledWith(new Error("inconsistet versions")).should.equal true
+
+      it "should log an error", ->
+        @logger.error
+          .calledWith(doc_id: @doc_id, mongoVersion: @mongo_version, redisVersion: @redis_version, opsLength: @ops.length, "version difference does not match ops length")
+          .should.equal true
+
+      it "should not modify mongo", ->
+        @DocOpsManager._appendDocOpsInMongo.called.should.equal false
+
+      it "should time the execution", ->
+        @Metrics.Timer::done.called.should.equal true
+
+    describe "when redis version is behind mongo version", ->
+      beforeEach ->
+        @mongo_version = 40
+        @redis_version = 30
+        @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @mongo_version)
+        @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redis_version)
+        @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops)
+        @DocOpsManager._appendDocOpsInMongo = sinon.stub().callsArg(3)
+        @DocOpsManager.flushDocOpsToMongo @project_id, @doc_id, @callback
+
+      it "should call the callback with an error", ->
+        @callback.calledWith(new Error("inconsistet versions")).should.equal true
+
+      it "should log an error", ->
+        @logger.error
+          .calledWith(doc_id: @doc_id, mongoVersion: @mongo_version, redisVersion: @redis_version, "mongo version is ahead of redis")
+          .should.equal true
+
+      it "should not modify mongo", ->
+        @DocOpsManager._appendDocOpsInMongo.called.should.equal false
+
+      it "should time the execution", ->
+        @Metrics.Timer::done.called.should.equal true
+
+  describe "getPreviousDocOps", ->
+    beforeEach ->
+      @ops = [ "mock-op-1", "mock-op-2" ]
+      @start = 30
+      @end = 32
+      @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops)
+      @DocOpsManager._ensureOpsAreLoaded = sinon.stub().callsArg(3)
+      @DocOpsManager.getPreviousDocOps @project_id, @doc_id, @start, @end, @callback
+
+    it "should ensure the ops are loaded back far enough", ->
+      @DocOpsManager._ensureOpsAreLoaded
+        .calledWith(@project_id, @doc_id, @start)
+        .should.equal true
+
+    it "should get the previous doc ops", ->
+      @RedisManager.getPreviousDocOps
+        .calledWith(@doc_id, @start, @end)
+        .should.equal true
+
+    it "should call the callback with the ops", ->
+      @callback.calledWith(null, @ops).should.equal true
+
+    it "should time the execution", ->
+      @Metrics.Timer::done.called.should.equal true
+
+  describe "_ensureOpsAreLoaded", ->
+    describe "when the ops are not loaded", ->
+      beforeEach ->
+        @redisVersion = 42
+        @redisOpsLength = 10
+        @backToVersion = 30
+        @ops = [ "mock-op-1", "mock-op-2" ]
+        @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redisVersion)
+        @RedisManager.getDocOpsLength = sinon.stub().callsArgWith(1, null, @redisOpsLength)
+        @DocOpsManager._getDocOpsFromMongo = sinon.stub().callsArgWith(3, null, @ops)
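+        # Redis holds version 42 but only the last 10 ops, so it can only
+        # rewind to version 32; loading back to version 30 must fetch the
+        # missing ops from mongo and prepend them to the redis list.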
+        @RedisManager.prependDocOps = sinon.stub().callsArgWith(2, null)
+        @DocOpsManager._ensureOpsAreLoaded @project_id, @doc_id, @backToVersion, @callback
+
+      it "should get the doc version from redis", ->
+        @RedisManager.getDocVersion
+          .calledWith(@doc_id)
+          .should.equal true
+
+      it "should get the doc ops length in redis", ->
+        @RedisManager.getDocOpsLength
+          .calledWith(@doc_id)
+          .should.equal true
+
+      it "should get the doc ops that need loading from Mongo", ->
+        @DocOpsManager._getDocOpsFromMongo
+          .calledWith(@doc_id, @backToVersion, @redisVersion - @redisOpsLength)
+          .should.equal true
+
+      it "should prepend the retrieved ops to redis", ->
+        @RedisManager.prependDocOps
+          .calledWith(@doc_id, @ops)
+          .should.equal true
+
+      it "should call the callback", ->
+        @callback.called.should.equal true
+
+    describe "when the ops are loaded", ->
+      beforeEach ->
+        @redisVersion = 42
+        @redisOpsLength = 10
+        @backToVersion = 35
+        @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redisVersion)
+        @RedisManager.getDocOpsLength = sinon.stub().callsArgWith(1, null, @redisOpsLength)
+        @DocOpsManager._getDocOpsFromMongo = sinon.stub().callsArgWith(3, null, @ops)
+        @RedisManager.prependDocOps = sinon.stub().callsArgWith(2, null)
+        @DocOpsManager._ensureOpsAreLoaded @project_id, @doc_id, @backToVersion, @callback
+
+      it "should not need to get the docs from Mongo or put any into redis", ->
+        @DocOpsManager._getDocOpsFromMongo.called.should.equal false
+        @RedisManager.prependDocOps.called.should.equal false
+
+      it "should call the callback", ->
+        @callback.called.should.equal true
+
+  describe "getDocVersionInMongo", ->
+    describe "when the doc exists", ->
+      beforeEach ->
+        @doc =
+          version: @version = 42
+        @db.docOps.find = sinon.stub().callsArgWith(2, null, [@doc])
+        @DocOpsManager.getDocVersionInMongo @doc_id, @callback
+
+      it "should look for the doc in the database", ->
+        @db.docOps.find
+          .calledWith({ doc_id: ObjectId(@doc_id) }, {version: 1})
+          .should.equal true
+
+      it "should call the callback with the version", ->
+        @callback.calledWith(null, @version).should.equal true
+
+    describe "when the doc doesn't exist", ->
+      beforeEach ->
+        @db.docOps.find = sinon.stub().callsArgWith(2, null, [])
+        @DocOpsManager.getDocVersionInMongo @doc_id, @callback
+
+      it "should call the callback with 0", ->
+        @callback.calledWith(null, 0).should.equal true
+
+  describe "_appendDocOpsInMongo", ->
+    describe "with a small set of updates", ->
+      beforeEach (done) ->
+        @ops = [ "mock-op-1", "mock-op-2" ]
+        @version = 42
+        @db.docOps.update = sinon.stub().callsArg(3)
+        @DocOpsManager._appendDocOpsInMongo @doc_id, @ops, @version, (error) =>
+          @callback(error)
+          done()
+
+      it "should update the database", ->
+        @db.docOps.update
+          .calledWith({
+            doc_id: ObjectId(@doc_id)
+          }, {
+            $push: docOps: { $each: @ops, $slice: -100 }
+            $set: version: @version
+          }, {
+            upsert: true
+          })
+          .should.equal true
+
+      it "should call the callback", ->
+        @callback.called.should.equal true
+
+    describe "with a large set of updates", ->
+      beforeEach (done) ->
+        @ops = [ "mock-op-1", "mock-op-2", "mock-op-3", "mock-op-4", "mock-op-5" ]
+        @version = 42
+        @DocOpsManager.APPEND_OPS_BATCH_SIZE = 2
+        @db.docOps.update = sinon.stub().callsArg(3)
+        @DocOpsManager._appendDocOpsInMongo @doc_id, @ops, @version, (error) =>
+          @callback(error)
+          done()
+
+      it "should update the database in batches", ->
+        @db.docOps.update
+          .calledWith({ doc_id: ObjectId(@doc_id) }, {
+            $push: docOps: { $each: @ops.slice(0,2), $slice: -100 }
+            $set: version: @version - 3
+          }, { upsert: true })
+          .should.equal true
+        @db.docOps.update
+          .calledWith({ doc_id: ObjectId(@doc_id) }, {
+            $push: docOps: { $each: @ops.slice(2,4), $slice: -100 }
+            $set: version: @version - 1
+          }, { upsert: true })
+          .should.equal true
+        @db.docOps.update
+          .calledWith({ doc_id: ObjectId(@doc_id) }, {
+            $push: docOps: { $each: @ops.slice(4,5), $slice: -100 }
+            $set: version: @version
+          }, { upsert: true })
+          .should.equal true
+
+      it "should call the callback", ->
+        @callback.called.should.equal true
+
+    describe "with no updates", ->
+      beforeEach (done) ->
+        @ops = []
+        @version = 42
+        @db.docOps.update = sinon.stub().callsArg(3)
+        @DocOpsManager._appendDocOpsInMongo @doc_id, @ops, @version, (error) =>
+          @callback(error)
+          done()
+
+      it "should not try to update the database", ->
+        @db.docOps.update.called.should.equal false
+
+  describe "_getDocsOpsFromMongo", ->
+    beforeEach ->
+      @version = 42
+      @start = 32
+      @limit = 5
+      @doc =
+        docOps: ["mock-ops"]
+      @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version)
+      @db.docOps.find = sinon.stub().callsArgWith(2, null, [@doc])
+      @DocOpsManager._getDocOpsFromMongo @doc_id, @start, @start + @limit, @callback
+
+    it "should get the current version", ->
+      @DocOpsManager.getDocVersionInMongo
+        .calledWith(@doc_id)
+        .should.equal true
+
+    it "should get the doc ops", ->
+      @db.docOps.find
+        .calledWith({ doc_id: ObjectId(@doc_id) }, {
+          docOps: $slice: [-(@version - @start), @limit]
+        })
+        .should.equal true
+
+    it "should return the ops", ->
+      @callback.calledWith(null, @doc.docOps).should.equal true
+
+
diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee
new file mode 100644
index 0000000000..85a25ee5a7
--- /dev/null
+++ b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee
@@ -0,0 +1,41 @@
+sinon = require('sinon')
+chai = require('chai')
+should = chai.should()
+modulePath = "../../../../app/js/DocumentManager.js"
+SandboxedModule = require('sandboxed-module')
+
+describe "DocumentUpdater - flushAndDeleteDoc", ->
+  beforeEach ->
+    @DocumentManager = SandboxedModule.require modulePath, requires:
+      "./RedisManager": @RedisManager = {}
+      "./PersistenceManager": @PersistenceManager = {}
+      "logger-sharelatex": @logger = {log: sinon.stub()}
+      "./DocOpsManager": {}
+      "./Metrics": @Metrics =
+        Timer: class Timer
+          done: sinon.stub()
+    @project_id = "project-id-123"
+    @doc_id = "doc-id-123"
+    @callback = sinon.stub()
+
+  describe "successfully", ->
+    beforeEach ->
+      @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2)
+      @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2)
+      @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback
+
+    it "should flush the doc", ->
+      @DocumentManager.flushDocIfLoaded
+        .calledWith(@project_id, @doc_id)
+        .should.equal true
+
+    it "should remove the doc from redis", ->
+      @RedisManager.removeDocFromMemory
+        .calledWith(@project_id, @doc_id)
+        .should.equal true
+
+    it "should call the callback without error", ->
+      @callback.calledWith(null).should.equal true
+
+    it "should time the execution", ->
+      @Metrics.Timer::done.called.should.equal true
diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee
new file mode 100644
index 0000000000..079341a536
--- /dev/null
/dev/null +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee @@ -0,0 +1,73 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/DocumentManager.js" +SandboxedModule = require('sandboxed-module') + +describe "DocumentUpdater - flushDocIfLoaded", -> + beforeEach -> + @DocumentManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./PersistenceManager": @PersistenceManager = {} + "./DocOpsManager": @DocOpsManager = {} + "logger-sharelatex": @logger = {log: sinon.stub()} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @version = 42 + @callback = sinon.stub() + + describe "when the doc is in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) + @PersistenceManager.setDoc = sinon.stub().callsArgWith(3) + @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) + @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback + + it "should get the doc from redis", -> + @RedisManager.getDoc + .calledWith(@doc_id) + .should.equal true + + it "should write the doc lines to the persistence layer", -> + @PersistenceManager.setDoc + .calledWith(@project_id, @doc_id, @lines) + .should.equal true + + it "should write the doc ops to mongo", -> + @DocOpsManager.flushDocOpsToMongo + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the document is not in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, null, null) + @PersistenceManager.setDoc = sinon.stub().callsArgWith(3) + @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) + @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback + + it "should get the doc from redis", -> + @RedisManager.getDoc + .calledWith(@doc_id) + .should.equal true + + it "should not write anything to the persistence layer", -> + @PersistenceManager.setDoc.called.should.equal false + @DocOpsManager.flushDocOpsToMongo.called.should.equal false + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee new file mode 100644 index 0000000000..7a296cc47d --- /dev/null +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee @@ -0,0 +1,67 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/DocumentManager.js" +SandboxedModule = require('sandboxed-module') + +describe "DocumentUpdater - getDocAndRecentOps", -> + beforeEach -> + @DocumentManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./PersistenceManager": @PersistenceManager = {} + "./DocOpsManager": @DocOpsManager = {} + "logger-sharelatex": @logger = {log: sinon.stub()} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @doc_id = "doc-id-123" 
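+ # Fixture note: the doc sits at version 42 while the client asks for ops from version 40, + # so getDocAndRecentOps is expected to fetch the intervening ops via + # DocOpsManager.getPreviousDocOps and return them alongside the current doc lines.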
+ @lines = ["one", "two", "three"] + @version = 42 + @fromVersion = 40 + @ops = ["mock-op-1", "mock-op-2"] + @callback = sinon.stub() + + describe "with a previous version specified", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) + @DocOpsManager.getPreviousDocOps = sinon.stub().callsArgWith(4, null, @ops) + @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should get the doc ops", -> + @DocOpsManager.getPreviousDocOps + .calledWith(@project_id, @doc_id, @fromVersion, @version) + .should.equal true + + it "should call the callback with the doc info", -> + @callback.calledWith(null, @lines, @version, @ops).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "with no previous version specified", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) + @DocOpsManager.getPreviousDocOps = sinon.stub().callsArgWith(4, null, @ops) + @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should not need to get the doc ops", -> + @DocOpsManager.getPreviousDocOps.called.should.equal false + + it "should call the callback with the doc info", -> + @callback.calledWith(null, @lines, @version, []).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee new file mode 100644 index 0000000000..93de1725fa --- /dev/null +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee @@ -0,0 +1,75 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/DocumentManager.js" +SandboxedModule = require('sandboxed-module') + +describe "DocumentUpdater - getDoc", -> + beforeEach -> + @DocumentManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./PersistenceManager": @PersistenceManager = {} + "./DocOpsManager": @DocOpsManager = {} + "logger-sharelatex": @logger = {log: sinon.stub()} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @version = 42 + @callback = sinon.stub() + + describe "when the doc exists in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) + @DocumentManager.getDoc @project_id, @doc_id, @callback + + it "should get the doc from Redis", -> + @RedisManager.getDoc + .calledWith(@doc_id) + .should.equal true + + it "should call the callback with the doc info", -> + @callback.calledWith(null, @lines, @version).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the doc does not exist in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, null, null) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines) + @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) + @RedisManager.putDocInMemory = 
sinon.stub().callsArg(4) + @DocumentManager.getDoc @project_id, @doc_id, @callback + + it "should try to get the doc from Redis", -> + @RedisManager.getDoc + .calledWith(@doc_id) + .should.equal true + + it "should get the doc version from Mongo", -> + @DocOpsManager.getDocVersionInMongo + .calledWith(@doc_id) + .should.equal true + + it "should get the doc from the PersistenceManager", -> + @PersistenceManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should set the doc in Redis", -> + @RedisManager.putDocInMemory + .calledWith(@project_id, @doc_id, @lines, @version) + .should.equal true + + it "should call the callback with the doc info", -> + @callback.calledWith(null, @lines, @version).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + +
diff --git a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee new file mode 100644 index 0000000000..d4b5e931b8 --- /dev/null +++ b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee @@ -0,0 +1,105 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/DocumentManager.js" +SandboxedModule = require('sandboxed-module') + +describe "DocumentManager - setDoc", -> + beforeEach -> + @DocumentManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./PersistenceManager": @PersistenceManager = {} + "./DiffCodec": @DiffCodec = {} + "./DocOpsManager":{} + "./UpdateManager": @UpdateManager = {} + "logger-sharelatex": @logger = {log: sinon.stub()} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @version = 42 + @ops = ["mock-ops"] + @callback = sinon.stub() + + describe "with plain text lines", -> + beforeEach -> + @beforeLines = ["before", "lines"] + @afterLines = ["after", "lines"] + + describe "successfully", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version) + @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) + @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null) + @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @callback + + it "should get the current doc lines", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return a diff of the old and new lines", -> + @DiffCodec.diffAsShareJsOp + .calledWith(@beforeLines, @afterLines) + .should.equal true + + it "should apply the diff as a ShareJS op", -> + @UpdateManager.applyUpdates + .calledWith(@project_id, @doc_id, [doc: @doc_id, v: @version, op: @ops, meta: { type: "external" }]) + .should.equal true + + it "should flush the doc to Mongo", -> + @DocumentManager.flushDocIfLoaded + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "with json lines", -> + beforeEach -> + @beforeLines = [{text: "before"}, {text: "lines"}] + @afterLines = ["after", "lines"] + + describe "successfully", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version) + @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) + @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null) + @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @callback + + it "should get the current doc lines", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should not try to get a diff", -> + @DiffCodec.diffAsShareJsOp.called.should.equal false + + it "should call the callback", -> + @callback.calledWith(null).should.equal true + + describe "without new lines", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version) + @DocumentManager.setDoc @project_id, @doc_id, null, @callback + + it "should return the callback with an error", -> + @callback.calledWith(new Error("No lines were passed to setDoc")).should.equal true + + it "should not try to get the doc lines", -> + @DocumentManager.getDoc.called.should.equal false + + + + + + +
diff --git a/services/document-updater/test/unit/coffee/GettingDoc.coffee b/services/document-updater/test/unit/coffee/GettingDoc.coffee new file mode 100644 index 0000000000..824fe14a3f --- /dev/null +++ b/services/document-updater/test/unit/coffee/GettingDoc.coffee @@ -0,0 +1,41 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../app/js/RedisManager.js" +SandboxedModule = require('sandboxed-module') + +describe 'RedisManager - getDoc', -> + beforeEach -> + @rclient = {} + @rclient.auth = () -> + @rclient.multi = () => @rclient + + @RedisManager = SandboxedModule.require modulePath, requires: + "redis": @redis = + createClient: () => @rclient + + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @jsonlines = JSON.stringify @lines + @version = 42 + @callback = sinon.stub() + + @rclient.get = sinon.stub() + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version]) + + @RedisManager.getDoc @doc_id, @callback + + it "should get the lines from redis", -> + @rclient.get + .calledWith("doclines:#{@doc_id}") + .should.equal true + + it "should get the version from redis", -> + @rclient.get + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it 'should return the document', -> + @callback + .calledWith(null, @lines, @version) + .should.equal true
diff --git a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee new file mode 100644 index 0000000000..cb98e8f601 --- /dev/null +++ b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee @@ -0,0 +1,42 @@ +assert = require('assert') +should = require('chai').should() +path = require('path') +modulePath = path.join __dirname, '../../../app/js/RedisManager.js' +_ = require('underscore') +loadModule = require('./module-loader').loadModule +keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') + +describe 'getting entire list of pending updates', ()-> + + doc_id = 123 + redisMemory = {} + correctUpdates = [{"update1"}, {"update2"}, {"update3"}] + jsonCorrectUpdates = _.map correctUpdates, (d)-> JSON.stringify d + redisMemory[keys.pendingUpdates(doc_id:doc_id)] = jsonCorrectUpdates + redisMemory[keys.pendingUpdates(doc_id:"notThis")] = JSON.stringify([{"updatex"}, {"updatez"}]) + + redisReturn = [] + + mocks = + redis: + createClient: ()-> + auth:-> + multi: ()-> + lrange:(key, start, end)-> + key.should.equal(keys.pendingUpdates(doc_id:doc_id)) + start.should.equal(0) + end.should.equal(-1) + redisReturn.push(redisMemory[key]) + del : (key)-> + key.should.equal(keys.pendingUpdates(doc_id:doc_id)) + redisReturn.push(1) + exec: (callback)-> + callback(null, redisReturn) + + redisManager = loadModule(modulePath, mocks).module.exports + + it 'should have 3 elements in array', (done)-> + redisManager.getPendingUpdatesForDoc doc_id, (err, listOfUpdates)-> + listOfUpdates.length.should.equal(3) + done() +
diff --git a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee new file mode 100644 index 0000000000..ae4af4825d --- /dev/null +++ b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee @@ -0,0 +1,47 @@ +require('coffee-script') +assert = require('assert') +should = require('chai').should() +path = require('path') +modulePath = path.join __dirname, '../../../app/js/RedisManager.js' +keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') +loadModule = require('./module-loader').loadModule + +describe 'getting count of docs from memory', ()-> + + project_id = "12345" + doc_id1 = "docid1" + doc_id2 = "docid2" + doc_id3 = "docid3" + + redisMemory = {} + redisManager = undefined + + beforeEach (done)-> + mocks = + "logger-sharelatex": log:-> + redis: + createClient : ()-> + auth:-> + smembers:(key, callback)-> + callback(null, redisMemory[key]) + multi: ()-> + set:(key, value)-> + redisMemory[key] = value + sadd:(key, value)-> + if !redisMemory[key]? + redisMemory[key] = [] + redisMemory[key].push value + del:()-> + exec:(callback)-> + callback() + + redisManager = loadModule(modulePath, mocks).module.exports + redisManager.putDocInMemory project_id, doc_id1, 0, ["line"], -> + redisManager.putDocInMemory project_id, doc_id2, 0, ["ledf"], -> + redisManager.putDocInMemory project_id, doc_id3, 0, ["ledf"], -> + done() + + it 'should return total', (done)-> + redisManager.getCountOfDocsInMemory (err, count)-> + assert.equal count, 3 + done()
diff --git a/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee b/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee new file mode 100644 index 0000000000..e3c6eda35c --- /dev/null +++ b/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee @@ -0,0 +1,63 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/HttpController.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors.js" + +describe "HttpController - deleteProject", -> + beforeEach -> + @HttpController = SandboxedModule.require modulePath, requires: + "./DocumentManager": @DocumentManager = {} + "./ProjectManager": @ProjectManager = {} + "logger-sharelatex" : @logger = { log: sinon.stub() } + "./Metrics": @Metrics = {} + + @Metrics.Timer = class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @res = + send: sinon.stub() + @req = + params: + project_id: @project_id + @next = sinon.stub() + + describe "successfully", -> + beforeEach -> + @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(1) + @HttpController.deleteProject(@req, @res, @next) + + it "should delete the project", -> + @ProjectManager.flushAndDeleteProjectWithLocks + .calledWith(@project_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(project_id: @project_id, "deleting project via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops")) + @HttpController.deleteProject(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + + + +
diff --git a/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee new file mode 100644 index 0000000000..f586b6c4f8 --- /dev/null +++ b/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee @@ -0,0 +1,64 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/HttpController.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors.js" + +describe "HttpController - flushAndDeleteDoc", -> + beforeEach -> + @HttpController = SandboxedModule.require modulePath, requires: + "./DocumentManager": @DocumentManager = {} + "./ProjectManager":{} + "logger-sharelatex" : @logger = { log: sinon.stub() } + "./Metrics": @Metrics = {} + + @Metrics.Timer = class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @res = + send: sinon.stub() + @req = + params: + project_id: @project_id + doc_id: @doc_id + @next = sinon.stub() + + describe "successfully", -> + beforeEach -> + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2) + @HttpController.flushAndDeleteDoc(@req, @res, @next) + + it "should flush and delete the doc", -> + @DocumentManager.flushAndDeleteDocWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, "deleting doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2, new Error("oops")) + @HttpController.flushAndDeleteDoc(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + + +
diff --git a/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee new file mode 100644 index 0000000000..69c0137676 --- /dev/null +++ b/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee @@ -0,0 +1,65 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/HttpController.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors.js" + +describe "HttpController - flushDocIfLoaded", -> + beforeEach -> + @HttpController = SandboxedModule.require modulePath, requires: + "./DocumentManager": @DocumentManager = {} + "./ProjectManager": {} + "logger-sharelatex" : @logger = { log: sinon.stub() } + "./Metrics": @Metrics = {} + + @Metrics.Timer = class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @version = 42 + @res = + send: sinon.stub() + @req = + params: + project_id: @project_id + doc_id: @doc_id + @next = sinon.stub() + + describe "successfully", -> + beforeEach -> + @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2) + @HttpController.flushDocIfLoaded(@req, @res, @next) + + it "should flush the doc", -> + @DocumentManager.flushDocIfLoadedWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, "flushing doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2, new Error("oops")) + @HttpController.flushDocIfLoaded(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + +
diff --git a/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee new file mode 100644 index 0000000000..5175cd4280 --- /dev/null +++ b/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee @@ -0,0 +1,62 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/HttpController.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors.js" + +describe "HttpController - flushProject", -> + beforeEach -> + @HttpController = SandboxedModule.require modulePath, requires: + "./DocumentManager": @DocumentManager = {} + "./ProjectManager": @ProjectManager = {} + "logger-sharelatex" : @logger = { log: sinon.stub() } + "./Metrics": @Metrics = {} + + @Metrics.Timer = class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @res = + send: sinon.stub() + @req = + params: + project_id: @project_id + @next = sinon.stub() + + describe "successfully", -> + beforeEach -> + @ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1) + @HttpController.flushProject(@req, @res, @next) + + it "should flush the project", -> + @ProjectManager.flushProjectWithLocks + .calledWith(@project_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(project_id: @project_id, "flushing project via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops")) + @HttpController.flushProject(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + +
diff --git a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee new file mode 100644 index 0000000000..4ec493bc4b --- /dev/null +++ b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee @@ -0,0 +1,110 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/HttpController.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors.js" + +describe "HttpController - getDoc", -> + beforeEach -> + @HttpController = SandboxedModule.require modulePath, requires: + "./DocumentManager": @DocumentManager = {} + "./ProjectManager": {} + "logger-sharelatex" : @logger = { log: sinon.stub() } + "./Metrics": @Metrics = {} + + @Metrics.Timer = class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @ops = ["mock-op-1", "mock-op-2"] + @version = 42 + @fromVersion = 42 + @res = + send: sinon.stub() + @req = + params: + project_id: @project_id + doc_id: @doc_id + @next = sinon.stub() + + describe "when the document exists and no recent ops are requested", -> + beforeEach -> + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, []) + @HttpController.getDoc(@req, @res, @next) + + it "should get the doc", -> + @DocumentManager.getDocAndRecentOpsWithLock + .calledWith(@project_id, @doc_id, -1) + .should.equal true + + it "should return the doc as JSON", -> + @res.send + .calledWith(JSON.stringify({ + id: @doc_id + lines: @lines + version: @version + ops: [] + })) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when recent ops are requested", -> + beforeEach -> + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, @ops) + @req.query = fromVersion: "#{@fromVersion}" + @HttpController.getDoc(@req, @res, @next) + + it "should get the doc", -> + @DocumentManager.getDocAndRecentOpsWithLock + .calledWith(@project_id, @doc_id, @fromVersion) + .should.equal true + + it "should return the doc as JSON", -> + @res.send + .calledWith(JSON.stringify({ + id: @doc_id + lines: @lines + version: @version + ops: @ops + })) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the document does not exist", -> + beforeEach -> + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, null, null) + @HttpController.getDoc(@req, @res, @next) + + it "should call next with NotFoundError", -> + @next + .calledWith(new Errors.NotFoundError("not found")) + .should.equal true + + describe "when an error occurs", -> + beforeEach -> + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, new Error("oops"), null, null) + @HttpController.getDoc(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + +
diff --git a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee new file mode 100644 index 0000000000..2c3924c030 --- /dev/null +++ b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee @@ -0,0 +1,67 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/HttpController.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors.js" + +describe "HttpController - setDoc", -> + beforeEach -> + @HttpController = SandboxedModule.require modulePath, requires: + "./DocumentManager": @DocumentManager = {} + "./ProjectManager": {} + "logger-sharelatex" : @logger = { log: sinon.stub() } + "./Metrics": @Metrics = {} + + @Metrics.Timer = class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @res = + send: sinon.stub() + @req = + params: + project_id: @project_id + doc_id: @doc_id + body: + lines: @lines + @next = sinon.stub() + + describe "successfully", -> + beforeEach -> + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(3) + @HttpController.setDoc(@req, @res, @next) + + it "should set the doc", -> + @DocumentManager.setDocWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, lines: @lines, "setting doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(3, new Error("oops")) + @HttpController.setDoc(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + + +
diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee new file mode 100644 index 0000000000..ac72cbae93 --- /dev/null +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -0,0 +1,50 @@ +require('coffee-script') +sinon = require('sinon') +assert = require('assert') +path = require('path') +modulePath = path.join __dirname, '../../../../app/js/LockManager.js' +keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') +project_id = 1234 +doc_id = 5678 +blockingKey = "Blocking:#{doc_id}" +loadModule = require('../module-loader').loadModule + +describe 'Lock Manager - checking the lock', ()-> + + existsStub = sinon.stub() + setStub = sinon.stub() + expireStub = sinon.stub() + execStub = sinon.stub() + + mocks = + "logger-sharelatex": log:-> + + redis: + createClient : ()-> + auth:-> + multi: -> + exists: existsStub + expire: expireStub + set: setStub + exec: execStub + LockManager = loadModule(modulePath, mocks).module.exports + + it 'should check if the lock exists but not set or expire it', (done)-> + execStub.callsArgWith(0, null, ["1"]) + LockManager.checkLock doc_id, (err, docIsLocked)-> + existsStub.calledWith(blockingKey).should.equal true + setStub.called.should.equal false + expireStub.called.should.equal false + done() + + it 'should return true if the key does not exist', (done)-> + execStub.callsArgWith(0, null, "0") + LockManager.checkLock doc_id, (err, free)-> + free.should.equal true + done() + + it 'should return false if the key does exist', (done)-> + execStub.callsArgWith(0, null, "1") + LockManager.checkLock doc_id, (err, free)-> + free.should.equal false + done()
diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee new file mode 100644 index 0000000000..81f42a3f59 --- /dev/null +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -0,0 +1,28 @@ +require('coffee-script') +sinon = require('sinon') +assert = require('assert') +path = require('path') +modulePath = path.join __dirname, '../../../../app/js/LockManager.js' +keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') +project_id = 1234 +doc_id = 5678 +loadModule = require('../module-loader').loadModule + +describe 'LockManager - releasing the lock', ()-> + + deleteStub = sinon.stub().callsArgWith(1) + mocks = + "logger-sharelatex": log:-> + + redis: + createClient : ()-> + auth:-> + del:deleteStub + + LockManager = loadModule(modulePath, mocks).module.exports + + it 'should delete the blocking key', (done)-> + LockManager.releaseLock doc_id, -> + deleteStub.calledWith("Blocking:#{doc_id}").should.equal true + done() +
diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee new file mode 100644 index 0000000000..f378650d95 --- /dev/null +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -0,0 +1,69 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/LockManager.js" +SandboxedModule = require('sandboxed-module') + +describe 'LockManager - getting the lock', -> + beforeEach -> + @LockManager = SandboxedModule.require modulePath, requires: + "logger-sharelatex": log:-> + redis: + createClient : () => + auth:-> + @callback = sinon.stub() + @doc_id = "doc-id-123" + + describe "when the lock is not set", -> + beforeEach (done) -> + @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true) + @LockManager.getLock @doc_id, (args...) => + @callback(args...) + done() + + it "should try to get the lock", -> + @LockManager.tryLock + .calledWith(@doc_id) + .should.equal true + + it "should only need to try once", -> + @LockManager.tryLock.callCount.should.equal 1 + + it "should return the callback", -> + @callback.calledWith(null).should.equal true + + describe "when the lock is initially set", -> + beforeEach (done) -> + startTime = Date.now() + @LockManager.LOCK_TEST_INTERVAL = 5 + @LockManager.tryLock = (doc_id, callback = (error, isFree) ->) -> + if Date.now() - startTime < 20 + callback null, false + else + callback null, true + sinon.spy @LockManager, "tryLock" + + @LockManager.getLock @doc_id, (args...) => + @callback(args...) + done() + + it "should call tryLock multiple times until free", -> + (@LockManager.tryLock.callCount > 1).should.equal true + + it "should return the callback", -> + @callback.calledWith(null).should.equal true + + describe "when the lock times out", -> + beforeEach (done) -> + time = Date.now() + @LockManager.MAX_LOCK_WAIT_TIME = 5 + @LockManager.tryLock = sinon.stub().callsArgWith(1, null, false) + @LockManager.getLock @doc_id, (args...) => + @callback(args...) + done() + + it "should return the callback with an error", -> + @callback.calledWith(new Error("timeout")).should.equal true + + +
diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee new file mode 100644 index 0000000000..cff2b9538b --- /dev/null +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -0,0 +1,37 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/LockManager.js" +SandboxedModule = require('sandboxed-module') + +describe 'LockManager - trying the lock', -> + beforeEach -> + @LockManager = SandboxedModule.require modulePath, requires: + "logger-sharelatex": log:-> + redis: + createClient : () => + auth:-> + set: @set = sinon.stub() + @callback = sinon.stub() + @doc_id = "doc-id-123" + + describe "when the lock is not set", -> + beforeEach -> + @set.callsArgWith(5, null, "OK") + @LockManager.tryLock @doc_id, @callback + + it "should set the lock key with an expiry if it is not set", -> + @set.calledWith("Blocking:#{@doc_id}", "locked", "EX", 10, "NX") + .should.equal true + + it "should return the callback with true", -> + @callback.calledWith(null, true).should.equal true + + describe "when the lock is already set", -> + beforeEach -> + @set.callsArgWith(5, null, null) + @LockManager.tryLock @doc_id, @callback + + it "should return the callback with false", -> + @callback.calledWith(null, false).should.equal true +
diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee new file mode 100644 index 0000000000..c5cfc35ac8 --- /dev/null +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee @@ -0,0 +1,85 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/PersistenceManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "PersistenceManager.getDoc", -> + beforeEach -> + @PersistenceManager = SandboxedModule.require modulePath, requires: + "request": @request = sinon.stub() + "settings-sharelatex": @Settings = {} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @callback = sinon.stub() + @Settings.apis = + web: + url: @url = "www.example.com" + user: @user = "sharelatex" + pass: @pass = "password" + + describe "with a successful response from the web api", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should call the web api", -> + @request + .calledWith({ + url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" + method: "GET" + headers: + "accept": "application/json" + auth: + user: @user + pass: @pass + sendImmediately: true + jar: false + }) + .should.equal true + + it "should call the callback with the doc lines", -> + @callback.calledWith(null, @lines).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error", -> + beforeEach -> + @request.callsArgWith(1, @error = new Error("oops"), null, null) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return the error", -> + @callback.calledWith(@error).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns 404", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 404}, "") + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error status code", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 500}, "") + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web api error")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + +
diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee new file mode 100644 index 0000000000..cd9d962d3b --- /dev/null +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee @@ -0,0 +1,86 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/PersistenceManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "PersistenceManager.setDoc", -> + beforeEach -> + @PersistenceManager = SandboxedModule.require modulePath, requires: + "request": @request = sinon.stub() + "settings-sharelatex": @Settings = {} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @callback = sinon.stub() + @Settings.apis = + web: + url: @url = "www.example.com" + user: @user = "sharelatex" + pass: @pass = "password" + + describe "with a successful response from the web api", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @callback) + + it "should call the web api", -> + @request + .calledWith({ + url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" + body: JSON.stringify + lines: @lines + method: "POST" + headers: + "content-type": "application/json" + auth: + user: @user + pass: @pass + sendImmediately: true + jar: false + }) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error", -> + beforeEach -> + @request.callsArgWith(1, @error = new Error("oops"), null, null) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @callback) + + it "should return the error", -> + @callback.calledWith(@error).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns 404", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 404}, "") + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @callback) + + it "should return a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the
request returns an error status code", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 500}, "") + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web api error")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee new file mode 100644 index 0000000000..fc2ea998f6 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee @@ -0,0 +1,75 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/ProjectManager.js" +SandboxedModule = require('sandboxed-module') + +describe "ProjectManager - flushAndDeleteProject", -> + beforeEach -> + @ProjectManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./DocumentManager": @DocumentManager = {} + "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + @project_id = "project-id-123" + @callback = sinon.stub() + + describe "successfully", -> + beforeEach (done) -> + @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] + @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(2) + @ProjectManager.flushAndDeleteProjectWithLocks @project_id, (error) => + @callback(error) + done() + + it "should get the doc ids in the project", -> + @RedisManager.getDocIdsInProject + .calledWith(@project_id) + .should.equal true + + it "should delete each doc in the project", -> + for doc_id in @doc_ids + @DocumentManager.flushAndDeleteDocWithLock + .calledWith(@project_id, doc_id) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when a doc errors", -> + beforeEach (done) -> + @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] + @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) + @DocumentManager.flushAndDeleteDocWithLock = sinon.spy (project_id, doc_id, callback = (error) ->) => + if doc_id == "doc-id-1" + callback(@error = new Error("oops, something went wrong")) + else + callback() + @ProjectManager.flushAndDeleteProjectWithLocks @project_id, (error) => + @callback(error) + done() + + it "should still flush each doc in the project", -> + for doc_id in @doc_ids + @DocumentManager.flushAndDeleteDocWithLock + .calledWith(@project_id, doc_id) + .should.equal true + + it "should record the error", -> + @logger.error + .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-1", "error deleting doc") + .should.equal true + + it "should call the callback with an error", -> + @callback.calledWith(new Error()).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee new file mode 100644 index 0000000000..301740c015 --- /dev/null +++ 
b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee @@ -0,0 +1,75 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/ProjectManager.js" +SandboxedModule = require('sandboxed-module') + +describe "ProjectManager - flushProject", -> + beforeEach -> + @ProjectManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./DocumentManager": @DocumentManager = {} + "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + @project_id = "project-id-123" + @callback = sinon.stub() + + describe "successfully", -> + beforeEach (done) -> + @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] + @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) + @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2) + @ProjectManager.flushProjectWithLocks @project_id, (error) => + @callback(error) + done() + + it "should get the doc ids in the project", -> + @RedisManager.getDocIdsInProject + .calledWith(@project_id) + .should.equal true + + it "should flush each doc in the project", -> + for doc_id in @doc_ids + @DocumentManager.flushDocIfLoadedWithLock + .calledWith(@project_id, doc_id) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when a doc errors", -> + beforeEach (done) -> + @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] + @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) + @DocumentManager.flushDocIfLoadedWithLock = sinon.spy (project_id, doc_id, callback = (error) ->) => + if doc_id == "doc-id-1" + callback(@error = new Error("oops, something went wrong")) + else + callback() + @ProjectManager.flushProjectWithLocks @project_id, (error) => + @callback(error) + done() + + it "should still flush each doc in the project", -> + for doc_id in @doc_ids + @DocumentManager.flushDocIfLoadedWithLock + .calledWith(@project_id, doc_id) + .should.equal true + + it "should record the error", -> + @logger.error + .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-1", "error flushing doc") + .should.equal true + + it "should call the callback with an error", -> + @callback.calledWith(new Error()).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + diff --git a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee new file mode 100644 index 0000000000..676d454167 --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee @@ -0,0 +1,27 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.clearDocFromPendingUpdatesSet", -> + beforeEach -> + @project_id = "project-id" + @doc_id = "document-id" + @callback = sinon.stub() + @RedisManager = SandboxedModule.require modulePath, requires: + "redis" : createClient: () => + @rclient = auth:-> + + @rclient.srem = sinon.stub().callsArg(2) + @RedisManager.clearDocFromPendingUpdatesSet(@project_id, @doc_id, @callback) + + it 
"should get the docs with pending updates", -> + @rclient.srem + .calledWith("DocsWithPendingUpdates", "#{@project_id}:#{@doc_id}") + .should.equal true + + it "should return the callback", -> + @callback.called.should.equal true + + diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee new file mode 100644 index 0000000000..602197ad57 --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee @@ -0,0 +1,33 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.getDocsWithPendingUpdates", -> + beforeEach -> + @callback = sinon.stub() + @RedisManager = SandboxedModule.require modulePath, requires: + "redis" : createClient: () => + @rclient = auth:-> + + @docs = [{ + doc_id: "doc-id-1" + project_id: "project-id-1" + }, { + doc_id: "doc-id-2" + project_id: "project-id-2" + }] + @doc_keys = @docs.map (doc) -> "#{doc.project_id}:#{doc.doc_id}" + + @rclient.smembers = sinon.stub().callsArgWith(1, null, @doc_keys) + @RedisManager.getDocsWithPendingUpdates(@callback) + + it "should get the docs with pending updates", -> + @rclient.smembers + .calledWith("DocsWithPendingUpdates") + .should.equal true + + it "should return the docs with pending updates", -> + @callback.calledWith(null, @docs).should.equal true + diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee new file mode 100644 index 0000000000..4910f1498f --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee @@ -0,0 +1,56 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager.js" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.getPendingUpdatesForDoc", -> + beforeEach -> + @RedisManager = SandboxedModule.require modulePath, requires: + "redis": createClient: () => + @rclient = + auth: () -> + multi: () => @rclient + "logger-sharelatex": @logger = {log: sinon.stub()} + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @callback = sinon.stub() + @rclient.lrange = sinon.stub() + @rclient.del = sinon.stub() + + describe "successfully", -> + beforeEach -> + @updates = [ + { op: [{ i: "foo", p: 4 }] } + { op: [{ i: "foo", p: 4 }] } + ] + @jsonUpdates = @updates.map (update) -> JSON.stringify update + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) + @RedisManager.getPendingUpdatesForDoc @doc_id, @callback + + it "should get the pending updates", -> + @rclient.lrange + .calledWith("PendingUpdates:#{@doc_id}", 0, -1) + .should.equal true + + it "should delete the pending updates", -> + @rclient.del + .calledWith("PendingUpdates:#{@doc_id}") + .should.equal true + + it "should call the callback with the updates", -> + @callback.calledWith(null, @updates).should.equal true + + describe "when the JSON doesn't parse", -> + beforeEach -> + @jsonUpdates = [ + JSON.stringify { op: [{ i: "foo", p: 4 }] } + "broken json" + ] + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) + @RedisManager.getPendingUpdatesForDoc @doc_id, @callback + + it "should return an error to the callback", 
-> + @callback.calledWith(new Error("JSON parse error")).should.equal true + + diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee new file mode 100644 index 0000000000..775418313e --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee @@ -0,0 +1,99 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.getPreviousDocOpsTests", -> + beforeEach -> + @callback = sinon.stub() + @RedisManager = SandboxedModule.require modulePath, requires: + "redis" : createClient: () => + @rclient = + auth: -> + multi: => @rclient + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub() } + @doc_id = "doc-id-123" + + describe "with a start and an end value", -> + beforeEach -> + @first_version_in_redis = 30 + @version = 70 + @length = @version - @first_version_in_redis + @start = 50 + @end = 60 + @ops = [ + { "mock": "op-1" }, + { "mock": "op-2" } + ] + @jsonOps = @ops.map (op) -> JSON.stringify op + @rclient.llen = sinon.stub().callsArgWith(1, null, @length) + @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) + @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) + @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + + it "should get the length of the existing doc ops", -> + @rclient.llen + .calledWith("DocOps:#{@doc_id}") + .should.equal true + + it "should get the current version of the doc", -> + @rclient.get + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it "should get the appropriate docs ops", -> + @rclient.lrange + .calledWith("DocOps:#{@doc_id}", @start - @first_version_in_redis, @end - @first_version_in_redis) + .should.equal true + + it "should return the docs with the doc ops deserialized", -> + @callback.calledWith(null, @ops).should.equal true + + describe "with an end value of -1", -> + beforeEach -> + @first_version_in_redis = 30 + @version = 70 + @length = @version - @first_version_in_redis + @start = 50 + @end = -1 + @ops = [ + { "mock": "op-1" }, + { "mock": "op-2" } + ] + @jsonOps = @ops.map (op) -> JSON.stringify op + @rclient.llen = sinon.stub().callsArgWith(1, null, @length) + @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) + @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) + @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + + it "should get the appropriate docs ops to the end of list", -> + @rclient.lrange + .calledWith("DocOps:#{@doc_id}", @start - @first_version_in_redis, -1) + .should.equal true + + it "should return the docs with the doc ops deserialized", -> + @callback.calledWith(null, @ops).should.equal true + + describe "when the requested range is not in Redis", -> + beforeEach -> + @first_version_in_redis = 30 + @version = 70 + @length = @version - @first_version_in_redis + @start = 20 + @end = -1 + @ops = [ + { "mock": "op-1" }, + { "mock": "op-2" } + ] + @jsonOps = @ops.map (op) -> JSON.stringify op + @rclient.llen = sinon.stub().callsArgWith(1, null, @length) + @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) + @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) + @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + + it "should return an error", -> + 
@callback.calledWith(new Error("range is not loaded in redis")).should.equal true + + it "should log out the problem", -> + @logger.error.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee new file mode 100644 index 0000000000..b4a8192d12 --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee @@ -0,0 +1,32 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.clearDocFromPendingUpdatesSet", -> + beforeEach -> + @doc_id = "document-id" + @callback = sinon.stub() + @RedisManager = SandboxedModule.require modulePath, requires: + "redis" : createClient: () => + @rclient = auth:-> + + @rclient.lpush = sinon.stub().callsArg(2) + @ops = [ + { "mock" : "op-1" }, + { "mock" : "op-2" } + ] + @reversedJsonOps = @ops.map((op) -> JSON.stringify op).reverse() + @RedisManager.prependDocOps(@doc_id, @ops, @callback) + + it "should push the reversed JSONed ops", -> + @rclient.lpush + .calledWith("DocOps:#{@doc_id}", @reversedJsonOps) + .should.equal true + + it "should return the callback", -> + @callback.called.should.equal true + + + diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee new file mode 100644 index 0000000000..0c76730437 --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee @@ -0,0 +1,37 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.getPreviousDocOpsTests", -> + beforeEach -> + @callback = sinon.stub() + @RedisManager = SandboxedModule.require modulePath, requires: + "redis" : createClient: () => + @rclient = + auth: -> + multi: => @rclient + @doc_id = "doc-id-123" + + beforeEach -> + @version = 70 + @op = + { "mock": "op-1" } + @jsonOp = JSON.stringify @op + @rclient.rpush = sinon.stub().callsArgWith(2, null) + @rclient.incr = sinon.stub().callsArgWith(1, null, @version.toString()) + @RedisManager.pushDocOp(@doc_id, @op, @callback) + + it "should push the op into redis", -> + @rclient.rpush + .calledWith("DocOps:#{@doc_id}", @jsonOp) + .should.equal true + + it "should increment the version number", -> + @rclient.incr + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it "should call the callback with the new version", -> + @callback.calledWith(null, @version).should.equal true diff --git a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee new file mode 100644 index 0000000000..9fd0136aad --- /dev/null +++ b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee @@ -0,0 +1,73 @@ +require('coffee-script') +_ = require("underscore") +assert = require('assert') +sinon = require('sinon') +path = require('path') +modulePath = path.join __dirname, '../../../app/js/RedisManager.js' +keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') +loadModule = require('./module-loader').loadModule + +describe 'removing single doc from memory', ()-> + + project_id = "12345" + doc_id1 = "docid1" + 
doc_id2 = "docid2" + doc_id3 = "docid3" + + redisMemory = undefined + redisManager = undefined + self = @ + beforeEach (done)-> + redisMemory = {} + + mocks = + "logger-sharelatex": + error:-> + log:-> + redis: + createClient : -> + auth:-> + multi: -> + get:-> + set:(key, value)-> + redisMemory[key] = value + sadd:(key, value)-> + if !redisMemory[key]? + redisMemory[key] = [] + redisMemory[key].push value + del : (key)-> + delete redisMemory[key] + srem : (key, member)-> + index = redisMemory[key].indexOf(member) + redisMemory[key].splice(index, 1) + exec:(callback)-> + callback(null, []) + + redisManager = loadModule(modulePath, mocks).module.exports + redisManager.putDocInMemory project_id, doc_id1, 0, ["line"], -> + redisManager.putDocInMemory project_id, doc_id2, 0, ["ledf"], -> + redisManager.putDocInMemory project_id, doc_id3, 0, ["ledf"], -> + done() + + it 'should remove doc lines from memory', (done)-> + keyExists = false + redisManager.removeDocFromMemory project_id, doc_id1, ()-> + assert.equal redisMemory[keys.docLines(doc_id:doc_id1)], undefined + keys = _.keys(redisMemory) + containsKey(keys, doc_id1) + keys.forEach (sets)-> + containsKey sets, doc_id1 + _.each redisMemory, (value)-> + if value.indexOf(doc_id1) != -1 + assert.equal false, "#{doc_id1} found in value #{value}" + done() + + +containsKey = (haystack, key)-> + if haystack.forEach? + haystack.forEach (area)-> + if area.indexOf(key) != -1 + assert.equal false, "#{key} found in haystack in #{area}" + + + diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee new file mode 100644 index 0000000000..4812619574 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee @@ -0,0 +1,54 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/ShareJsDB.js" +SandboxedModule = require('sandboxed-module') + +describe "ShareJsDB.getOps", -> + beforeEach -> + @doc_id = "document-id" + @project_id = "project-id" + @doc_key = "#{@project_id}:#{@doc_id}" + @callback = sinon.stub() + @ops = [{p: 20, t: "foo"}] + @redis_ops = (JSON.stringify(op) for op in @ops) + @ShareJsDB = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./DocOpsManager": @DocOpsManager = {} + "./DocumentManager":{} + + describe "with start == end", -> + beforeEach -> + @start = @end = 42 + @ShareJsDB.getOps @doc_key, @start, @end, @callback + + it "should return an empty array", -> + @callback.calledWith(null, []).should.equal true + + describe "with a non empty range", -> + beforeEach -> + @start = 35 + @end = 42 + @DocOpsManager.getPreviousDocOps = sinon.stub().callsArgWith(4, null, @ops) + @ShareJsDB.getOps @doc_key, @start, @end, @callback + + it "should get the range from redis", -> + @DocOpsManager.getPreviousDocOps + .calledWith(@project_id, @doc_id, @start, @end-1) + .should.equal true + + it "should return the ops", -> + @callback.calledWith(null, @ops).should.equal true + + describe "with no specified end", -> + beforeEach -> + @start = 35 + @end = null + @DocOpsManager.getPreviousDocOps = sinon.stub().callsArgWith(4, null, @ops) + @ShareJsDB.getOps @doc_key, @start, @end, @callback + + it "should get until the end of the list", -> + @DocOpsManager.getPreviousDocOps + .calledWith(@project_id, @doc_id, @start, -1) + .should.equal true + diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee 
b/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee new file mode 100644 index 0000000000..ef433c1f90 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee @@ -0,0 +1,85 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../../app/js/ShareJsDB.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "ShareJsDB.getSnapshot", -> + beforeEach -> + @doc_id = "document-id" + @project_id = "project-id" + @doc_key = "#{@project_id}:#{@doc_id}" + @callback = sinon.stub() + @ShareJsDB = SandboxedModule.require modulePath, requires: + "./DocumentManager": @DocumentManager = {} + "./RedisManager": {} + "./DocOpsManager": {} + + @version = 42 + + describe "with a text document", -> + beforeEach -> + @lines = ["one", "two", "three"] + + describe "successfully", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) + @ShareJsDB.getSnapshot @doc_key, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return the doc lines", -> + @callback.args[0][1].snapshot.should.equal @lines.join("\n") + + it "should return the doc version", -> + @callback.args[0][1].v.should.equal @version + + it "should return the type as text", -> + @callback.args[0][1].type.should.equal "text" + + describe "when the doclines do not exist", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) + @ShareJsDB.getSnapshot @doc_key, @callback + + it "should return the callback with a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + describe "when getDoc returns an error", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, @error = new Error("oops"), null, null) + @ShareJsDB.getSnapshot @doc_key, @callback + + it "should return the callback with an error", -> + @callback.calledWith(@error).should.equal true + + describe "with a JSON document", -> + beforeEach -> + @lines = [{text: "one"}, {text:"two"}, {text:"three"}] + + describe "successfully", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) + @ShareJsDB.getSnapshot @doc_key, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return the doc lines", -> + expect(@callback.args[0][1].snapshot).to.deep.equal lines: @lines + + it "should return the doc version", -> + @callback.args[0][1].v.should.equal @version + + it "should return the type as json", -> + @callback.args[0][1].type.should.equal "json" + + + diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee new file mode 100644 index 0000000000..b28f23d2f4 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee @@ -0,0 +1,53 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/ShareJsDB.js" +SandboxedModule = require('sandboxed-module') + +describe "ShareJsDB.writeOps", -> + beforeEach -> + @project_id = "project-id" + @doc_id = "document-id" + @doc_key = "#{@project_id}:#{@doc_id}" + @callback = sinon.stub() + @opData = + op: {p: 20, t: "foo"} + meta: {source:
"bar"} + @ShareJsDB = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./DocOpsManager": @DocOpsManager = {} + "./DocumentManager": {} + + describe "writing an op", -> + beforeEach -> + @version = 42 + @opData.v = @version + @DocOpsManager.pushDocOp = sinon.stub().callsArgWith(3, null, @version+1) + @ShareJsDB.writeOp @doc_key, @opData, @callback + + it "should write the op to redis", -> + op = + op: @opData.op + meta: @opData.meta + @DocOpsManager.pushDocOp + .calledWith(@project_id, @doc_id, op) + .should.equal true + + it "should call the callback without an error", -> + @callback.called.should.equal true + (@callback.args[0][0]?).should.equal false + + describe "writing an op at the wrong version", -> + beforeEach -> + @version = 42 + @mismatch = 5 + @opData.v = @version + @DocOpsManager.pushDocOp = sinon.stub().callsArgWith(3, null, @version + @mismatch) + @ShareJsDB.writeOp @doc_key, @opData, @callback + + it "should call the callback with an error", -> + @callback.calledWith(sinon.match.string).should.equal true + + + + diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee new file mode 100644 index 0000000000..af5a475836 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee @@ -0,0 +1,174 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../app/js/ShareJsUpdateManager.js" +SandboxedModule = require('sandboxed-module') + +describe "ShareJsUpdateManager", -> + beforeEach -> + @project_id = "project-id-123" + @doc_id = "document-id-123" + @callback = sinon.stub() + @ShareJsUpdateManager = SandboxedModule.require modulePath, + requires: + "./sharejs/server/model": + class Model + constructor: (@db) -> + "./ShareJsDB" : @ShareJsDB = { mockDB: true } + "redis" : createClient: () => @rclient = auth:-> + "logger-sharelatex": @logger = { log: sinon.stub() } + globals: + clearTimeout: @clearTimeout = sinon.stub() + + describe "applyUpdates", -> + beforeEach -> + @version = 34 + @model = + applyOp: sinon.stub().callsArg(2) + getSnapshot: sinon.stub() + @ShareJsUpdateManager.getNewShareJsModel = sinon.stub().returns(@model) + @ShareJsUpdateManager._listenForOps = sinon.stub() + @ShareJsUpdateManager.removeDocFromCache = sinon.stub().callsArg(1) + + describe "with a text document", -> + beforeEach -> + @updates = [ + {p: 4, t: "foo"} + {p: 6, t: "bar"} + ] + @updatedDocLines = ["one", "two"] + + describe "successfully", -> + beforeEach (done) -> + @model.getSnapshot.callsArgWith(1, null, {snapshot: @updatedDocLines.join("\n"), v: @version}) + @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @callback(err, docLines, version) + done() + + it "should create a new ShareJs model", -> + @ShareJsUpdateManager.getNewShareJsModel + .called.should.equal true + + it "should listen for ops on the model", -> + @ShareJsUpdateManager._listenForOps + .calledWith(@model) + .should.equal true + + it "should send each update to ShareJs", -> + for update in @updates + @model.applyOp + .calledWith("#{@project_id}:#{@doc_id}", update).should.equal true + + it "should get the updated doc lines", -> + @model.getSnapshot + .calledWith("#{@project_id}:#{@doc_id}") + .should.equal true + + it "should return the updated doc lines", -> + @callback.calledWith(null, @updatedDocLines, @version).should.equal true + + describe "when applyOp 
fails", -> + beforeEach (done) -> + @error = new Error("Something went wrong") + @ShareJsUpdateManager._sendError = sinon.stub() + @model.applyOp = sinon.stub().callsArgWith(2, @error) + @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @callback(err, docLines, version) + done() + + it "should call sendError with the error", -> + @ShareJsUpdateManager._sendError + .calledWith(@project_id, @doc_id, @error) + .should.equal true + + it "should call the callback with the error", -> + @callback.calledWith(@error).should.equal true + + describe "when getSnapshot fails", -> + beforeEach (done) -> + @error = new Error("Something went wrong") + @ShareJsUpdateManager._sendError = sinon.stub() + @model.getSnapshot.callsArgWith(1, @error) + @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @callback(err, docLines, version) + done() + + it "should call sendError with the error", -> + @ShareJsUpdateManager._sendError + .calledWith(@project_id, @doc_id, @error) + .should.equal true + + it "should call the callback with the error", -> + @callback.calledWith(@error).should.equal true + + describe "with a JSON document", -> + beforeEach -> + @updates = [ + {p: ["lines", 0], dl: { foo: "bar "}} + ] + @docLines = [text: "one", text: "two"] + + describe "successfully", -> + beforeEach (done) -> + @model.getSnapshot.callsArgWith(1, null, {snapshot: {lines: @docLines}, v: @version}) + @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @callback(err, docLines, version) + done() + + it "should create a new ShareJs model", -> + @ShareJsUpdateManager.getNewShareJsModel + .called.should.equal true + + it "should listen for ops on the model", -> + @ShareJsUpdateManager._listenForOps + .calledWith(@model) + .should.equal true + + it "should send each update to ShareJs", -> + for update in @updates + @model.applyOp + .calledWith("#{@project_id}:#{@doc_id}", update).should.equal true + + it "should get the updated doc lines", -> + @model.getSnapshot + .calledWith("#{@project_id}:#{@doc_id}") + .should.equal true + + it "should return the updated doc lines", -> + @callback.calledWith(null, @docLines, @version).should.equal true + + describe "_listenForOps", -> + beforeEach -> + @model = on: (event, callback) => + @callback = callback + sinon.spy @model, "on" + @ShareJsUpdateManager._listenForOps(@model) + + it "should listen to the model for updates", -> + @model.on.calledWith("applyOp") + .should.equal true + + describe "the callback", -> + beforeEach -> + @opData = + op: {t: "foo", p: 1} + meta: source: "bar" + @rclient.publish = sinon.stub() + @callback("#{@project_id}:#{@doc_id}", @opData) + + it "should publish the op to redis", -> + @rclient.publish + .calledWith("applied-ops", JSON.stringify(project_id: @project_id, doc_id: @doc_id, op: @opData)) + .should.equal true + + describe "_sendError", -> + beforeEach -> + @error_text = "Something went wrong" + @rclient.publish = sinon.stub() + @ShareJsUpdateManager._sendError(@project_id, @doc_id, new Error(@error_text)) + + it "should publish the error to the redis stream", -> + @rclient.publish + .calledWith("applied-ops", JSON.stringify(project_id: @project_id, doc_id: @doc_id, error: @error_text)) + .should.equal true + diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee new file mode 100644 index 
0000000000..f421d545a7 --- /dev/null +++ b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee @@ -0,0 +1,198 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/UpdateManager.js" +SandboxedModule = require('sandboxed-module') + +describe "UpdateManager", -> + beforeEach -> + @project_id = "project-id-123" + @doc_id = "document-id-123" + @callback = sinon.stub() + @UpdateManager = SandboxedModule.require modulePath, requires: + "./LockManager" : @LockManager = {} + "./RedisManager" : @RedisManager = {} + "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} + "logger-sharelatex": @logger = { log: sinon.stub() } + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + + describe "resumeProcessing", -> + beforeEach (done) -> + @docs = [{ + doc_id: "doc-1" + project_id: "project-1" + }, { + doc_id: "doc-2" + project_id: "project-2" + }, { + doc_id: "doc-3" + project_id: "project-3" + }] + @RedisManager.getDocsWithPendingUpdates = sinon.stub().callsArgWith(0, null, @docs) + @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) + @UpdateManager.resumeProcessing(done) + + it "should get the docs that haven't been processed yet", -> + @RedisManager.getDocsWithPendingUpdates + .called.should.equal true + + it "should call processOutstandingUpdatesWithLock for each doc", -> + for doc in @docs + @UpdateManager.processOutstandingUpdatesWithLock + .calledWith(doc.project_id, doc.doc_id) + .should.equal true + + describe "processOutstandingUpdates", -> + beforeEach -> + @UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2) + @RedisManager.clearDocFromPendingUpdatesSet = sinon.stub().callsArg(2) + @UpdateManager.processOutstandingUpdates @project_id, @doc_id, @callback + + it "should apply the updates", -> + @UpdateManager.fetchAndApplyUpdates.calledWith(@project_id, @doc_id).should.equal true + + it "should clear the doc from the process pending set", -> + @RedisManager.clearDocFromPendingUpdatesSet + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "processOutstandingUpdatesWithLock", -> + describe "when the lock is free", -> + beforeEach -> + @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true) + @LockManager.releaseLock = sinon.stub().callsArg(1) + @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub().callsArg(2) + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + + describe "successfully", -> + beforeEach -> + @UpdateManager.processOutstandingUpdatesWithLock @project_id, @doc_id, @callback + + it "should acquire the lock", -> + @LockManager.tryLock.calledWith(@doc_id).should.equal true + + it "should free the lock", -> + @LockManager.releaseLock.calledWith(@doc_id).should.equal true + + it "should process the outstanding updates", -> + @UpdateManager.processOutstandingUpdates.calledWith(@project_id, @doc_id).should.equal true + + it "should do everything with the lock acquired", -> + @UpdateManager.processOutstandingUpdates.calledAfter(@LockManager.tryLock).should.equal true + @UpdateManager.processOutstandingUpdates.calledBefore(@LockManager.releaseLock).should.equal true + + it "should continue processing new updates that may have come in", -> + @UpdateManager.continueProcessingUpdatesWithLock.calledWith(@project_id, @doc_id).should.equal true + + it
"should return the callback", -> + @callback.called.should.equal true + + describe "when processOutstandingUpdates returns an error", -> + beforeEach -> + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, @error = new Error("Something went wrong")) + @UpdateManager.processOutstandingUpdatesWithLock @project_id, @doc_id, @callback + + it "should free the lock", -> + @LockManager.releaseLock.calledWith(@doc_id).should.equal true + + it "should return the error in the callback", -> + @callback.calledWith(@error).should.equal true + + describe "when the lock is taken", -> + beforeEach -> + @LockManager.tryLock = sinon.stub().callsArgWith(1, null, false) + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + @UpdateManager.processOutstandingUpdatesWithLock @project_id, @doc_id, @callback + + it "should return the callback", -> + @callback.called.should.equal true + + it "should not process the updates", -> + @UpdateManager.processOutstandingUpdates.called.should.equal false + + describe "continueProcessingUpdatesWithLock", -> + describe "when there are outstanding updates", -> + beforeEach -> + @RedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 3) + @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) + @UpdateManager.continueProcessingUpdatesWithLock @project_id, @doc_id, @callback + + it "should process the outstanding updates", -> + @UpdateManager.processOutstandingUpdatesWithLock.calledWith(@project_id, @doc_id).should.equal true + + it "should return the callback", -> + @callback.called.should.equal true + + describe "when there are no outstanding updates", -> + beforeEach -> + @RedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 0) + @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) + @UpdateManager.continueProcessingUpdatesWithLock @project_id, @doc_id, @callback + + it "should not try to process the outstanding updates", -> + @UpdateManager.processOutstandingUpdatesWithLock.called.should.equal false + + it "should return the callback", -> + @callback.called.should.equal true + + describe "fetchAndApplyUpdates", -> + describe "with updates", -> + beforeEach -> + @updates = [{p: 1, t: "foo"}] + @updatedDocLines = ["updated", "lines"] + @version = 34 + @RedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) + @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) + @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback + + it "should get the pending updates", -> + @RedisManager.getPendingUpdatesForDoc.calledWith(@doc_id).should.equal true + + it "should apply the updates", -> + @UpdateManager.applyUpdates + .calledWith(@project_id, @doc_id, @updates) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + describe "when there are no updates", -> + beforeEach -> + @updates = [] + @RedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) + @UpdateManager.applyUpdates = sinon.stub() + @RedisManager.setDocument = sinon.stub() + @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback + + it "should not call applyUpdates", -> + @UpdateManager.applyUpdates.called.should.equal false + + it "should call the callback", -> + @callback.called.should.equal true + + describe "applyUpdates", -> + beforeEach -> + @updates = [{p: 1, t: "foo"}] + @updatedDocLines = ["updated", "lines"] + @version = 34 + 
@ShareJsUpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) + @RedisManager.setDocument = sinon.stub().callsArg(3) + @UpdateManager.applyUpdates @project_id, @doc_id, @updates, @callback + + it "should save the document", -> + @RedisManager.setDocument + .calledWith(@doc_id, @updatedDocLines, @version) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + diff --git a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee new file mode 100644 index 0000000000..74e5a689fa --- /dev/null +++ b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee @@ -0,0 +1,83 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/UpdateManager.js" +SandboxedModule = require('sandboxed-module') + +describe 'UpdateManager - lockUpdatesAndDo', -> + beforeEach -> + @UpdateManager = SandboxedModule.require modulePath, requires: + "./LockManager" : @LockManager = {} + "./RedisManager" : @RedisManager = {} + "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} + "logger-sharelatex": @logger = { log: sinon.stub() } + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @arg1 = "argument 1" + @response_arg1 = "response argument 1" + # @response_arg1 must be defined before the stub below captures it, otherwise @method would call back with undefined + @method = sinon.stub().callsArgWith(3, null, @response_arg1) + @callback = sinon.stub() + @LockManager.getLock = sinon.stub().callsArgWith(1, null, true) + @LockManager.releaseLock = sinon.stub().callsArg(1) + + describe "successfully", -> + beforeEach -> + @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub() + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + + it "should lock the doc", -> + @LockManager.getLock + .calledWith(@doc_id) + .should.equal true + + it "should process any outstanding updates", -> + @UpdateManager.processOutstandingUpdates + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the method", -> + @method + .calledWith(@project_id, @doc_id, @arg1) + .should.equal true + + it "should return the method response to the callback", -> + @callback + .calledWith(null, @response_arg1) + .should.equal true + + it "should release the lock", -> + @LockManager.releaseLock + .calledWith(@doc_id) + .should.equal true + + it "should continue processing updates", -> + @UpdateManager.continueProcessingUpdatesWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + describe "when processOutstandingUpdates returns an error", -> + beforeEach -> + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, @error = new Error("Something went wrong")) + @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + + it "should free the lock", -> + @LockManager.releaseLock.calledWith(@doc_id).should.equal true + + it "should return the error in the callback", -> + @callback.calledWith(@error).should.equal true + + describe "when the method returns an error", -> + beforeEach -> + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + @method = sinon.stub().callsArgWith(3, @error = new Error("something went wrong"), @response_arg1) + @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + + it "should free the lock", -> + @LockManager.releaseLock.calledWith(@doc_id).should.equal
true + + it "should return the error in the callback", -> + @callback.calledWith(@error).should.equal true + + + diff --git a/services/document-updater/test/unit/js/module-loader.js b/services/document-updater/test/unit/js/module-loader.js new file mode 100644 index 0000000000..ac4cae7601 --- /dev/null +++ b/services/document-updater/test/unit/js/module-loader.js @@ -0,0 +1,29 @@ +var vm = require('vm'); +var fs = require('fs'); +var path = require('path'); + +module.exports.loadModule = function(filePath, mocks) { + mocks = mocks || {}; + + // this is necessary to allow relative path modules within loaded file + // i.e. requiring ./some inside file /a/b.js needs to be resolved to /a/some + var resolveModule = function(module) { + if (module.charAt(0) !== '.') return module; + return path.resolve(path.dirname(filePath), module); + }; + + var exports = {}; + var context = { + require: function(name) { + return mocks[name] || require(resolveModule(name)); + }, + console: console, + exports: exports, + module: { + exports: exports + } + }; + file = fs.readFileSync(filePath); + vm.runInNewContext(file, context); + return context; +}; From a5d816272e353c34d5f247a88472967ae9ecdbbe Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 12 Feb 2014 10:52:53 +0000 Subject: [PATCH 002/769] Remove unused DocumentUpdater.js file --- .../document-updater/app/DocumentUpdater.js | 181 ------------------ 1 file changed, 181 deletions(-) delete mode 100644 services/document-updater/app/DocumentUpdater.js diff --git a/services/document-updater/app/DocumentUpdater.js b/services/document-updater/app/DocumentUpdater.js deleted file mode 100644 index 11e69368dd..0000000000 --- a/services/document-updater/app/DocumentUpdater.js +++ /dev/null @@ -1,181 +0,0 @@ -(function(exports){ - Ace = require('aceserverside-sharelatex') - Range = Ace.Range - - //look at applyDeltas method - exports.applyChange = function(aceDoc, change, callback) { - var r = change.range; - var range = new Range(r.start.row, r.start.column, r.end.row, r.end.column); - if('insertText'==change.action){ - aceDoc.insert(change.range.start, change.text); - }else if('insertLines'==change.action){ - aceDoc.insertLines(change.range.start.row, change.lines); - }else if('removeText'==change.action){ - aceDoc.remove(range); - }else if('removeLines'==change.action){ - aceDoc.removeLines(range.start.row, range.end.row-1); - } - - if(typeof callback === 'function'){ - callback(null, aceDoc); - }; - } - -})(typeof exports === 'undefined'? 
this['documentUpdater']={}: exports); - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - From 414b669a2c7178082ff34214c0fb2e5182e71d21 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 13 Feb 2014 10:36:26 +0000 Subject: [PATCH 003/769] Point every redis connection to the new config --- services/document-updater/app.coffee | 6 ++++-- services/document-updater/app/coffee/LockManager.coffee | 2 +- services/document-updater/app/coffee/RedisManager.coffee | 2 +- .../document-updater/app/coffee/ShareJsUpdateManager.coffee | 2 +- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index f35b50fe30..1974169f4e 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -7,12 +7,14 @@ RedisManager = require('./app/js/RedisManager.js') UpdateManager = require('./app/js/UpdateManager.js') Keys = require('./app/js/RedisKeyBuilder') redis = require('redis') -rclient = redis.createClient(Settings.redis.port, Settings.redis.host) -rclient.auth(Settings.redis.password) metrics = require('./app/js/Metrics') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" +redisConf = Settings.redis.web +rclient = redis.createClient(redisConf.port, redisConf.host) +rclient.auth(redisConf.password) + app = express() app.configure -> app.use(express.logger(':remote-addr - [:date] - :user-agent ":method :url" :status - :response-time ms')); diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 9a3d6cf761..97c06ad721 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -1,7 +1,7 @@ metrics = require('./Metrics') Settings = require('settings-sharelatex') redis = require('redis') -redisConf = Settings.redis?.web or Settings.redis or {host: "localhost", port: 6379} +redisConf = Settings.redis.web rclient = redis.createClient(redisConf.port, redisConf.host) rclient.auth(redisConf.password) keys = require('./RedisKeyBuilder') diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 79bb06036d..b2c4c9c9d0 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -1,6 +1,6 @@ Settings = require('settings-sharelatex') redis = require('redis') -redisConf = Settings.redis?.web or Settings.redis or {host: "localhost", port: 6379} +redisConf = Settings.redis.web rclient = redis.createClient(redisConf.port, redisConf.host) rclient.auth(redisConf.password) async = require('async') diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index a5b2e88e4f..9cde95492b 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -8,7 +8,7 @@ Keys = require "./RedisKeyBuilder" util = require "util" redis = require('redis') -redisConf = Settings.redis?.web or Settings.redis or {host: "localhost", port: 6379} +redisConf = Settings.redis.web 
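+# assumed shape of the new config block (mirroring the old inline fallback): +# Settings.redis.web = host: "localhost", port: 6379, password: ""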
rclient = redis.createClient(redisConf.port, redisConf.host) rclient.auth(redisConf.password) From 8e8d25e1858b82ec35518ec2e95747ee77ab10d5 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 17 Feb 2014 15:08:46 +0000 Subject: [PATCH 004/769] Point custom npm modules to github --- services/document-updater/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index ff65e225fe..5af385b4ed 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -12,8 +12,8 @@ "async": "", "lynx": "0.0.11", "coffee-script": "1.4.0", - "settings-sharelatex": "git+ssh://git@bitbucket.org:sharelatex/settings-sharelatex.git#master", - "logger-sharelatex": "git+ssh://git@bitbucket.org:sharelatex/logger-sharelatex.git#bunyan", + "settings-sharelatex": "git+ssh://git@github.com:sharelatex/settings-sharelatex.git#master", + "logger-sharelatex": "git+ssh://git@github.com:sharelatex/logger-sharelatex.git#master", "sinon": "~1.5.2", "mongojs": "0.9.11" }, From b45a9968b3ab5855b84165115883b86e7792a732 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 18 Feb 2014 17:23:12 +0000 Subject: [PATCH 005/769] Create README.md --- services/document-updater/README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 services/document-updater/README.md diff --git a/services/document-updater/README.md b/services/document-updater/README.md new file mode 100644 index 0000000000..a58d453357 --- /dev/null +++ b/services/document-updater/README.md @@ -0,0 +1,11 @@ +document-updater-sharelatex +=========================== + +An API for applying incoming updates to documents in real-time. + +License +------- + +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. + +Copyright (c) ShareLaTeX, 2014. From 53929197dd4034b36dc8a475a85be395734f977b Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 18 Feb 2014 17:23:36 +0000 Subject: [PATCH 006/769] Create LICENSE --- services/document-updater/LICENSE | 662 ++++++++++++++++++++++++++++++ 1 file changed, 662 insertions(+) create mode 100644 services/document-updater/LICENSE diff --git a/services/document-updater/LICENSE b/services/document-updater/LICENSE new file mode 100644 index 0000000000..ac8619dcb9 --- /dev/null +++ b/services/document-updater/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<http://www.gnu.org/licenses/>.
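As an illustration of the "Source" link suggested above: for a Node/Express service such as the one in this repository, a minimal sketch in CoffeeScript might look like the following (the /source route, the port, and the redirect target are assumptions for illustration, not something this codebase provides):

    express = require "express"
    app = express()

    # Offer remote users a way to reach the Corresponding Source of the
    # running service, as section 13 requires of modified versions.
    # The route path and target URL here are illustrative assumptions.
    app.get "/source", (req, res) ->
      res.redirect "https://github.com/sharelatex/document-updater-sharelatex"

    app.listen 3000

Linking to the exact running revision, or serving a tarball of it, would meet the same requirement more precisely.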
From 45437c6006bd9cc62fc6c744c9c830c41c509f5f Mon Sep 17 00:00:00 2001 From: goodbest Date: Fri, 21 Feb 2014 23:10:26 +0800 Subject: [PATCH 007/769] change repo URL from git+ssh:// to git+https:// --- services/document-updater/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 5af385b4ed..0bc012d4a6 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -12,8 +12,8 @@ "async": "", "lynx": "0.0.11", "coffee-script": "1.4.0", - "settings-sharelatex": "git+ssh://git@github.com:sharelatex/settings-sharelatex.git#master", - "logger-sharelatex": "git+ssh://git@github.com:sharelatex/logger-sharelatex.git#master", + "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", "sinon": "~1.5.2", "mongojs": "0.9.11" }, From 1a7d2643c0adb1eb12c67e56e898f07e0f15b4a5 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 24 Feb 2014 14:24:44 +0000 Subject: [PATCH 008/769] Create .travis.yml --- services/document-updater/.travis.yml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 services/document-updater/.travis.yml diff --git a/services/document-updater/.travis.yml b/services/document-updater/.travis.yml new file mode 100644 index 0000000000..d97f744899 --- /dev/null +++ b/services/document-updater/.travis.yml @@ -0,0 +1,19 @@ +language: node_js + +node_js: + - "0.10" + +before_install: + - npm install -g grunt-cli + +install: + - npm install + - grunt install + +script: + - grunt test:unit + +services: + - redis-server + - mongodb + From 55edef6d3dde0cb804c7a60257eb16beffb9496e Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 24 Feb 2014 14:37:38 +0000 Subject: [PATCH 009/769] Update .travis.yml --- services/document-updater/.travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/services/document-updater/.travis.yml b/services/document-updater/.travis.yml index d97f744899..29f5884d60 100644 --- a/services/document-updater/.travis.yml +++ b/services/document-updater/.travis.yml @@ -16,4 +16,3 @@ script: services: - redis-server - mongodb - From 86e42ce1de7ff70cb295c5b691ded090073defa4 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 24 Feb 2014 14:43:33 +0000 Subject: [PATCH 010/769] Update README.md --- services/document-updater/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/document-updater/README.md b/services/document-updater/README.md index a58d453357..f86dcda412 100644 --- a/services/document-updater/README.md +++ b/services/document-updater/README.md @@ -3,6 +3,8 @@ document-updater-sharelatex An API for applying incoming updates to documents in real-time. 
+[![Build Status](https://travis-ci.org/sharelatex/document-updater-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/document-updater-sharelatex) + License ------- From 7a977ca597708b6f5b177d150afd7ee189e9b72c Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 10 Feb 2014 15:17:08 +0000 Subject: [PATCH 011/769] Don't store ops in Mongo, just keep them in redis with a timeout --- services/document-updater/Gruntfile.coffee | 4 +- .../app/coffee/DocOpsManager.coffee | 122 +------- .../app/coffee/DocumentManager.coffee | 18 +- .../app/coffee/PersistenceManager.coffee | 5 +- .../app/coffee/RedisManager.coffee | 29 +- .../app/coffee/mongojs.coffee | 7 - .../coffee/ApplyingUpdatesToADocTests.coffee | 66 +--- .../coffee/DeletingADocumentTests.coffee | 5 +- .../coffee/DeletingAProjectTests.coffee | 1 + .../coffee/FlushingAProjectTests.coffee | 1 + .../coffee/FlushingDocsTests.coffee | 53 +--- .../coffee/GettingADocumentTests.coffee | 6 +- .../coffee/SettingADocumentTests.coffee | 15 +- .../coffee/helpers/DocUpdaterClient.coffee | 4 +- .../coffee/helpers/MockWebApi.coffee | 16 +- .../unit/coffee/AddingDocsToMemory.coffee | 4 - .../DocOpsManager/DocOpsManagerTests.coffee | 281 +----------------- .../DocumentManager/flushDocTests.coffee | 12 +- .../coffee/DocumentManager/getDocTests.coffee | 8 +- .../PersistenceManager/getDocTests.coffee | 7 +- .../PersistenceManager/setDocTests.coffee | 12 +- .../RedisManager/prependDocOpsTests.coffee | 32 -- .../coffee/RedisManager/pushDocOpTests.coffee | 67 +++-- 23 files changed, 147 insertions(+), 628 deletions(-) delete mode 100644 services/document-updater/app/coffee/mongojs.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 30dd63e708..a013653187 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -45,7 +45,7 @@ module.exports = (grunt) -> clean: app: ["app/js"] - acceptance_tests: ["test/unit/js"] + acceptance_tests: ["test/acceptance/js"] mochaTest: unit: @@ -103,7 +103,7 @@ module.exports = (grunt) -> grunt.registerTask 'install', "Compile everything when installing as an npm module", ['compile'] - grunt.registerTask 'test:unit', 'Run the unit tests (use --grep= for individual tests)', ['compile:unit_tests', 'mochaTest:unit'] + grunt.registerTask 'test:unit', 'Run the unit tests (use --grep= for individual tests)', ['compile:server', 'compile:unit_tests', 'mochaTest:unit'] grunt.registerTask 'test:acceptance', 'Run the acceptance tests (use --grep= for individual tests)', ['compile:acceptance_tests', 'mochaTest:acceptance'] grunt.registerTask 'run', "Compile and run the document-updater-sharelatex server", ['compile', 'bunyan', 'execute'] diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee b/services/document-updater/app/coffee/DocOpsManager.coffee index 0e90f5b462..70e643dae1 100644 --- a/services/document-updater/app/coffee/DocOpsManager.coffee +++ b/services/document-updater/app/coffee/DocOpsManager.coffee @@ -1,127 +1,11 @@ RedisManager = require "./RedisManager" -mongojs = require("./mongojs") -db = mongojs.db -ObjectId = mongojs.ObjectId -logger = require "logger-sharelatex" -async = require "async" -Metrics = require("./Metrics") module.exports = DocOpsManager = - flushDocOpsToMongo: (project_id, doc_id, _callback = (error) ->) -> - timer = new Metrics.Timer("docOpsManager.flushDocOpsToMongo") - 
callback = (args...) -> - timer.done() - _callback(args...) - - DocOpsManager.getDocVersionInMongo doc_id, (error, mongoVersion) -> + getPreviousDocOps: (project_id, doc_id, start, end, callback = (error, ops) ->) -> + RedisManager.getPreviousDocOps doc_id, start, end, (error, ops) -> return callback(error) if error? - RedisManager.getDocVersion doc_id, (error, redisVersion) -> - return callback(error) if error? - if !mongoVersion? or !redisVersion? or mongoVersion > redisVersion - logger.error doc_id: doc_id, redisVersion: redisVersion, mongoVersion: mongoVersion, "mongo version is ahead of redis" - return callback(new Error("inconsistent versions")) - - RedisManager.getPreviousDocOps doc_id, mongoVersion, -1, (error, ops) -> - return callback(error) if error? - if ops.length != redisVersion - mongoVersion - logger.error doc_id: doc_id, redisVersion: redisVersion, mongoVersion: mongoVersion, opsLength: ops.length, "version difference does not match ops length" - return callback(new Error("inconsistent versions")) - logger.log doc_id: doc_id, redisVersion: redisVersion, mongoVersion: mongoVersion, "flushing doc ops to mongo" - DocOpsManager._appendDocOpsInMongo doc_id, ops, redisVersion, (error) -> - return callback(error) if error? - callback null - - getPreviousDocOps: (project_id, doc_id, start, end, _callback = (error, ops) ->) -> - timer = new Metrics.Timer("docOpsManager.getPreviousDocOps") - callback = (args...) -> - timer.done() - _callback(args...) - - DocOpsManager._ensureOpsAreLoaded project_id, doc_id, start, (error) -> - return callback(error) if error? - RedisManager.getPreviousDocOps doc_id, start, end, (error, ops) -> - return callback(error) if error? - callback null, ops + callback null, ops pushDocOp: (project_id, doc_id, op, callback = (error) ->) -> RedisManager.pushDocOp doc_id, op, callback - _ensureOpsAreLoaded: (project_id, doc_id, backToVersion, callback = (error) ->) -> - RedisManager.getDocVersion doc_id, (error, redisVersion) -> - return callback(error) if error? - RedisManager.getDocOpsLength doc_id, (error, opsLength) -> - return callback(error) if error? - oldestVersionInRedis = redisVersion - opsLength - if oldestVersionInRedis > backToVersion - # _getDocOpsFromMongo(, 4, 6, ...) will return the ops in positions 4 and 5, but not 6. - logger.log doc_id: doc_id, backToVersion: backToVersion, oldestVersionInRedis: oldestVersionInRedis, "loading old ops from mongo" - DocOpsManager._getDocOpsFromMongo doc_id, backToVersion, oldestVersionInRedis, (error, ops) -> - logger.log doc_id: doc_id, backToVersion: backToVersion, oldestVersionInRedis: oldestVersionInRedis, ops: ops, "loaded old ops from mongo" - return callback(error) if error? - RedisManager.prependDocOps doc_id, ops, (error) -> - return callback(error) if error? - callback null - else - logger.log doc_id: doc_id, backToVersion: backToVersion, oldestVersionInRedis: oldestVersionInRedis, "ops already in redis" - callback() - - getDocVersionInMongo: (doc_id, callback = (error, version) ->) -> - t = new Metrics.Timer("mongo-time") - db.docOps.find { - doc_id: ObjectId(doc_id) - }, { - version: 1 - }, (error, docs) -> - t.done() - return callback(error) if error? - if docs.length < 1 or !docs[0].version? 
- return callback null, 0 - else - return callback null, docs[0].version - - APPEND_OPS_BATCH_SIZE: 100 - - _appendDocOpsInMongo: (doc_id, docOps, newVersion, callback = (error) ->) -> - currentVersion = newVersion - docOps.length - batchSize = DocOpsManager.APPEND_OPS_BATCH_SIZE - noOfBatches = Math.ceil(docOps.length / batchSize) - if noOfBatches <= 0 - return callback() - jobs = [] - for batchNo in [0..(noOfBatches-1)] - do (batchNo) -> - jobs.push (callback) -> - batch = docOps.slice(batchNo * batchSize, (batchNo + 1) * batchSize) - currentVersion += batch.length - logger.log doc_id: doc_id, batchNo: batchNo, "appending doc op batch to Mongo" - t = new Metrics.Timer("mongo-time") - db.docOps.update { - doc_id: ObjectId(doc_id) - }, { - $push: docOps: { $each: batch, $slice: -100 } - $set: version: currentVersion - }, { - upsert: true - }, (err)-> - t.done() - callback(err) - - async.series jobs, (error) -> callback(error) - - _getDocOpsFromMongo: (doc_id, start, end, callback = (error, ops) ->) -> - DocOpsManager.getDocVersionInMongo doc_id, (error, version) -> - return callback(error) if error? - offset = - (version - start) # Negative tells mongo to count from the end backwards - limit = end - start - t = new Metrics.Timer("mongo-time") - db.docOps.find { - doc_id: ObjectId(doc_id) - }, { - docOps: $slice: [offset, limit] - }, (error, docs) -> - t.done() - if docs.length < 1 or !docs[0].docOps? - return callback null, [] - else - return callback null, docs[0].docOps - diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index aa64ac3d7f..423693a5e4 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -16,14 +16,12 @@ module.exports = DocumentManager = return callback(error) if error? if !lines? or !version? logger.log project_id: project_id, doc_id: doc_id, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version) -> return callback(error) if error? - DocOpsManager.getDocVersionInMongo doc_id, (error, version) -> + logger.log project_id: project_id, doc_id: doc_id, lines: lines, version: version, "got doc from persistence API" + RedisManager.putDocInMemory project_id, doc_id, lines, version, (error) -> return callback(error) if error? - logger.log project_id: project_id, doc_id: doc_id, lines: lines, version: version, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, (error) -> - return callback(error) if error? - callback null, lines, version + callback null, lines, version else callback null, lines, version @@ -87,12 +85,10 @@ module.exports = DocumentManager = logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing" callback null else - logger.log project_id: project_id, doc_id: doc_id, "flushing doc" - PersistenceManager.setDoc project_id, doc_id, lines, (error) -> + logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" + PersistenceManager.setDoc project_id, doc_id, lines, version, (error) -> return callback(error) if error? - DocOpsManager.flushDocOpsToMongo project_id, doc_id, (error) -> - return callback(error) if error? 
- callback null + callback null flushAndDeleteDoc: (project_id, doc_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.flushAndDeleteDoc") diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index eb1a7366c2..03cbe78cbe 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -28,13 +28,13 @@ module.exports = PersistenceManager = body = JSON.parse body catch e return callback(e) - return callback null, body.lines + return callback null, body.lines, body.version else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - setDoc: (project_id, doc_id, lines, _callback = (error) ->) -> + setDoc: (project_id, doc_id, lines, version, _callback = (error) ->) -> timer = new Metrics.Timer("persistenceManager.setDoc") callback = (args...) -> timer.done() @@ -46,6 +46,7 @@ module.exports = PersistenceManager = method: "POST" body: JSON.stringify lines: lines + version: parseInt(version, 10) headers: "content-type": "application/json" auth: diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b2c4c9c9d0..db7f3afb37 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -9,7 +9,10 @@ keys = require('./RedisKeyBuilder') logger = require('logger-sharelatex') metrics = require('./Metrics') -module.exports = +# Make times easy to read +minutes = 60 # seconds for Redis expire + +module.exports = RedisManager = putDocInMemory : (project_id, doc_id, docLines, version, callback)-> timer = new metrics.Timer("redis.put-doc") logger.log project_id:project_id, doc_id:doc_id, docLines:docLines, version: version, "putting doc in redis" @@ -17,7 +20,6 @@ module.exports = multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) multi.set keys.projectKey({doc_id:doc_id}), project_id multi.set keys.docVersion(doc_id:doc_id), version - multi.del keys.docOps(doc_id:doc_id) multi.sadd keys.allDocs, doc_id multi.sadd keys.docsInProject(project_id:project_id), doc_id multi.exec (err, replys)-> @@ -31,7 +33,6 @@ module.exports = multi.del keys.docLines(doc_id:doc_id) multi.del keys.projectKey(doc_id:doc_id) multi.del keys.docVersion(doc_id:doc_id) - multi.del keys.docOps(doc_id:doc_id) multi.srem keys.docsInProject(project_id:project_id), doc_id multi.srem keys.allDocs, doc_id multi.exec (err, replys)-> @@ -111,7 +112,6 @@ module.exports = rclient.srem keys.docsWithPendingUpdates, doc_key, callback getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) -> - # TODO: parse the ops and return them as objects, not JSON rclient.llen keys.docOps(doc_id: doc_id), (error, length) -> return callback(error) if error? 
rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -141,19 +141,20 @@ module.exports = return callback(e) callback null, ops + DOC_OPS_TTL: 60 * minutes + DOC_OPS_MAX_LENGTH: 100 pushDocOp: (doc_id, op, callback = (error, new_version) ->) -> - # TODO: take a raw op object and JSONify it here jsonOp = JSON.stringify op - rclient.rpush keys.docOps(doc_id: doc_id), jsonOp, (error) -> + multi = rclient.multi() + multi.rpush keys.docOps(doc_id: doc_id), jsonOp + multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL + multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 + multi.incr keys.docVersion(doc_id: doc_id) + multi.exec (error, replys) -> return callback(error) if error? + [_, __, ___, version] = replys - rclient.incr keys.docVersion(doc_id: doc_id), (error, version) -> - return callback(error) if error? - version = parseInt(version, 10) - callback null, version - - prependDocOps: (doc_id, ops, callback = (error) ->) -> - jsonOps = ops.map (op) -> JSON.stringify op - rclient.lpush keys.docOps(doc_id: doc_id), jsonOps.reverse(), callback + version = parseInt(version, 10) + callback null, version getDocOpsLength: (doc_id, callback = (error, length) ->) -> rclient.llen keys.docOps(doc_id: doc_id), callback diff --git a/services/document-updater/app/coffee/mongojs.coffee b/services/document-updater/app/coffee/mongojs.coffee deleted file mode 100644 index 9a1ae72bc0..0000000000 --- a/services/document-updater/app/coffee/mongojs.coffee +++ /dev/null @@ -1,7 +0,0 @@ -Settings = require "settings-sharelatex" -mongojs = require "mongojs" -db = mongojs.connect(Settings.mongo.url, ["docOps"]) -module.exports = - db: db - ObjectId: mongojs.ObjectId - diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 5108a4c2cc..f42d296952 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -2,9 +2,6 @@ sinon = require "sinon" chai = require("chai") chai.should() async = require "async" -mongojs = require "../../../app/js/mongojs" -db = mongojs.db -ObjectId = mongojs.ObjectId MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Applying updates to a doc", -> before -> @lines = ["one", "two", "three"] + @version = 42 @update = doc: @doc_id op: [{ i: "one and a half\n" p: 4 }] - v: 0 + v: @version @result = ["one", "one and a half", "two", "three"] describe "when the document is not loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } sinon.spy MockWebApi, "getDocument" DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> @@ -50,6 +49,7 @@ describe "Applying updates to a doc", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error?
@@ -76,6 +76,7 @@ describe "Applying updates to a doc", -> @lines = ["", "", ""] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: 0 } @updates = [ @@ -92,9 +93,6 @@ describe "Applying updates to a doc", -> { doc_id: @doc_id, v: 10, op: [i: "d", p: 10] } ] @result = ["hello world", "", ""] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - } it "should be able to continue applying updates when the project has been deleted", (done) -> actions = [] @@ -118,6 +116,7 @@ describe "Applying updates to a doc", -> @lines = ["", "", ""] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: 0 } @updates = [ @@ -129,9 +128,6 @@ describe "Applying updates to a doc", -> { doc_id: @doc_id, v: 0, op: [i: "world", p: 1 ] } ] @result = ["hello", "world", ""] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - } it "should be able to continue applying updates when the project has been deleted", (done) -> actions = [] @@ -148,61 +144,13 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @result done() - - describe "when the mongo array has been trimmed", -> - before -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - @lines = ["", "", ""] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - } - - @updates = [ - { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } - { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } - { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } - { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } - { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } - { doc_id: @doc_id, v: 3, op: [i: "world", p: 4 ] } - ] - @result = ["hello", "world", ""] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - } - - it "should be able to reload the required ops from the trimmed mongo array", (done) -> - actions = [] - # Apply first set of ops - for update in @updates.slice(0,5) - do (update) => - actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback - # Delete doc from redis and trim ops back to version 3 - actions.push (callback) => DocUpdaterClient.deleteDoc @project_id, @doc_id, callback - actions.push (callback) => - db.docOps.update({doc_id: ObjectId(@doc_id)}, {$push: docOps: { $each: [], $slice: -2 }}, callback) - # Apply older update back from version 3 - for update in @updates.slice(5) - do (update) => - actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback - # Flush ops to mongo - actions.push (callback) => DocUpdaterClient.flushDoc @project_id, @doc_id, callback - - async.series actions, (error) => - throw error if error? - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - db.docOps.find {doc_id: ObjectId(@doc_id)}, (error, docOps) => - # Check mongo array has been trimmed - docOps = docOps[0] - docOps.docOps.length.should.equal 3 - # Check ops have all be applied properly - doc.lines.should.deep.equal @result - done() describe "with a broken update", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) -> throw error if error? 
diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee index d28f37cd6d..171dfcc6e2 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee @@ -8,13 +8,14 @@ DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Deleting a document", -> before -> @lines = ["one", "two", "three"] + @version = 42 @update = doc: @doc_id op: [{ i: "one and a half\n" p: 4 }] - v: 0 + v: @version @result = ["one", "one and a half", "two", "three"] describe "when the updated doc exists in the doc updater", -> @@ -22,6 +23,7 @@ describe "Deleting a document", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } sinon.spy MockWebApi, "setDocumentLines" sinon.spy MockWebApi, "getDocument" @@ -60,6 +62,7 @@ describe "Deleting a document", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } sinon.spy MockWebApi, "setDocumentLines" sinon.spy MockWebApi, "getDocument" diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 7b07ed6a25..5bfc5a6ee8 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -35,6 +35,7 @@ describe "Deleting a project", -> for doc in @docs MockWebApi.insertDoc @project_id, doc.id, { lines: doc.lines + version: doc.update.v } describe "with documents which have been updated", -> diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee index 02b44e3fd6..4949d529a2 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee @@ -35,6 +35,7 @@ describe "Flushing a project", -> for doc in @docs MockWebApi.insertDoc @project_id, doc.id, { lines: doc.lines + version: doc.update.v } describe "with documents which have been updated", -> diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index aaaef99936..01db25fb40 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -5,23 +5,22 @@ async = require "async" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" -mongojs = require "../../../app/js/mongojs" -db = mongojs.db -ObjectId = mongojs.ObjectId describe "Flushing a doc to Mongo", -> before -> @lines = ["one", "two", "three"] + @version = 42 @update = doc: @doc_id op: [{ i: "one and a half\n" p: 4 }] - v: 0 + v: @version @result = ["one", "one and a half", "two", "three"] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } describe "when the updated doc exists in the doc updater", -> @@ -29,8 +28,10 @@ describe "Flushing a doc to Mongo", -> [@project_id, @doc_id] = 
[DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocumentVersion" DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) => throw error if error? @@ -40,58 +41,36 @@ describe "Flushing a doc to Mongo", -> after -> MockWebApi.setDocumentLines.restore() + MockWebApi.setDocumentVersion.restore() - it "should flush the updated document to the web api", -> + it "should flush the updated doc lines to the web api", -> MockWebApi.setDocumentLines .calledWith(@project_id, @doc_id, @result) .should.equal true - it "should flush the doc ops to Mongo", (done) -> - db.docOps.find doc_id: ObjectId(@doc_id), (error, docs) => - doc = docs[0] - doc.docOps[0].op.should.deep.equal @update.op - done() - - describe "when the doc has a large number of ops to be flushed", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - } - @updates = [] - for v in [0..999] - @updates.push - doc_id: @doc_id, - op: [i: v.toString(), p: 0] - v: v - - DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.flushDoc @project_id, @doc_id, done - , 200 - - it "should flush the doc ops to Mongo in order", (done) -> - db.docOps.find doc_id: ObjectId(@doc_id), (error, docs) => - doc = docs[0] - updates = @updates.slice(-100) - for update, i in doc.docOps - update.op.should.deep.equal updates[i].op - done() + it "should flush the updated doc version to the web api", -> + MockWebApi.setDocumentVersion + .calledWith(@project_id, @doc_id, @version + 1) + .should.equal true describe "when the doc does not exist in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocumentVersion" DocUpdaterClient.flushDoc @project_id, @doc_id, done after -> MockWebApi.setDocumentLines.restore() + MockWebApi.setDocumentVersion.restore() it "should not flush the doc to the web api", -> MockWebApi.setDocumentLines.called.should.equal false + MockWebApi.setDocumentVersion.called.should.equal false + diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee index 0e8456e45f..43c039a802 100644 --- a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee @@ -11,6 +11,7 @@ describe "Getting a document", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines = ["one", "two", "three"] + version: @version = 42 } sinon.spy MockWebApi, "getDocument" DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() @@ -26,8 +27,8 @@ describe "Getting a document", -> it "should return the document lines", -> @returnedDoc.lines.should.deep.equal @lines - it "should return the document at version 0", -> - @returnedDoc.version.should.equal 0 + it "should return the document at its current version", -> + @returnedDoc.version.should.equal @version describe "when the 
document is already loaded", -> before (done) -> @@ -55,6 +56,7 @@ describe "Getting a document", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines = ["one", "two", "three"] + version: 0 } @updates = for v in [0..99] diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index cc0f30834a..5218a15281 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -9,22 +9,25 @@ describe "Setting a document", -> before -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] @lines = ["one", "two", "three"] + @version = 42 @update = doc: @doc_id op: [{ i: "one and a half\n" p: 4 }] - v: 0 + v: @version @result = ["one", "one and a half", "two", "three"] @newLines = ["these", "are", "the", "new", "lines"] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines + version: @version } describe "when the updated doc exists in the doc updater", -> before (done) -> sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocumentVersion" DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => @@ -37,15 +40,21 @@ describe "Setting a document", -> after -> MockWebApi.setDocumentLines.restore() + MockWebApi.setDocumentVersion.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 - it "should send the updated document to the web api", -> + it "should send the updated doc lines to the web api", -> MockWebApi.setDocumentLines .calledWith(@project_id, @doc_id, @newLines) .should.equal true + it "should send the updated doc version to the web api", -> + MockWebApi.setDocumentVersion + .calledWith(@project_id, @doc_id, @version + 2) + .should.equal true + it "should update the lines in the doc updater", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @newLines @@ -53,6 +62,6 @@ describe "Setting a document", -> it "should bump the version in the doc updater", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.version.should.equal 2 + doc.version.should.equal @version + 2 done() diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 4ddef90d26..f4789854c1 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -4,7 +4,9 @@ async = require "async" module.exports = DocUpdaterClient = randomId: () -> - return require("../../../../app/js/mongojs").ObjectId().toString() + chars = for i in [1..24] + Math.random().toString(16)[2] + return chars.join("") sendUpdate: (project_id, doc_id, update, callback = (error) ->) -> rclient.rpush "PendingUpdates:#{doc_id}", JSON.stringify(update), (error)-> diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index 7d50eb8377..21a914dc4b 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ 
b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -14,6 +14,11 @@ module.exports = MockWebApi = @docs["#{project_id}:#{doc_id}"].lines = lines callback null + setDocumentVersion: (project_id, doc_id, version, callback = (error) ->) -> + @docs["#{project_id}:#{doc_id}"] ||= {} + @docs["#{project_id}:#{doc_id}"].version = version + callback null + getDocument: (project_id, doc_id, callback = (error, doc) ->) -> callback null, @docs["#{project_id}:#{doc_id}"] @@ -28,11 +33,12 @@ module.exports = MockWebApi = res.send 404 app.post "/project/:project_id/doc/:doc_id", express.bodyParser(), (req, res, next) => - @setDocumentLines req.params.project_id, req.params.doc_id, req.body.lines, (error) -> - if error? - res.send 500 - else - res.send 204 + MockWebApi.setDocumentLines req.params.project_id, req.params.doc_id, req.body.lines, (error1) -> + MockWebApi.setDocumentVersion req.params.project_id, req.params.doc_id, req.body.version, (error2) -> + if error1? or error2? + res.send 500 + else + res.send 204 app.listen(3000) diff --git a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee index 1ca00bb305..019f32bc74 100644 --- a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee +++ b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee @@ -20,9 +20,6 @@ describe 'putting a doc into memory', ()-> potentialSAdds[keys.allDocs] = doc_id potentialSAdds[keys.docsInProject(project_id:project_id)] = doc_id - potentialDels = {} - potentialDels[keys.docOps(doc_id:doc_id)] = true - mocks = "logger-sharelatex": log:-> redis: @@ -53,6 +50,5 @@ describe 'putting a doc into memory', ()-> redisManager.putDocInMemory project_id, doc_id, lines, version, ()-> assert.deepEqual potentialSets, {} assert.deepEqual potentialSAdds, {} - assert.deepEqual potentialDels, {} done() diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee index 83e0ff48cf..decca4e14d 100644 --- a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee @@ -3,109 +3,15 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/DocOpsManager.js" SandboxedModule = require('sandboxed-module') -ObjectId = require("../../../../app/js/mongojs").ObjectId describe "DocOpsManager", -> beforeEach -> - @doc_id = ObjectId().toString() + @doc_id = "doc-id" @project_id = "project-id" @callback = sinon.stub() @DocOpsManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} - "./mongojs": - db: @db = { docOps: {} } - ObjectId: ObjectId "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - - describe "flushDocOpsToMongo", -> - describe "when versions are consistent", -> - beforeEach -> - @mongo_version = 40 - @redis_version = 42 - @ops = [ "mock-op-1", "mock-op-2" ] - @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @mongo_version) - @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redis_version) - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocOpsManager._appendDocOpsInMongo = sinon.stub().callsArg(3) - @DocOpsManager.flushDocOpsToMongo @project_id, @doc_id,
@callback - - it "should get the version from Mongo", -> - @DocOpsManager.getDocVersionInMongo - .calledWith(@doc_id) - .should.equal true - - it "should get the version from REdis", -> - @RedisManager.getDocVersion - .calledWith(@doc_id) - .should.equal true - - it "should get all doc ops since the version in Mongo", -> - @RedisManager.getPreviousDocOps - .calledWith(@doc_id, @mongo_version, -1) - .should.equal true - - it "should update Mongo with the new ops", -> - @DocOpsManager._appendDocOpsInMongo - .calledWith(@doc_id, @ops, @redis_version) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the number of ops does not match the difference in versions", -> - beforeEach -> - @mongo_version = 40 - @redis_version = 45 - @ops = [ "mock-op-1", "mock-op-2" ] - @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @mongo_version) - @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redis_version) - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocOpsManager._appendDocOpsInMongo = sinon.stub().callsArg(3) - @DocOpsManager.flushDocOpsToMongo @project_id, @doc_id, @callback - - it "should call the callback with an error", -> - @callback.calledWith(new Error("inconsistet versions")).should.equal true - - it "should log an error", -> - @logger.error - .calledWith(doc_id: @doc_id, mongoVersion: @mongo_version, redisVersion: @redis_version, opsLength: @ops.length, "version difference does not match ops length") - .should.equal true - - it "should not modify mongo", -> - @DocOpsManager._appendDocOpsInMongo.called.should.equal false - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when redis version is behind mongo version", -> - beforeEach -> - @mongo_version = 40 - @redis_version = 30 - @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @mongo_version) - @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redis_version) - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocOpsManager._appendDocOpsInMongo = sinon.stub().callsArg(3) - @DocOpsManager.flushDocOpsToMongo @project_id, @doc_id, @callback - - it "should call the callback with an error", -> - @callback.calledWith(new Error("inconsistet versions")).should.equal true - - it "should log an error", -> - @logger.error - .calledWith(doc_id: @doc_id, mongoVersion: @mongo_version, redisVersion: @redis_version, "mongo version is ahead of redis") - .should.equal true - - it "should not modify mongo", -> - @DocOpsManager._appendDocOpsInMongo.called.should.equal false - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true describe "getPreviousDocOps", -> beforeEach -> @@ -113,14 +19,8 @@ describe "DocOpsManager", -> @start = 30 @end = 32 @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocOpsManager._ensureOpsAreLoaded = sinon.stub().callsArg(3) @DocOpsManager.getPreviousDocOps @project_id, @doc_id, @start, @end, @callback - it "should ensure the ops are loaded back far enough", -> - @DocOpsManager._ensureOpsAreLoaded - .calledWith(@project_id, @doc_id, @start) - .should.equal true - it "should get the previous doc ops", -> @RedisManager.getPreviousDocOps .calledWith(@doc_id, @start, @end) @@ -128,182 +28,3 @@ describe "DocOpsManager", -> it "should call the 
callback with the ops", -> @callback.calledWith(null, @ops).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "_ensureOpsAreLoaded", -> - describe "when the ops are not loaded", -> - beforeEach -> - @redisVersion = 42 - @redisOpsLength = 10 - @backToVersion = 30 - @ops = [ "mock-op-1", "mock-op-2" ] - @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redisVersion) - @RedisManager.getDocOpsLength = sinon.stub().callsArgWith(1, null, @redisOpsLength) - @DocOpsManager._getDocOpsFromMongo = sinon.stub().callsArgWith(3, null, @ops) - @RedisManager.prependDocOps = sinon.stub().callsArgWith(2, null) - @DocOpsManager._ensureOpsAreLoaded @project_id, @doc_id, @backToVersion, @callback - - it "should get the doc version from redis", -> - @RedisManager.getDocVersion - .calledWith(@doc_id) - .should.equal true - - it "should get the doc ops length in redis", -> - @RedisManager.getDocOpsLength - .calledWith(@doc_id) - .should.equal true - - it "should get the doc ops that need loading from Mongo", -> - @DocOpsManager._getDocOpsFromMongo - .calledWith(@doc_id, @backToVersion, @redisVersion - @redisOpsLength) - .should.equal true - - it "should prepend the retrieved ops to redis", -> - @RedisManager.prependDocOps - .calledWith(@doc_id, @ops) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - - describe "when the ops are loaded", -> - beforeEach -> - @redisVersion = 42 - @redisOpsLength = 10 - @backToVersion = 35 - @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null, @redisVersion) - @RedisManager.getDocOpsLength = sinon.stub().callsArgWith(1, null, @redisOpsLength) - @DocOpsManager._getDocOpsFromMongo = sinon.stub().callsArgWith(3, null, @ops) - @RedisManager.prependDocOps = sinon.stub().callsArgWith(2, null) - @DocOpsManager._ensureOpsAreLoaded @project_id, @doc_id, @backToVersion, @callback - - it "should not need to get the docs from Mongo or put any into redis", -> - @DocOpsManager._getDocOpsFromMongo.called.should.equal false - @RedisManager.prependDocOps.called.should.equal false - - it "should call the callback", -> - @callback.called.should.equal true - - describe "getDocVersionInMongo", -> - describe "when the doc exists", -> - beforeEach -> - @doc = - version: @version = 42 - @db.docOps.find = sinon.stub().callsArgWith(2, null, [@doc]) - @DocOpsManager.getDocVersionInMongo @doc_id, @callback - - it "should look for the doc in the database", -> - @db.docOps.find - .calledWith({ doc_id: ObjectId(@doc_id) }, {version: 1}) - .should.equal true - - it "should call the callback with the version", -> - @callback.calledWith(null, @version).should.equal true - - describe "when the doc doesn't exist", -> - beforeEach -> - @db.docOps.find = sinon.stub().callsArgWith(2, null, []) - @DocOpsManager.getDocVersionInMongo @doc_id, @callback - - it "should call the callback with 0", -> - @callback.calledWith(null, 0).should.equal true - - describe "_appendDocOpsInMongo", -> - describe "with a small set of updates", -> - beforeEach (done) -> - @ops = [ "mock-op-1", "mock-op-2" ] - @version = 42 - @db.docOps.update = sinon.stub().callsArg(3) - @DocOpsManager._appendDocOpsInMongo @doc_id, @ops, @version, (error) => - @callback(error) - done() - - it "should update the database", -> - @db.docOps.update - .calledWith({ - doc_id: ObjectId(@doc_id) - }, { - $push: docOps: { $each: @ops, $slice: -100 } - $set: version: @version - }, { - upsert: true - }) - .should.equal true - - 
it "should call the callbak", -> - @callback.called.should.equal true - - describe "with a large set of updates", -> - beforeEach (done) -> - @ops = [ "mock-op-1", "mock-op-2", "mock-op-3", "mock-op-4", "mock-op-5" ] - @version = 42 - @DocOpsManager.APPEND_OPS_BATCH_SIZE = 2 - @db.docOps.update = sinon.stub().callsArg(3) - @DocOpsManager._appendDocOpsInMongo @doc_id, @ops, @version, (error) => - @callback(error) - done() - - it "should update the database in batches", -> - @db.docOps.update - .calledWith({ doc_id: ObjectId(@doc_id) }, { - $push: docOps: { $each: @ops.slice(0,2), $slice: -100 } - $set: version: @version - 3 - }, { upsert: true }) - .should.equal true - @db.docOps.update - .calledWith({ doc_id: ObjectId(@doc_id) }, { - $push: docOps: { $each: @ops.slice(2,4), $slice: -100 } - $set: version: @version - 1 - }, { upsert: true }) - .should.equal true - @db.docOps.update - .calledWith({ doc_id: ObjectId(@doc_id) }, { - $push: docOps: { $each: @ops.slice(4,5), $slice: -100 } - $set: version: @version - }, { upsert: true }) - .should.equal true - - it "should call the callbak", -> - @callback.called.should.equal true - - describe "with no updates", -> - beforeEach (done) -> - @ops = [] - @version = 42 - @db.docOps.update = sinon.stub().callsArg(3) - @DocOpsManager._appendDocOpsInMongo @doc_id, @ops, @version, (error) => - @callback(error) - done() - - it "should not try to update the database", -> - @db.docOps.update.called.should.equal false - - describe "_getDocsOpsFromMongo", -> - beforeEach -> - @version = 42 - @start = 32 - @limit = 5 - @doc = - docOps: ["mock-ops"] - @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) - @db.docOps.find = sinon.stub().callsArgWith(2, null, [@doc]) - @DocOpsManager._getDocOpsFromMongo @doc_id, @start, @start + @limit, @callback - - it "should get the current version", -> - @DocOpsManager.getDocVersionInMongo - .calledWith(@doc_id) - .should.equal true - - it "should get the doc ops", -> - @db.docOps.find - .calledWith({ doc_id: ObjectId(@doc_id) }, { - docOps: $slice: [-(@version - @start), @limit] - }) - .should.equal true - - it "should return the ops", -> - @callback.calledWith(null, @doc.docOps).should.equal true - - diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee index 079341a536..6bdba1a2b7 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee @@ -23,8 +23,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) - @PersistenceManager.setDoc = sinon.stub().callsArgWith(3) - @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) + @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback it "should get the doc from redis", -> @@ -34,14 +33,9 @@ describe "DocumentUpdater - flushDocIfLoaded", -> it "should write the doc lines to the persistence layer", -> @PersistenceManager.setDoc - .calledWith(@project_id, @doc_id, @lines) + .calledWith(@project_id, @doc_id, @lines, @version) .should.equal true - it "should write the doc ops to mongo", -> - @DocOpsManager.flushDocOpsToMongo - .calledWith(@project_id, @doc_id) - .should.equal true - it "should call the callback 
without error", -> @callback.calledWith(null).should.equal true @@ -51,7 +45,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> describe "when the document is not in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, null, null) - @PersistenceManager.setDoc = sinon.stub().callsArgWith(3) + @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee index 93de1725fa..ea68890199 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee @@ -40,8 +40,7 @@ describe "DocumentUpdater - getDoc", -> describe "when the doc does not exist in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines) - @DocOpsManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) @RedisManager.putDocInMemory = sinon.stub().callsArg(4) @DocumentManager.getDoc @project_id, @doc_id, @callback @@ -50,11 +49,6 @@ describe "DocumentUpdater - getDoc", -> .calledWith(@doc_id) .should.equal true - it "should get the doc version from Mongo", -> - @DocOpsManager.getDocVersionInMongo - .calledWith(@doc_id) - .should.equal true - it "should get the doc from the PersistenceManager", -> @PersistenceManager.getDoc .calledWith(@project_id, @doc_id) diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee index c5cfc35ac8..0bb881b3ee 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee @@ -16,6 +16,7 @@ describe "PersistenceManager.getDoc", -> @project_id = "project-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] + @version = 42 @callback = sinon.stub() @Settings.apis = web: @@ -25,7 +26,7 @@ describe "PersistenceManager.getDoc", -> describe "with a successful response from the web api", -> beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) @PersistenceManager.getDoc(@project_id, @doc_id, @callback) it "should call the web api", -> @@ -43,8 +44,8 @@ describe "PersistenceManager.getDoc", -> }) .should.equal true - it "should call the callback with the doc lines", -> - @callback.calledWith(null, @lines).should.equal true + it "should call the callback with the doc lines and version", -> + @callback.calledWith(null, @lines, @version).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee index cd9d962d3b..82850e3074 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee +++ 
b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee @@ -16,6 +16,7 @@ describe "PersistenceManager.setDoc", -> @project_id = "project-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] + @version = 42 @callback = sinon.stub() @Settings.apis = web: @@ -25,8 +26,8 @@ describe "PersistenceManager.setDoc", -> describe "with a successful response from the web api", -> beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @callback) + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) it "should call the web api", -> @request @@ -34,6 +35,7 @@ describe "PersistenceManager.setDoc", -> url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" body: JSON.stringify lines: @lines + version: @version method: "POST" headers: "content-type": "application/json" @@ -54,7 +56,7 @@ describe "PersistenceManager.setDoc", -> describe "when request returns an error", -> beforeEach -> @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) it "should return the error", -> @callback.calledWith(@error).should.equal true @@ -65,7 +67,7 @@ describe "PersistenceManager.setDoc", -> describe "when the request returns 404", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) it "should return a NotFoundError", -> @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true @@ -76,7 +78,7 @@ describe "PersistenceManager.setDoc", -> describe "when the request returns an error status code", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) it "should return an error", -> @callback.calledWith(new Error("web api error")).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee deleted file mode 100644 index b4a8192d12..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee +++ /dev/null @@ -1,32 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager" -SandboxedModule = require('sandboxed-module') - -describe "RedisManager.clearDocFromPendingUpdatesSet", -> - beforeEach -> - @doc_id = "document-id" - @callback = sinon.stub() - @RedisManager = SandboxedModule.require modulePath, requires: - "redis" : createClient: () => - @rclient = auth:-> - - @rclient.lpush = sinon.stub().callsArg(2) - @ops = [ - { "mock" : "op-1" }, - { "mock" : "op-2" } - ] - @reversedJsonOps = @ops.map((op) -> JSON.stringify op).reverse() - @RedisManager.prependDocOps(@doc_id, @ops, @callback) - - it "should push the reversed JSONed ops", -> - @rclient.lpush - .calledWith("DocOps:#{@doc_id}", @reversedJsonOps) - .should.equal true - - it "should return the callback", -> - @callback.called.should.equal true - - - diff --git 
a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee index 0c76730437..247862a257 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee @@ -1,37 +1,54 @@ sinon = require('sinon') chai = require('chai') should = chai.should() -modulePath = "../../../../app/js/RedisManager" +modulePath = "../../../../app/js/RedisManager.js" SandboxedModule = require('sandboxed-module') -describe "RedisManager.getPreviousDocOpsTests", -> +describe "RedisManager.pushDocOp", -> beforeEach -> - @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis" : createClient: () => + "redis": createClient: () => @rclient = - auth: -> - multi: => @rclient + auth: () -> + multi: () => @rclient + "logger-sharelatex": @logger = {log: sinon.stub()} @doc_id = "doc-id-123" + @callback = sinon.stub() + @rclient.rpush = sinon.stub() + @rclient.expire = sinon.stub() + @rclient.incr = sinon.stub() + @rclient.ltrim = sinon.stub() + + describe "successfully", -> + beforeEach -> + @op = { op: [{ i: "foo", p: 4 }] } + @version = 42 + _ = null + @rclient.exec = sinon.stub().callsArgWith(0, null, [_, _, _, @version]) + @RedisManager.pushDocOp @doc_id, @op, @callback + + it "should push the doc op into the doc ops list", -> + @rclient.rpush + .calledWith("DocOps:#{@doc_id}", JSON.stringify(@op)) + .should.equal true + + it "should renew the expiry ttl on the doc ops array", -> + @rclient.expire + .calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) + .should.equal true + + it "should truncate the list to 100 members", -> + @rclient.ltrim + .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) + .should.equal true + + it "should increment the version number", -> + @rclient.incr + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it "should call the callback with the version number", -> + @callback.calledWith(null, parseInt(@version, 10)).should.equal true - beforeEach -> - @version = 70 - @op = - { "mock": "op-1" } - @jsonOp = JSON.stringify @op - @rclient.rpush = sinon.stub().callsArgWith(2, null) - @rclient.incr = sinon.stub().callsArgWith(1, null, @version.toString()) - @RedisManager.pushDocOp(@doc_id, @op, @callback) - it "should push the op into redis", -> - @rclient.rpush - .calledWith("DocOps:#{@doc_id}", @jsonOp) - .should.equal true - it "should increment the version number", -> - @rclient.incr - .calledWith("DocVersion:#{@doc_id}") - .should.equal true - - it "should call the callback with the new version", -> - @callback.calledWith(null, @version).should.equal true From 6f19f46d961d92737a7df011a751f8fbe463c6fe Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 24 Feb 2014 16:37:45 +0000 Subject: [PATCH 012/769] Create method for pushing uncompressed ops into redis --- .../app/coffee/DocOpsManager.coffee | 1 + .../app/coffee/RedisKeyBuilder.coffee | 2 ++ .../app/coffee/RedisManager.coffee | 4 +++ .../pushUncompressedHistoryOpTests.coffee | 34 +++++++++++++++++++ 4 files changed, 41 insertions(+) create mode 100644 services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee b/services/document-updater/app/coffee/DocOpsManager.coffee index 0e90f5b462..032f9f6566 100644 --- 
a/services/document-updater/app/coffee/DocOpsManager.coffee +++ b/services/document-updater/app/coffee/DocOpsManager.coffee @@ -44,6 +44,7 @@ module.exports = DocOpsManager = callback null, ops pushDocOp: (project_id, doc_id, op, callback = (error) ->) -> + console.log "PUSHING OP", op RedisManager.pushDocOp doc_id, op, callback _ensureOpsAreLoaded: (project_id, doc_id, backToVersion, callback = (error) ->) -> diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee index a444341ea1..2bd1ed08c8 100644 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -8,12 +8,14 @@ DOCLINES = "doclines" DOCOPS = "DocOps" DOCVERSION = "DocVersion" DOCIDSWITHPENDINGUPDATES = "DocsWithPendingUpdates" +UNCOMPRESSED_HISTORY_OPS = "UncompressedHistoryOps" module.exports = allDocs : ALLDOCSKEY docLines : (op)-> DOCLINES+":"+op.doc_id docOps : (op)-> DOCOPS+":"+op.doc_id + uncompressedHistoryOp: (op) -> UNCOMPRESSED_HISTORY_OPS + ":" + op.doc_id docVersion : (op)-> DOCVERSION+":"+op.doc_id projectKey : (op)-> PROJECTKEY+":"+op.doc_id blockingKey : (op)-> BLOCKINGKEY+":"+op.doc_id diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b2c4c9c9d0..5f6c880cee 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -155,6 +155,10 @@ module.exports = jsonOps = ops.map (op) -> JSON.stringify op rclient.lpush keys.docOps(doc_id: doc_id), jsonOps.reverse(), callback + pushUncompressedHistoryOp: (doc_id, op, callback = (error) ->) -> + jsonOp = JSON.stringify op + rclient.rpush keys.uncompressedHistoryOp(doc_id: doc_id), jsonOp, callback + getDocOpsLength: (doc_id, callback = (error, length) ->) -> rclient.llen keys.docOps(doc_id: doc_id), callback diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee new file mode 100644 index 0000000000..3b743db6e4 --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -0,0 +1,34 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager.js" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.pushUncompressedHistoryOp", -> + beforeEach -> + @RedisManager = SandboxedModule.require modulePath, requires: + "redis": createClient: () => + @rclient = + auth: () -> + multi: () => @rclient + "logger-sharelatex": @logger = {log: sinon.stub()} + @doc_id = "doc-id-123" + @callback = sinon.stub() + @rclient.rpush = sinon.stub() + + describe "successfully", -> + beforeEach -> + @op = { op: [{ i: "foo", p: 4 }] } + @rclient.rpush = sinon.stub().callsArg(2) + @RedisManager.pushUncompressedHistoryOp @doc_id, @op, @callback + + it "should push the doc op into the doc ops list", -> + @rclient.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + + From b13f70eadb643a31c75a94c7e740d08ed3d0945f Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 24 Feb 2014 16:52:12 +0000 Subject: [PATCH 013/769] Push ops into uncompressedHistoryOps list --- 
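Note: with this change every applied op lands in two redis lists: "DocOps:<doc_id>", which ShareJS reads back to transform concurrent updates, and "UncompressedHistoryOps:<doc_id>", which the track changes service will consume. A minimal sketch of reading the history list back (the key name comes from RedisKeyBuilder above; the doc_id value is illustrative only):

    rclient = require("redis").createClient()
    doc_id = "doc-id-123" # illustrative only
    rclient.lrange "UncompressedHistoryOps:#{doc_id}", 0, -1, (error, jsonOps) ->
      throw error if error?
      # Ops are stored JSON-encoded, oldest first, since rpush appends to the tail.
      ops = (JSON.parse(jsonOp) for jsonOp in jsonOps)
      console.log "#{ops.length} ops waiting for the track changes api"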
services/document-updater/Gruntfile.coffee | 2 +- .../app/coffee/DocOpsManager.coffee | 5 +++-- .../app/coffee/ShareJsDB.coffee | 2 +- .../DocOpsManager/DocOpsManagerTests.coffee | 20 +++++++++++++++++++ 4 files changed, 25 insertions(+), 4 deletions(-) diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 30dd63e708..717ebd464c 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -103,7 +103,7 @@ module.exports = (grunt) -> grunt.registerTask 'install', "Compile everything when installing as an npm module", ['compile'] - grunt.registerTask 'test:unit', 'Run the unit tests (use --grep= for individual tests)', ['compile:unit_tests', 'mochaTest:unit'] + grunt.registerTask 'test:unit', 'Run the unit tests (use --grep= for individual tests)', ['compile:server', 'compile:unit_tests', 'mochaTest:unit'] grunt.registerTask 'test:acceptance', 'Run the acceptance tests (use --grep= for individual tests)', ['compile:acceptance_tests', 'mochaTest:acceptance'] grunt.registerTask 'run', "Compile and run the document-updater-sharelatex server", ['compile', 'bunyan', 'execute'] diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee b/services/document-updater/app/coffee/DocOpsManager.coffee index 032f9f6566..43fd7a4aac 100644 --- a/services/document-updater/app/coffee/DocOpsManager.coffee +++ b/services/document-updater/app/coffee/DocOpsManager.coffee @@ -44,8 +44,9 @@ module.exports = DocOpsManager = callback null, ops pushDocOp: (project_id, doc_id, op, callback = (error) ->) -> - console.log "PUSHING OP", op - RedisManager.pushDocOp doc_id, op, callback + RedisManager.pushDocOp doc_id, op, (error) -> + return callback(error) if error? + RedisManager.pushUncompressedHistoryOp doc_id, op, callback _ensureOpsAreLoaded: (project_id, doc_id, backToVersion, callback = (error) ->) -> RedisManager.getDocVersion doc_id, (error, redisVersion) -> diff --git a/services/document-updater/app/coffee/ShareJsDB.coffee b/services/document-updater/app/coffee/ShareJsDB.coffee index 3704121b6d..6eaf21846c 100644 --- a/services/document-updater/app/coffee/ShareJsDB.coffee +++ b/services/document-updater/app/coffee/ShareJsDB.coffee @@ -23,7 +23,7 @@ module.exports = ShareJsDB = writeOp: (doc_key, opData, callback) -> [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) - DocOpsManager.pushDocOp project_id, doc_id, {op:opData.op, meta:opData.meta}, (error, version) -> + DocOpsManager.pushDocOp project_id, doc_id, opData, (error, version) -> return callback error if error? 
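      # pushDocOp returns the doc's version after the append. The op was
      # generated against version opData.v, so a clean append must land on
      # opData.v + 1; anything else means another writer interleaved.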
if version == opData.v + 1 diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee index 83e0ff48cf..c26171d98f 100644 --- a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee @@ -306,4 +306,24 @@ describe "DocOpsManager", -> it "should return the ops", -> @callback.calledWith(null, @doc.docOps).should.equal true + describe "pushDocOp", -> + beforeEach -> + @op = "mock-op" + @RedisManager.pushDocOp = sinon.stub().callsArg(2) + @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArg(2) + @DocOpsManager.pushDocOp @project_id, @doc_id, @op, @callback + + it "should push the op in to the docOps list", -> + @RedisManager.pushDocOp + .calledWith(@doc_id, @op) + .should.equal true + + it "should push the op into the pushUncompressedHistoryOp", -> + @RedisManager.pushUncompressedHistoryOp + .calledWith(@doc_id, @op) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + From dfd3ec993b64fe49c1dcaa5d33b186ae419fd94f Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 26 Feb 2014 14:49:52 +0000 Subject: [PATCH 014/769] Ensure version is still returned from redis --- services/document-updater/app/coffee/DocOpsManager.coffee | 6 ++++-- services/document-updater/app/coffee/ShareJsDB.coffee | 7 ++++--- .../unit/coffee/DocOpsManager/DocOpsManagerTests.coffee | 6 +++--- .../test/unit/coffee/ShareJsDB/WriteOpsTests.coffee | 7 ++----- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee b/services/document-updater/app/coffee/DocOpsManager.coffee index 43fd7a4aac..971db83358 100644 --- a/services/document-updater/app/coffee/DocOpsManager.coffee +++ b/services/document-updater/app/coffee/DocOpsManager.coffee @@ -44,9 +44,11 @@ module.exports = DocOpsManager = callback null, ops pushDocOp: (project_id, doc_id, op, callback = (error) ->) -> - RedisManager.pushDocOp doc_id, op, (error) -> + RedisManager.pushDocOp doc_id, op, (error, version) -> return callback(error) if error? - RedisManager.pushUncompressedHistoryOp doc_id, op, callback + RedisManager.pushUncompressedHistoryOp doc_id, op, (error) -> + return callback(error) if error? + callback null, version _ensureOpsAreLoaded: (project_id, doc_id, backToVersion, callback = (error) ->) -> RedisManager.getDocVersion doc_id, (error, redisVersion) -> diff --git a/services/document-updater/app/coffee/ShareJsDB.coffee b/services/document-updater/app/coffee/ShareJsDB.coffee index 6eaf21846c..da6640685b 100644 --- a/services/document-updater/app/coffee/ShareJsDB.coffee +++ b/services/document-updater/app/coffee/ShareJsDB.coffee @@ -4,6 +4,7 @@ DocumentManager = require "./DocumentManager" RedisManager = require "./RedisManager" DocOpsManager = require "./DocOpsManager" Errors = require "./Errors" +logger = require "logger-sharelatex" module.exports = ShareJsDB = getOps: (doc_key, start, end, callback) -> @@ -29,9 +30,9 @@ module.exports = ShareJsDB = if version == opData.v + 1 callback() else - # The document has been corrupted by the change. For now, throw an exception. - # Later, rebuild the snapshot. - callback "Version mismatch in db.append. '#{doc_id}' is corrupted." + error = new Error("Version mismatch. 
'#{doc_id}' is corrupted.") + logger.error err: error, doc_id: doc_id, project_id: project_id, opVersion: opData.v, expectedVersion: version, "doc is corrupt" + callback error getSnapshot: (doc_key, callback) -> [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee index c26171d98f..df2a546480 100644 --- a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee @@ -309,7 +309,7 @@ describe "DocOpsManager", -> describe "pushDocOp", -> beforeEach -> @op = "mock-op" - @RedisManager.pushDocOp = sinon.stub().callsArg(2) + @RedisManager.pushDocOp = sinon.stub().callsArgWith(2, null, @version = 42) @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArg(2) @DocOpsManager.pushDocOp @project_id, @doc_id, @op, @callback @@ -323,7 +323,7 @@ describe "DocOpsManager", -> .calledWith(@doc_id, @op) .should.equal true - it "should call the callback", -> - @callback.called.should.equal true + it "should call the callback with the version", -> + @callback.calledWith(null, @version).should.equal true diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee index b28f23d2f4..6088de77f4 100644 --- a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee @@ -26,11 +26,8 @@ describe "ShareJsDB.writeOps", -> @ShareJsDB.writeOp @doc_key, @opData, @callback it "should write the op to redis", -> - op = - op: @opData.op - meta: @opData.meta @DocOpsManager.pushDocOp - .calledWith(@project_id, @doc_id, op) + .calledWith(@project_id, @doc_id, @opData) .should.equal true it "should call the callback without an error", -> @@ -46,7 +43,7 @@ describe "ShareJsDB.writeOps", -> @ShareJsDB.writeOp @doc_key, @opData, @callback it "should call the callback with an error", -> - @callback.calledWith(sinon.match.string).should.equal true + @callback.calledWith(new Error()).should.equal true From f3192da87f1f7f90ee2250f401ea7a5f7d4c1110 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 26 Feb 2014 15:56:52 +0000 Subject: [PATCH 015/769] Tell track changes api to flush doc when flushing doc to mongo --- services/document-updater/Gruntfile.coffee | 6 +-- services/document-updater/app.coffee | 9 ----- .../app/coffee/DocumentManager.coffee | 5 +++ .../app/coffee/TrackChangesManager.coffee | 20 ++++++++++ .../config/settings.development.coffee | 2 + .../coffee/ApplyingUpdatesToADocTests.coffee | 18 +++++++++ .../coffee/FlushingAProjectTests.coffee | 14 +++++++ .../coffee/FlushingDocsTests.coffee | 12 +++++- .../coffee/helpers/MockTrackChangesApi.coffee | 20 ++++++++++ .../coffee/helpers/MockWebApi.coffee | 3 +- .../DocumentManager/flushDocTests.coffee | 8 ++++ .../TrackChangesManagerTests.coffee | 38 +++++++++++++++++++ 12 files changed, 141 insertions(+), 14 deletions(-) create mode 100644 services/document-updater/app/coffee/TrackChangesManager.coffee create mode 100644 services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee create mode 100644 services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee diff --git a/services/document-updater/Gruntfile.coffee 
b/services/document-updater/Gruntfile.coffee index 717ebd464c..8905cabf81 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -45,16 +45,16 @@ module.exports = (grunt) -> clean: app: ["app/js"] - acceptance_tests: ["test/unit/js"] + acceptance_tests: ["test/acceptance/js"] mochaTest: unit: - src: ['test/unit/js/**/*.js'] + src: ["test/unit/js/#{grunt.option('feature') or '**'}/*.js"] options: reporter: grunt.option('reporter') or 'spec' grep: grunt.option("grep") acceptance: - src: ['test/acceptance/js/**/*.js'] + src: ["test/acceptance/js/#{grunt.option('feature') or '*'}.js"] options: reporter: grunt.option('reporter') or 'spec' grep: grunt.option("grep") diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 1974169f4e..7168017790 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -21,15 +21,6 @@ app.configure -> app.use express.bodyParser() app.use app.router -app.configure 'development', ()-> - console.log "Development Enviroment" - app.use express.errorHandler({ dumpExceptions: true, showStack: true }) - -app.configure 'production', ()-> - console.log "Production Enviroment" - app.use express.logger() - app.use express.errorHandler() - rclient.subscribe("pending-updates") rclient.on "message", (channel, doc_key)-> [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index aa64ac3d7f..2f3cfb8d79 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -2,6 +2,7 @@ RedisManager = require "./RedisManager" PersistenceManager = require "./PersistenceManager" DocOpsManager = require "./DocOpsManager" DiffCodec = require "./DiffCodec" +TrackChangesManager = require "./TrackChangesManager" logger = require "logger-sharelatex" Metrics = require "./Metrics" @@ -81,6 +82,10 @@ module.exports = DocumentManager = timer.done() _callback(args...) + TrackChangesManager.flushDocChanges doc_id, (error) -> + if error? + logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc to track changes api" + RedisManager.getDoc doc_id, (error, lines, version) -> return callback(error) if error? if !lines? or !version? diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee new file mode 100644 index 0000000000..c0b40331dc --- /dev/null +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -0,0 +1,20 @@ +settings = require "settings-sharelatex" +request = require "request" +logger = require "logger-sharelatex" + +module.exports = + flushDocChanges: (doc_id, callback = (error) ->) -> + if !settings.apis?.trackchanges? + logger.warn doc_id: doc_id, "track changes API is not configured, so not flushing" + return callback() + + url = "#{settings.apis.trackchanges.url}/doc/#{doc_id}/flush" + logger.log doc_id: doc_id, url: url, "flushing doc in track changes api" + request.post url, (error, res, body)-> + if error? 
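+      # A transport-level failure reaching the track changes api is passed
+      # straight through; below, any 2xx response counts as success and any
+      # other status code is converted into an Error for the caller.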
+ return callback(error) + else if res.statusCode >= 200 and res.statusCode < 300 + return callback(null) + else + error = new Error("track changes api returned a failure status code: #{res.statusCode}") + return callback(error) diff --git a/services/document-updater/config/settings.development.coffee b/services/document-updater/config/settings.development.coffee index d730bb0f2d..b4f12ed81c 100755 --- a/services/document-updater/config/settings.development.coffee +++ b/services/document-updater/config/settings.development.coffee @@ -12,6 +12,8 @@ module.exports = url: "http://localhost:3000" user: "sharelatex" pass: "password" + trackchanges: + url: "http://localhost:3014" redis: web: diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 5108a4c2cc..ac1a24223a 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -5,6 +5,7 @@ async = require "async" mongojs = require "../../../app/js/mongojs" db = mongojs.db ObjectId = mongojs.ObjectId +rclient = require("redis").createClient() MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -45,6 +46,11 @@ describe "Applying updates to a doc", -> doc.lines.should.deep.equal @result done() + it "should push the applied updates to the track changes api", (done) -> + rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + describe "when the document is loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] @@ -69,6 +75,11 @@ describe "Applying updates to a doc", -> doc.lines.should.deep.equal @result done() + it "should push the applied updates to the track changes api", (done) -> + rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + describe "when the document has been deleted", -> describe "when the ops come in a single linear order", -> before -> @@ -112,6 +123,13 @@ describe "Applying updates to a doc", -> doc.lines.should.deep.equal @result done() + it "should push the applied updates to the track changes api", (done) -> + rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + updates = (JSON.parse(u) for u in updates) + for appliedUpdate, i in @updates + appliedUpdate.op.should.deep.equal updates[i].op + done() + describe "when older ops come in after the delete", -> before -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee index 02b44e3fd6..9adbc2458c 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee @@ -4,6 +4,7 @@ chai.should() async = require "async" MockWebApi = require "./helpers/MockWebApi" +MockTrackChangesApi = require "./helpers/MockTrackChangesApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Flushing a project", -> @@ -40,6 +41,8 @@ describe "Flushing a project", -> describe "with documents which have been 
updated", -> before (done) -> sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockTrackChangesApi, "flushDoc" + async.series @docs.map((doc) => (callback) => DocUpdaterClient.preloadDoc @project_id, doc.id, (error) => @@ -56,6 +59,7 @@ describe "Flushing a project", -> after -> MockWebApi.setDocumentLines.restore() + MockTrackChangesApi.flushDoc.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -74,3 +78,13 @@ describe "Flushing a project", -> callback() ), done + it "should flush the docs in the track changes api", (done) -> + # This is done in the background, so wait a little while to ensure it has happened + setTimeout () => + async.series @docs.map((doc) => + (callback) => + MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal true + ), done + done() + , 100 + diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index aaaef99936..6d67dd68a0 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -4,6 +4,7 @@ chai.should() async = require "async" MockWebApi = require "./helpers/MockWebApi" +MockTrackChangesApi = require "./helpers/MockTrackChangesApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" mongojs = require "../../../app/js/mongojs" db = mongojs.db @@ -31,6 +32,7 @@ describe "Flushing a doc to Mongo", -> lines: @lines } sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockTrackChangesApi, "flushDoc" DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) => throw error if error? @@ -40,6 +42,7 @@ describe "Flushing a doc to Mongo", -> after -> MockWebApi.setDocumentLines.restore() + MockTrackChangesApi.flushDoc.restore() it "should flush the updated document to the web api", -> MockWebApi.setDocumentLines @@ -52,6 +55,13 @@ describe "Flushing a doc to Mongo", -> doc.docOps[0].op.should.deep.equal @update.op done() + it "should flush the doc in the track changes api", (done) -> + # This is done in the background, so wait a little while to ensure it has happened + setTimeout () => + MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true + done() + , 100 + describe "when the doc has a large number of ops to be flushed", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] @@ -93,5 +103,5 @@ describe "Flushing a doc to Mongo", -> it "should not flush the doc to the web api", -> MockWebApi.setDocumentLines.called.should.equal false - + diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee new file mode 100644 index 0000000000..2fdff0d3ca --- /dev/null +++ b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee @@ -0,0 +1,20 @@ +express = require("express") +app = express() + +module.exports = MockTrackChangesApi = + flushDoc: (doc_id, callback = (error) ->) -> + callback() + + run: () -> + app.post "/doc/:doc_id/flush", (req, res, next) => + @flushDoc req.params.doc_id, (error) -> + if error? + res.send 500 + else + res.send 204 + + app.listen 3014, (error) -> + throw error if error? 
+ +MockTrackChangesApi.run() + diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index 7d50eb8377..693e98f8ad 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -34,7 +34,8 @@ module.exports = MockWebApi = else res.send 204 - app.listen(3000) + app.listen 3000, (error) -> + throw error if error? MockWebApi.run() diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee index 079341a536..5a4adc4a36 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee @@ -10,6 +10,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> "./RedisManager": @RedisManager = {} "./PersistenceManager": @PersistenceManager = {} "./DocOpsManager": @DocOpsManager = {} + "./TrackChangesManager": @TrackChangesManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} "./Metrics": @Metrics = Timer: class Timer @@ -25,6 +26,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) @PersistenceManager.setDoc = sinon.stub().callsArgWith(3) @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) + @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(1) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback it "should get the doc from redis", -> @@ -48,11 +50,17 @@ describe "DocumentUpdater - flushDocIfLoaded", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should flush the doc in the track changes api", -> + @TrackChangesManager.flushDocChanges + .calledWith(@doc_id) + .should.equal true + describe "when the document is not in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, null, null) @PersistenceManager.setDoc = sinon.stub().callsArgWith(3) @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) + @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(1) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback it "should get the doc from redis", -> diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee new file mode 100644 index 0000000000..672cdcfaa4 --- /dev/null +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -0,0 +1,38 @@ +SandboxedModule = require('sandboxed-module') +sinon = require('sinon') +require('chai').should() +modulePath = require('path').join __dirname, '../../../../app/js/TrackChangesManager' + +describe "TrackChangesManager", -> + beforeEach -> + @TrackChangesManager = SandboxedModule.require modulePath, requires: + "request": @request = {} + "settings-sharelatex": @Settings = {} + @doc_id = "mock-doc-id" + @callback = sinon.stub() + + describe "flushDocChanges", -> + beforeEach -> + @Settings.apis = + trackchanges: url: "http://trackchanges.example.com" + + describe "successfully", -> + beforeEach -> + @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) + @TrackChangesManager.flushDocChanges @doc_id, @callback + + 
it "should send a request to the track changes api", -> + @request.post + .calledWith("#{@Settings.apis.trackchanges.url}/doc/#{@doc_id}/flush") + .should.equal true + + it "should return the callback", -> + @callback.calledWith(null).should.equal true + + describe "when the track changes api returns an error", -> + beforeEach -> + @request.post = sinon.stub().callsArgWith(1, null, statusCode: 500) + @TrackChangesManager.flushDocChanges @doc_id, @callback + + it "should return the callback with an error", -> + @callback.calledWith(new Error("track changes api return non-success code: 500")).should.equal true From 77c5a27e12360d6ab82e8c9594343b0749163056 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 26 Feb 2014 16:54:35 +0000 Subject: [PATCH 016/769] Set up acceptance tests in TravisCI --- services/document-updater/.gitignore | 2 ++ services/document-updater/.travis.yml | 4 ++++ services/document-updater/Gruntfile.coffee | 6 ++++++ services/document-updater/package.json | 3 ++- 4 files changed, 14 insertions(+), 1 deletion(-) diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore index 5755e37b12..a477cfd66c 100644 --- a/services/document-updater/.gitignore +++ b/services/document-updater/.gitignore @@ -43,4 +43,6 @@ app/js/* test/unit/js/* test/acceptance/js/* +forever/ + **.swp diff --git a/services/document-updater/.travis.yml b/services/document-updater/.travis.yml index 29f5884d60..6adc08643a 100644 --- a/services/document-updater/.travis.yml +++ b/services/document-updater/.travis.yml @@ -10,8 +10,12 @@ install: - npm install - grunt install +before_script: + - grunt forever:app:start + script: - grunt test:unit + - grunt test:acceptance services: - redis-server diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 8905cabf81..8c96ea0650 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -5,8 +5,14 @@ module.exports = (grunt) -> grunt.loadNpmTasks 'grunt-available-tasks' grunt.loadNpmTasks 'grunt-execute' grunt.loadNpmTasks 'grunt-bunyan' + grunt.loadNpmTasks 'grunt-forever' grunt.initConfig + forever: + app: + options: + index: "app.js" + execute: app: src: "app.js" diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 0bc012d4a6..fbcad4abc1 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -25,6 +25,7 @@ "grunt-available-tasks": "~0.4.1", "grunt-contrib-coffee": "~0.10.0", "bunyan": "~0.22.1", - "grunt-bunyan": "~0.5.0" + "grunt-bunyan": "~0.5.0", + "grunt-forever": "~0.4.2" } } From 3d70f9126e5f44ae94a3c81bd19323a907ae1721 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 28 Feb 2014 18:29:05 +0000 Subject: [PATCH 017/769] Flush track changes api every 50 updates --- .../app/coffee/DocOpsManager.coffee | 3 +- .../app/coffee/DocumentManager.coffee | 5 --- .../app/coffee/TrackChangesManager.coffee | 15 +++++++- .../coffee/ApplyingUpdatesToADocTests.coffee | 28 ++++++++++++++- .../coffee/FlushingAProjectTests.coffee | 13 ------- .../coffee/FlushingDocsTests.coffee | 10 ------ .../DocOpsManager/DocOpsManagerTests.coffee | 5 +-- .../DocumentManager/flushDocTests.coffee | 8 ----- .../pushUncompressedHistoryOpTests.coffee | 6 ++-- .../TrackChangesManagerTests.coffee | 35 +++++++++++++++++++ 10 files changed, 84 insertions(+), 44 deletions(-) diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee 
b/services/document-updater/app/coffee/DocOpsManager.coffee index 971db83358..180f1e564b 100644 --- a/services/document-updater/app/coffee/DocOpsManager.coffee +++ b/services/document-updater/app/coffee/DocOpsManager.coffee @@ -5,6 +5,7 @@ ObjectId = mongojs.ObjectId logger = require "logger-sharelatex" async = require "async" Metrics = require("./Metrics") +TrackChangesManager = require "./TrackChangesManager" module.exports = DocOpsManager = flushDocOpsToMongo: (project_id, doc_id, _callback = (error) ->) -> @@ -46,7 +47,7 @@ module.exports = DocOpsManager = pushDocOp: (project_id, doc_id, op, callback = (error) ->) -> RedisManager.pushDocOp doc_id, op, (error, version) -> return callback(error) if error? - RedisManager.pushUncompressedHistoryOp doc_id, op, (error) -> + TrackChangesManager.pushUncompressedHistoryOp doc_id, op, (error) -> return callback(error) if error? callback null, version diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 2f3cfb8d79..aa64ac3d7f 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -2,7 +2,6 @@ RedisManager = require "./RedisManager" PersistenceManager = require "./PersistenceManager" DocOpsManager = require "./DocOpsManager" DiffCodec = require "./DiffCodec" -TrackChangesManager = require "./TrackChangesManager" logger = require "logger-sharelatex" Metrics = require "./Metrics" @@ -82,10 +81,6 @@ module.exports = DocumentManager = timer.done() _callback(args...) - TrackChangesManager.flushDocChanges doc_id, (error) -> - if error? - logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc to track changes api" - RedisManager.getDoc doc_id, (error, lines, version) -> return callback(error) if error? if !lines? or !version? diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index c0b40331dc..489694fbf4 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -1,8 +1,9 @@ settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" +RedisManager = require "./RedisManager" -module.exports = +module.exports = TrackChangesManager = flushDocChanges: (doc_id, callback = (error) ->) -> if !settings.apis?.trackchanges? logger.warn doc_id: doc_id, "track changes API is not configured, so not flushing" @@ -18,3 +19,15 @@ module.exports = else error = new Error("track changes api returned a failure status code: #{res.statusCode}") return callback(error) + + FLUSH_EVERY_N_OPS: 50 + pushUncompressedHistoryOp: (doc_id, op, callback = (error) ->) -> + RedisManager.pushUncompressedHistoryOp doc_id, op, (error, length) -> + if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 + # Do this in the background since it uses HTTP and so may be too + # slow to wait for when processing a doc update. + logger.log length: length, doc_id: doc_id, "flushing track changes api" + TrackChangesManager.flushDocChanges doc_id, (error) -> + if error? 
+ logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc to track changes api" + callback() diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index ac1a24223a..5b867b5807 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -7,6 +7,7 @@ db = mongojs.db ObjectId = mongojs.ObjectId rclient = require("redis").createClient() +MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -230,4 +231,29 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @lines done() - + + describe "with enough updates to flush to the track changes api", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, { + lines: @lines + } + @updates = [] + for v in [0..99] # Should flush after 50 ops + @updates.push + doc_id: @doc_id, + op: [i: v.toString(), p: 0] + v: v + + sinon.spy MockTrackChangesApi, "flushDoc" + + DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => + throw error if error? + setTimeout done, 200 + + after -> + MockTrackChangesApi.flushDoc.restore() + + it "should flush the doc twice", -> + console.log MockTrackChangesApi.flushDoc.args + MockTrackChangesApi.flushDoc.calledTwice.should.equal true diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee index 9adbc2458c..b78decd5a9 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee @@ -4,7 +4,6 @@ chai.should() async = require "async" MockWebApi = require "./helpers/MockWebApi" -MockTrackChangesApi = require "./helpers/MockTrackChangesApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Flushing a project", -> @@ -41,7 +40,6 @@ describe "Flushing a project", -> describe "with documents which have been updated", -> before (done) -> sinon.spy MockWebApi, "setDocumentLines" - sinon.spy MockTrackChangesApi, "flushDoc" async.series @docs.map((doc) => (callback) => @@ -59,7 +57,6 @@ describe "Flushing a project", -> after -> MockWebApi.setDocumentLines.restore() - MockTrackChangesApi.flushDoc.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -78,13 +75,3 @@ describe "Flushing a project", -> callback() ), done - it "should flush the docs in the track changes api", (done) -> - # This is done in the background, so wait a little while to ensure it has happened - setTimeout () => - async.series @docs.map((doc) => - (callback) => - MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal true - ), done - done() - , 100 - diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index 6d67dd68a0..da0036bd02 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -4,7 
+4,6 @@ chai.should() async = require "async" MockWebApi = require "./helpers/MockWebApi" -MockTrackChangesApi = require "./helpers/MockTrackChangesApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" mongojs = require "../../../app/js/mongojs" db = mongojs.db @@ -32,7 +31,6 @@ describe "Flushing a doc to Mongo", -> lines: @lines } sinon.spy MockWebApi, "setDocumentLines" - sinon.spy MockTrackChangesApi, "flushDoc" DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) => throw error if error? @@ -42,7 +40,6 @@ describe "Flushing a doc to Mongo", -> after -> MockWebApi.setDocumentLines.restore() - MockTrackChangesApi.flushDoc.restore() it "should flush the updated document to the web api", -> MockWebApi.setDocumentLines @@ -55,13 +52,6 @@ describe "Flushing a doc to Mongo", -> doc.docOps[0].op.should.deep.equal @update.op done() - it "should flush the doc in the track changes api", (done) -> - # This is done in the background, so wait a little while to ensure it has happened - setTimeout () => - MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true - done() - , 100 - describe "when the doc has a large number of ops to be flushed", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee index df2a546480..fe13d1cd55 100644 --- a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee @@ -19,6 +19,7 @@ describe "DocOpsManager", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + "./TrackChangesManager": @TrackChangesManager = {} describe "flushDocOpsToMongo", -> describe "when versions are consistent", -> @@ -310,7 +311,7 @@ describe "DocOpsManager", -> beforeEach -> @op = "mock-op" @RedisManager.pushDocOp = sinon.stub().callsArgWith(2, null, @version = 42) - @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArg(2) + @TrackChangesManager.pushUncompressedHistoryOp = sinon.stub().callsArg(2) @DocOpsManager.pushDocOp @project_id, @doc_id, @op, @callback it "should push the op in to the docOps list", -> @@ -319,7 +320,7 @@ describe "DocOpsManager", -> .should.equal true it "should push the op into the pushUncompressedHistoryOp", -> - @RedisManager.pushUncompressedHistoryOp + @TrackChangesManager.pushUncompressedHistoryOp .calledWith(@doc_id, @op) .should.equal true diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee index 5a4adc4a36..079341a536 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee @@ -10,7 +10,6 @@ describe "DocumentUpdater - flushDocIfLoaded", -> "./RedisManager": @RedisManager = {} "./PersistenceManager": @PersistenceManager = {} "./DocOpsManager": @DocOpsManager = {} - "./TrackChangesManager": @TrackChangesManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} "./Metrics": @Metrics = Timer: class Timer @@ -26,7 +25,6 @@ describe "DocumentUpdater - flushDocIfLoaded", -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) @PersistenceManager.setDoc = sinon.stub().callsArgWith(3) 
@DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) - @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(1) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback it "should get the doc from redis", -> @@ -50,17 +48,11 @@ describe "DocumentUpdater - flushDocIfLoaded", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - it "should flush the doc in the track changes api", -> - @TrackChangesManager.flushDocChanges - .calledWith(@doc_id) - .should.equal true - describe "when the document is not in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, null, null) @PersistenceManager.setDoc = sinon.stub().callsArgWith(3) @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) - @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(1) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback it "should get the doc from redis", -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index 3b743db6e4..415f8e4572 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -19,7 +19,7 @@ describe "RedisManager.pushUncompressedHistoryOp", -> describe "successfully", -> beforeEach -> @op = { op: [{ i: "foo", p: 4 }] } - @rclient.rpush = sinon.stub().callsArg(2) + @rclient.rpush = sinon.stub().callsArgWith(2, null, @length = 42) @RedisManager.pushUncompressedHistoryOp @doc_id, @op, @callback it "should push the doc op into the doc ops list", -> @@ -27,8 +27,8 @@ describe "RedisManager.pushUncompressedHistoryOp", -> .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) .should.equal true - it "should call the callback", -> - @callback.called.should.equal true + it "should call the callback with the length", -> + @callback.calledWith(null, @length).should.equal true diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index 672cdcfaa4..6ff83d7414 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -8,6 +8,8 @@ describe "TrackChangesManager", -> @TrackChangesManager = SandboxedModule.require modulePath, requires: "request": @request = {} "settings-sharelatex": @Settings = {} + "logger-sharelatex": @logger = { log: sinon.stub() } + "./RedisManager": @RedisManager = {} @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -36,3 +38,36 @@ describe "TrackChangesManager", -> it "should return the callback with an error", -> @callback.calledWith(new Error("track changes api return non-success code: 500")).should.equal true + + describe "pushUncompressedHistoryOp", -> + beforeEach -> + @op = "mock-op" + @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(1) + + describe "pushing the op", -> + beforeEach -> + @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 1) + @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + + it "should push the op into redis", -> + @RedisManager.pushUncompressedHistoryOp + .calledWith(@doc_id, @op) + 
.should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + it "should not try to flush the op", -> + @TrackChangesManager.flushDocChanges.called.should.equal false + + describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> + beforeEach -> + @RedisManager.pushUncompressedHistoryOp = + sinon.stub().callsArgWith(2, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + + it "should tell the track changes api to flush", -> + @TrackChangesManager.flushDocChanges + .calledWith(@doc_id) + .should.equal true + From 86195ce7c3df1fe345140a722326159a6961739a Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 28 Feb 2014 19:09:29 +0000 Subject: [PATCH 018/769] Add in load throttling based on a redis key --- .../app/coffee/RedisKeyBuilder.coffee | 1 + .../app/coffee/RedisManager.coffee | 6 ++ .../app/coffee/TrackChangesManager.coffee | 29 +++++++--- .../coffee/ApplyingUpdatesToADocTests.coffee | 36 ++++++++---- .../getHistoryLoadManagerThreshold.coffee | 43 ++++++++++++++ .../TrackChangesManagerTests.coffee | 57 +++++++++++++------ 6 files changed, 136 insertions(+), 36 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/RedisManager/getHistoryLoadManagerThreshold.coffee diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee index 2bd1ed08c8..de2bc85443 100644 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -25,6 +25,7 @@ module.exports = docsWithPendingUpdates : DOCIDSWITHPENDINGUPDATES combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}" splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":") + historyLoadManagerThreshold: "HistoryLoadManagerThreshold" now : (key)-> d = new Date() d.getDate()+":"+(d.getMonth()+1)+":"+d.getFullYear()+":"+key diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 5f6c880cee..3d8efdbc70 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -164,6 +164,12 @@ module.exports = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback + + getHistoryLoadManagerThreshold: (callback = (error, threshold) ->) -> + rclient.get keys.historyLoadManagerThreshold, (error, value) -> + return callback(error) if error? + return callback null, 0 if !value? 
+ callback null, parseInt(value, 10) getDocumentsProjectId = (doc_id, callback)-> diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 489694fbf4..0aca12792b 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -2,6 +2,7 @@ settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" RedisManager = require "./RedisManager" +crypto = require("crypto") module.exports = TrackChangesManager = flushDocChanges: (doc_id, callback = (error) ->) -> @@ -22,12 +23,22 @@ module.exports = TrackChangesManager = FLUSH_EVERY_N_OPS: 50 pushUncompressedHistoryOp: (doc_id, op, callback = (error) ->) -> - RedisManager.pushUncompressedHistoryOp doc_id, op, (error, length) -> - if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 - # Do this in the background since it uses HTTP and so may be too - # slow to wait for when processing a doc update. - logger.log length: length, doc_id: doc_id, "flushing track changes api" - TrackChangesManager.flushDocChanges doc_id, (error) -> - if error? - logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc to track changes api" - callback() + RedisManager.getHistoryLoadManagerThreshold (error, threshold) -> + return callback(error) if error? + if TrackChangesManager.getLoadManagerBucket(doc_id) < threshold + RedisManager.pushUncompressedHistoryOp doc_id, op, (error, length) -> + return callback(error) if error? + if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 + # Do this in the background since it uses HTTP and so may be too + # slow to wait for when processing a doc update. + logger.log length: length, doc_id: doc_id, "flushing track changes api" + TrackChangesManager.flushDocChanges doc_id, (error) -> + if error? 
+ logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc to track changes api" + callback() + else + callback() + + getLoadManagerBucket: (doc_id) -> + hash = crypto.createHash("md5").update(doc_id).digest("hex") + return parseInt(hash.slice(0,4), 16) % 100 diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 5b867b5807..7d5ac144e7 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -12,7 +12,7 @@ MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Applying updates to a doc", -> - before -> + before (done) -> @lines = ["one", "two", "three"] @update = doc: @doc_id @@ -22,6 +22,8 @@ describe "Applying updates to a doc", -> }] v: 0 @result = ["one", "one and a half", "two", "three"] + rclient.set "HistoryLoadManagerThreshold", 100, (error) => + done() describe "when the document is not loaded", -> before (done) -> @@ -233,7 +235,7 @@ describe "Applying updates to a doc", -> done() describe "with enough updates to flush to the track changes api", -> - before (done) -> + beforeEach -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines @@ -247,13 +249,27 @@ describe "Applying updates to a doc", -> sinon.spy MockTrackChangesApi, "flushDoc" - DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => - throw error if error? - setTimeout done, 200 - - after -> + afterEach -> MockTrackChangesApi.flushDoc.restore() - it "should flush the doc twice", -> - console.log MockTrackChangesApi.flushDoc.args - MockTrackChangesApi.flushDoc.calledTwice.should.equal true + describe "when under the load manager threshold", -> + beforeEach (done) -> + rclient.set "HistoryLoadManagerThreshold", 100, (error) => + throw error if error? + DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => + throw error if error? + setTimeout done, 200 + + it "should flush the doc twice", -> + MockTrackChangesApi.flushDoc.calledTwice.should.equal true + + describe "when over the load manager threshold", -> + beforeEach (done) -> + rclient.set "HistoryLoadManagerThreshold", 0, (error) => + throw error if error? + DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => + throw error if error? 
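# How the gating exercised by these tests works, restated as a sketch: the
# getLoadManagerBucket helper added in this patch reduces the md5 of a doc_id
# to a stable bucket in 0..99, so a threshold of N admits roughly N% of docs.
crypto = require "crypto"
bucket = (doc_id) ->
  hash = crypto.createHash("md5").update(doc_id).digest("hex")
  parseInt(hash.slice(0, 4), 16) % 100
# History ops are recorded only when bucket(doc_id) < threshold, so the
# threshold of 100 set here admits every doc, and 0 admits none.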
+ setTimeout done, 200 + + it "should not flush the doc", -> + MockTrackChangesApi.flushDoc.called.should.equal false diff --git a/services/document-updater/test/unit/coffee/RedisManager/getHistoryLoadManagerThreshold.coffee b/services/document-updater/test/unit/coffee/RedisManager/getHistoryLoadManagerThreshold.coffee new file mode 100644 index 0000000000..d69cec370c --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/getHistoryLoadManagerThreshold.coffee @@ -0,0 +1,43 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager.js" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.getHistoryLoadManagerThreshold", -> + beforeEach -> + @RedisManager = SandboxedModule.require modulePath, requires: + "redis": createClient: () => + @rclient = + auth: () -> + "logger-sharelatex": @logger = {log: sinon.stub()} + @callback = sinon.stub() + + describe "with no value", -> + beforeEach -> + @rclient.get = sinon.stub().callsArgWith(1, null, null) + @RedisManager.getHistoryLoadManagerThreshold @callback + + it "should get the value", -> + @rclient.get + .calledWith("HistoryLoadManagerThreshold") + .should.equal true + + it "should call the callback with 0", -> + @callback.calledWith(null, 0).should.equal true + + describe "with a value", -> + beforeEach -> + @rclient.get = sinon.stub().callsArgWith(1, null, "42") + @RedisManager.getHistoryLoadManagerThreshold @callback + + it "should get the value", -> + @rclient.get + .calledWith("HistoryLoadManagerThreshold") + .should.equal true + + it "should call the callback with the numeric value", -> + @callback.calledWith(null, 42).should.equal true + + + diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index 6ff83d7414..0d696730b9 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -44,30 +44,53 @@ describe "TrackChangesManager", -> @op = "mock-op" @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(1) - describe "pushing the op", -> + describe "when the doc is under the load manager threshold", -> beforeEach -> + @RedisManager.getHistoryLoadManagerThreshold = sinon.stub().callsArgWith(0, null, 40) + @TrackChangesManager.getLoadManagerBucket = sinon.stub().returns(30) + + describe "pushing the op", -> + beforeEach -> + @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 1) + @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + + it "should push the op into redis", -> + @RedisManager.pushUncompressedHistoryOp + .calledWith(@doc_id, @op) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + it "should not try to flush the op", -> + @TrackChangesManager.flushDocChanges.called.should.equal false + + describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> + beforeEach -> + @RedisManager.pushUncompressedHistoryOp = + sinon.stub().callsArgWith(2, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + + it "should tell the track changes api to flush", -> + @TrackChangesManager.flushDocChanges + .calledWith(@doc_id) + .should.equal true + + + describe "when the doc is 
over the load manager threshold", -> + beforeEach -> + @RedisManager.getHistoryLoadManagerThreshold = sinon.stub().callsArgWith(0, null, 40) + @TrackChangesManager.getLoadManagerBucket = sinon.stub().returns(50) @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 1) @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback - it "should push the op into redis", -> - @RedisManager.pushUncompressedHistoryOp - .calledWith(@doc_id, @op) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true + it "should not push the op", -> + @RedisManager.pushUncompressedHistoryOp.called.should.equal false it "should not try to flush the op", -> @TrackChangesManager.flushDocChanges.called.should.equal false - describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> - beforeEach -> - @RedisManager.pushUncompressedHistoryOp = - sinon.stub().callsArgWith(2, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) - @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + it "should call the callback", -> + @callback.called.should.equal true - it "should tell the track changes api to flush", -> - @TrackChangesManager.flushDocChanges - .calledWith(@doc_id) - .should.equal true From 5d45e191f39c98b77b59379c521ada19e45a1352 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 4 Mar 2014 12:39:02 +0000 Subject: [PATCH 019/769] Don't crash when logging out error --- .../app/coffee/TrackChangesManager.coffee | 2 +- .../TrackChangesManagerTests.coffee | 17 ++++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 0aca12792b..9c8b514753 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -34,7 +34,7 @@ module.exports = TrackChangesManager = logger.log length: length, doc_id: doc_id, "flushing track changes api" TrackChangesManager.flushDocChanges doc_id, (error) -> if error? 
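# Context for the one-line fix below: in the previous patch,
# pushUncompressedHistoryOp only received (doc_id, op, callback), so the
# project_id referenced on the error path was never in scope, and the act of
# logging a flush failure itself threw a ReferenceError. Dropping the field
# from the log line is what stops the crash.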
- logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc to track changes api" + logger.error err: error, doc_id: doc_id, "error flushing doc to track changes api" callback() else callback() diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index 0d696730b9..dd72937c39 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -8,7 +8,7 @@ describe "TrackChangesManager", -> @TrackChangesManager = SandboxedModule.require modulePath, requires: "request": @request = {} "settings-sharelatex": @Settings = {} - "logger-sharelatex": @logger = { log: sinon.stub() } + "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./RedisManager": @RedisManager = {} @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -76,6 +76,21 @@ describe "TrackChangesManager", -> .calledWith(@doc_id) .should.equal true + describe "when TrackChangesManager errors", -> + beforeEach -> + @RedisManager.pushUncompressedHistoryOp = + sinon.stub().callsArgWith(2, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(1, @error = new Error("oops")) + @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + + it "should log out the error", -> + @logger.error + .calledWith( + err: @error + doc_id: @doc_id + "error flushing doc to track changes api" + ) + .should.equal true describe "when the doc is over the load manager threshold", -> beforeEach -> From 4f878e000be984f0444acde40f6cf160a785060e Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 11 Mar 2014 12:47:26 +0000 Subject: [PATCH 020/769] Allow source and user_id to be included when setting a document --- .../app/coffee/DocumentManager.coffee | 8 ++-- .../app/coffee/HttpController.coffee | 6 ++- .../coffee/SettingADocumentTests.coffee | 4 +- .../coffee/helpers/DocUpdaterClient.coffee | 4 +- .../coffee/DocumentManager/setDocTests.coffee | 45 ++++++++----------- .../coffee/HttpController/setDocTests.coffee | 12 +++-- 6 files changed, 41 insertions(+), 38 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index aa64ac3d7f..38f8a9dac4 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -42,7 +42,7 @@ module.exports = DocumentManager = return callback(error) if error? callback null, lines, version, ops - setDoc: (project_id, doc_id, newLines, _callback = (error) ->) -> + setDoc: (project_id, doc_id, newLines, source, user_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.setDoc") callback = (args...) -> timer.done() @@ -68,6 +68,8 @@ module.exports = DocumentManager = v: version meta: type: "external" + source: source + user_id: user_id UpdateManager.applyUpdates project_id, doc_id, [update], (error) -> return callback(error) if error? 
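# For illustration, the update built above for an external setDoc now looks
# roughly like this (the source and user_id values mirror the fixtures used
# in the tests in this patch):
#   doc: doc_id
#   op: ops                # the DiffCodec diff from the old lines to the new
#   v: version
#   meta:
#     type: "external"
#     source: "dropbox"
#     user_id: "user-id-123"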
DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> @@ -114,9 +116,9 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback - setDocWithLock: (project_id, doc_id, lines, callback = (error) ->) -> + setDocWithLock: (project_id, doc_id, lines, source, user_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, callback + UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, callback flushDocIfLoadedWithLock: (project_id, doc_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 391d02ee37..ef9fb38e19 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -32,9 +32,11 @@ module.exports = HttpController = doc_id = req.params.doc_id project_id = req.params.project_id lines = req.body.lines - logger.log project_id: project_id, doc_id: doc_id, lines: lines, "setting doc via http" + source = req.body.source + user_id = req.body.user_id + logger.log project_id: project_id, doc_id: doc_id, lines: lines, source: source, user_id: user_id, "setting doc via http" timer = new Metrics.Timer("http.setDoc") - DocumentManager.setDocWithLock project_id, doc_id, lines, (error) -> + DocumentManager.setDocWithLock project_id, doc_id, lines, source, user_id, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "set doc via http" diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index cc0f30834a..a02cb0250a 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -18,6 +18,8 @@ describe "Setting a document", -> v: 0 @result = ["one", "one and a half", "two", "three"] @newLines = ["these", "are", "the", "new", "lines"] + @source = "dropbox" + @user_id = "user-id-123" MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines } @@ -30,7 +32,7 @@ describe "Setting a document", -> DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => throw error if error? 
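# The request this acceptance test ends up making, sketched for reference
# (route and body fields come from HttpController.setDoc in this patch; the
# values are illustrative):
request.post
  url: "http://localhost:3003/project/#{project_id}/doc/#{doc_id}"
  json:
    lines: ["these", "are", "the", "new", "lines"]
    source: "dropbox"
    user_id: "user-id-123"
, (error, res, body) ->
  # a successful set responds 204 No Content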
setTimeout () => - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, (error, res, body) => + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => @statusCode = res.statusCode done() , 200 diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 4ddef90d26..ec70023876 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -43,11 +43,13 @@ module.exports = DocUpdaterClient = request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/flush", (error, res, body) -> callback error, res, body - setDocLines: (project_id, doc_id, lines, callback = (error) ->) -> + setDocLines: (project_id, doc_id, lines, source, user_id, callback = (error) ->) -> request.post { url: "http://localhost:3003/project/#{project_id}/doc/#{doc_id}" json: lines: lines + source: source + user_id: user_id }, (error, res, body) -> callback error, res, body diff --git a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee index d4b5e931b8..b827b584f8 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee @@ -22,6 +22,8 @@ describe "DocumentManager - setDoc", -> @version = 42 @ops = ["mock-ops"] @callback = sinon.stub() + @source = "dropbox" + @user_id = "mock-user-id" describe "with plain tex lines", -> beforeEach -> @@ -34,7 +36,7 @@ describe "DocumentManager - setDoc", -> @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @callback + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, @callback it "should get the current doc lines", -> @DocumentManager.getDoc @@ -48,7 +50,20 @@ describe "DocumentManager - setDoc", -> it "should apply the diff as a ShareJS op", -> @UpdateManager.applyUpdates - .calledWith(@project_id, @doc_id, [doc: @doc_id, v: @version, op: @ops, meta: { type: "external" }]) + .calledWith( + @project_id, + @doc_id, + [ + doc: @doc_id, + v: @version, + op: @ops, + meta: { + type: "external" + source: @source + user_id: @user_id + } + ] + ) .should.equal true it "should flush the doc to Mongo", -> @@ -62,30 +77,6 @@ describe "DocumentManager - setDoc", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - describe "with json lines", -> - beforeEach -> - @beforeLines = [text: "before", text: "lines"] - @afterLines = ["after", "lines"] - - describe "successfully", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version) - @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) - @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null) - @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @callback - - it "should get the current doc lines", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should return not try to get a diff", 
-> - @DiffCodec.diffAsShareJsOp.called.should.equal false - - it "should call the callback", -> - @callback.calledWith(null).should.equal true - describe "without new lines", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version) @@ -96,7 +87,7 @@ describe "DocumentManager - setDoc", -> it "should not try to get the doc lines", -> @DocumentManager.getDoc.called.should.equal false - + diff --git a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee index 2c3924c030..dd2a7c1d59 100644 --- a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee @@ -19,6 +19,8 @@ describe "HttpController - setDoc", -> @project_id = "project-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] + @source = "dropbox" + @user_id = "user-id-123" @res = send: sinon.stub() @req = @@ -27,16 +29,18 @@ describe "HttpController - setDoc", -> doc_id: @doc_id body: lines: @lines + source: @source + user_id: @user_id @next = sinon.stub() describe "successfully", -> beforeEach -> - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(3) + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) @HttpController.setDoc(@req, @res, @next) it "should set the doc", -> @DocumentManager.setDocWithLock - .calledWith(@project_id, @doc_id) + .calledWith(@project_id, @doc_id, @lines, @source, @user_id) .should.equal true it "should return a successful No Content response", -> @@ -46,7 +50,7 @@ describe "HttpController - setDoc", -> it "should log the request", -> @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, lines: @lines, "setting doc via http") + .calledWith(doc_id: @doc_id, project_id: @project_id, lines: @lines, source: @source, user_id: @user_id, "setting doc via http") .should.equal true it "should time the request", -> @@ -54,7 +58,7 @@ describe "HttpController - setDoc", -> describe "when an errors occurs", -> beforeEach -> - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(3, new Error("oops")) + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5, new Error("oops")) @HttpController.setDoc(@req, @res, @next) it "should call next with the error", -> From 2d28f1903f8def8357baf1d0281e1698b2bed0f8 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 19 Mar 2014 15:56:44 +0000 Subject: [PATCH 021/769] Flush to the track changes api using the project id as well --- .../app/coffee/DocOpsManager.coffee | 2 +- .../app/coffee/TrackChangesManager.coffee | 14 ++++++------ .../coffee/helpers/MockTrackChangesApi.coffee | 2 +- .../DocOpsManager/DocOpsManagerTests.coffee | 4 ++-- .../TrackChangesManagerTests.coffee | 22 ++++++++++--------- 5 files changed, 23 insertions(+), 21 deletions(-) diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee b/services/document-updater/app/coffee/DocOpsManager.coffee index 180f1e564b..403488208d 100644 --- a/services/document-updater/app/coffee/DocOpsManager.coffee +++ b/services/document-updater/app/coffee/DocOpsManager.coffee @@ -47,7 +47,7 @@ module.exports = DocOpsManager = pushDocOp: (project_id, doc_id, op, callback = (error) ->) -> RedisManager.pushDocOp doc_id, op, (error, version) -> return callback(error) if error? 
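# The doc_id-only call below is widened to include project_id because the
# track changes flush route (updated later in this patch) is now scoped to a
# project:
#   POST <trackchanges.url>/project/<project_id>/doc/<doc_id>/flush
# where it previously was:
#   POST <trackchanges.url>/doc/<doc_id>/flush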
- TrackChangesManager.pushUncompressedHistoryOp doc_id, op, (error) -> + TrackChangesManager.pushUncompressedHistoryOp project_id, doc_id, op, (error) -> return callback(error) if error? callback null, version diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 9c8b514753..65b9762fe5 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -5,13 +5,13 @@ RedisManager = require "./RedisManager" crypto = require("crypto") module.exports = TrackChangesManager = - flushDocChanges: (doc_id, callback = (error) ->) -> + flushDocChanges: (project_id, doc_id, callback = (error) ->) -> if !settings.apis?.trackchanges? logger.warn doc_id: doc_id, "track changes API is not configured, so not flushing" return callback() - url = "#{settings.apis.trackchanges.url}/doc/#{doc_id}/flush" - logger.log doc_id: doc_id, url: url, "flushing doc in track changes api" + url = "#{settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" + logger.log project_id: project_id, doc_id: doc_id, url: url, "flushing doc in track changes api" request.post url, (error, res, body)-> if error? return callback(error) @@ -22,7 +22,7 @@ module.exports = TrackChangesManager = return callback(error) FLUSH_EVERY_N_OPS: 50 - pushUncompressedHistoryOp: (doc_id, op, callback = (error) ->) -> + pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error) ->) -> RedisManager.getHistoryLoadManagerThreshold (error, threshold) -> return callback(error) if error? if TrackChangesManager.getLoadManagerBucket(doc_id) < threshold @@ -31,10 +31,10 @@ module.exports = TrackChangesManager = if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 # Do this in the background since it uses HTTP and so may be too # slow to wait for when processing a doc update. - logger.log length: length, doc_id: doc_id, "flushing track changes api" - TrackChangesManager.flushDocChanges doc_id, (error) -> + logger.log length: length, doc_id: doc_id, project_id: project_id, "flushing track changes api" + TrackChangesManager.flushDocChanges project_id, doc_id, (error) -> if error? - logger.error err: error, doc_id: doc_id, "error flushing doc to track changes api" + logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api" callback() else callback() diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee index 2fdff0d3ca..43416e37fc 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee @@ -6,7 +6,7 @@ module.exports = MockTrackChangesApi = callback() run: () -> - app.post "/doc/:doc_id/flush", (req, res, next) => + app.post "/project/:project_id/doc/:doc_id/flush", (req, res, next) => @flushDoc req.params.doc_id, (error) -> if error? 
res.send 500 diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee index fe13d1cd55..1680a367d2 100644 --- a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee @@ -311,7 +311,7 @@ describe "DocOpsManager", -> beforeEach -> @op = "mock-op" @RedisManager.pushDocOp = sinon.stub().callsArgWith(2, null, @version = 42) - @TrackChangesManager.pushUncompressedHistoryOp = sinon.stub().callsArg(2) + @TrackChangesManager.pushUncompressedHistoryOp = sinon.stub().callsArg(3) @DocOpsManager.pushDocOp @project_id, @doc_id, @op, @callback it "should push the op in to the docOps list", -> @@ -321,7 +321,7 @@ describe "DocOpsManager", -> it "should push the op into the pushUncompressedHistoryOp", -> @TrackChangesManager.pushUncompressedHistoryOp - .calledWith(@doc_id, @op) + .calledWith(@project_id, @doc_id, @op) .should.equal true it "should call the callback with the version", -> diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index dd72937c39..148eeb33bb 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -10,6 +10,7 @@ describe "TrackChangesManager", -> "settings-sharelatex": @Settings = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./RedisManager": @RedisManager = {} + @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -21,11 +22,11 @@ describe "TrackChangesManager", -> describe "successfully", -> beforeEach -> @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) - @TrackChangesManager.flushDocChanges @doc_id, @callback + @TrackChangesManager.flushDocChanges @project_id, @doc_id, @callback it "should send a request to the track changes api", -> @request.post - .calledWith("#{@Settings.apis.trackchanges.url}/doc/#{@doc_id}/flush") + .calledWith("#{@Settings.apis.trackchanges.url}/project/#{@project_id}/doc/#{@doc_id}/flush") .should.equal true it "should return the callback", -> @@ -34,7 +35,7 @@ describe "TrackChangesManager", -> describe "when the track changes api returns an error", -> beforeEach -> @request.post = sinon.stub().callsArgWith(1, null, statusCode: 500) - @TrackChangesManager.flushDocChanges @doc_id, @callback + @TrackChangesManager.flushDocChanges @project_id, @doc_id, @callback it "should return the callback with an error", -> @callback.calledWith(new Error("track changes api return non-success code: 500")).should.equal true @@ -42,7 +43,7 @@ describe "TrackChangesManager", -> describe "pushUncompressedHistoryOp", -> beforeEach -> @op = "mock-op" - @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(1) + @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) describe "when the doc is under the load manager threshold", -> beforeEach -> @@ -52,7 +53,7 @@ describe "TrackChangesManager", -> describe "pushing the op", -> beforeEach -> @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 1) - @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + @TrackChangesManager.pushUncompressedHistoryOp 
@project_id, @doc_id, @op, @callback it "should push the op into redis", -> @RedisManager.pushUncompressedHistoryOp @@ -69,25 +70,26 @@ describe "TrackChangesManager", -> beforeEach -> @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) - @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should tell the track changes api to flush", -> @TrackChangesManager.flushDocChanges - .calledWith(@doc_id) + .calledWith(@project_id, @doc_id) .should.equal true describe "when TrackChangesManager errors", -> beforeEach -> @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) - @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(1, @error = new Error("oops")) - @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) + @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should log out the error", -> @logger.error .calledWith( err: @error doc_id: @doc_id + project_id: @project_id "error flushing doc to track changes api" ) .should.equal true @@ -97,7 +99,7 @@ describe "TrackChangesManager", -> @RedisManager.getHistoryLoadManagerThreshold = sinon.stub().callsArgWith(0, null, 40) @TrackChangesManager.getLoadManagerBucket = sinon.stub().returns(50) @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 1) - @TrackChangesManager.pushUncompressedHistoryOp @doc_id, @op, @callback + @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should not push the op", -> @RedisManager.pushUncompressedHistoryOp.called.should.equal false From c0be3ef37b76a6a358defadf31c573ef9c805aa5 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 21 Mar 2014 12:41:05 +0000 Subject: [PATCH 022/769] Put doc_ids with history changes into project level set --- .../app/coffee/RedisKeyBuilder.coffee | 3 +- .../app/coffee/RedisManager.coffee | 16 ++-- .../app/coffee/TrackChangesManager.coffee | 27 ++---- .../coffee/ApplyingUpdatesToADocTests.coffee | 48 ++++------ .../clearDocFromPendingUpdatesSetTests.coffee | 1 + .../getDocsWithPendingUpdatesTests.coffee | 1 + .../getHistoryLoadManagerThreshold.coffee | 43 --------- .../RedisManager/prependDocOpsTests.coffee | 3 +- .../coffee/RedisManager/pushDocOpTests.coffee | 3 +- .../pushUncompressedHistoryOpTests.coffee | 13 ++- .../TrackChangesManagerTests.coffee | 95 ++++++++----------- 11 files changed, 94 insertions(+), 159 deletions(-) delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/getHistoryLoadManagerThreshold.coffee diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee index de2bc85443..0cfd330721 100644 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -8,6 +8,7 @@ DOCLINES = "doclines" DOCOPS = "DocOps" DOCVERSION = "DocVersion" DOCIDSWITHPENDINGUPDATES = "DocsWithPendingUpdates" +DOCSWITHHISTORYOPS = "DocsWithHistoryOps" UNCOMPRESSED_HISTORY_OPS = "UncompressedHistoryOps" module.exports = @@ -25,7 +26,7 @@ module.exports = docsWithPendingUpdates : DOCIDSWITHPENDINGUPDATES combineProjectIdAndDocId: (project_id, doc_id) -> 
"#{project_id}:#{doc_id}" splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":") - historyLoadManagerThreshold: "HistoryLoadManagerThreshold" + docsWithHistoryOps: (op) -> DOCSWITHHISTORYOPS + ":" + op.project_id now : (key)-> d = new Date() d.getDate()+":"+(d.getMonth()+1)+":"+d.getFullYear()+":"+key diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 3d8efdbc70..c47e679339 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -155,9 +155,15 @@ module.exports = jsonOps = ops.map (op) -> JSON.stringify op rclient.lpush keys.docOps(doc_id: doc_id), jsonOps.reverse(), callback - pushUncompressedHistoryOp: (doc_id, op, callback = (error) ->) -> + pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error, length) ->) -> jsonOp = JSON.stringify op - rclient.rpush keys.uncompressedHistoryOp(doc_id: doc_id), jsonOp, callback + multi = rclient.multi() + multi.rpush keys.uncompressedHistoryOp(doc_id: doc_id), jsonOp + multi.sadd keys.docsWithHistoryOps(project_id: project_id), doc_id + multi.exec (error, results) -> + return callback(error) if error? + [length, _] = results + callback(error, length) getDocOpsLength: (doc_id, callback = (error, length) ->) -> rclient.llen keys.docOps(doc_id: doc_id), callback @@ -165,12 +171,6 @@ module.exports = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback - getHistoryLoadManagerThreshold: (callback = (error, threshold) ->) -> - rclient.get keys.historyLoadManagerThreshold, (error, value) -> - return callback(error) if error? - return callback null, 0 if !value? - callback null, parseInt(value, 10) - getDocumentsProjectId = (doc_id, callback)-> rclient.get keys.projectKey({doc_id:doc_id}), (err, project_id)-> diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 65b9762fe5..2d887a4f9c 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -23,22 +23,15 @@ module.exports = TrackChangesManager = FLUSH_EVERY_N_OPS: 50 pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error) ->) -> - RedisManager.getHistoryLoadManagerThreshold (error, threshold) -> + logger.log project_id: project_id, doc_id: doc_id, "pushing history op" + RedisManager.pushUncompressedHistoryOp project_id, doc_id, op, (error, length) -> return callback(error) if error? - if TrackChangesManager.getLoadManagerBucket(doc_id) < threshold - RedisManager.pushUncompressedHistoryOp doc_id, op, (error, length) -> - return callback(error) if error? - if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 - # Do this in the background since it uses HTTP and so may be too - # slow to wait for when processing a doc update. - logger.log length: length, doc_id: doc_id, project_id: project_id, "flushing track changes api" - TrackChangesManager.flushDocChanges project_id, doc_id, (error) -> - if error? 
- logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api" - callback() - else - callback() + if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 + # Do this in the background since it uses HTTP and so may be too + # slow to wait for when processing a doc update. + logger.log length: length, doc_id: doc_id, project_id: project_id, "flushing track changes api" + TrackChangesManager.flushDocChanges project_id, doc_id, (error) -> + if error? + logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api" + callback() - getLoadManagerBucket: (doc_id) -> - hash = crypto.createHash("md5").update(doc_id).digest("hex") - return parseInt(hash.slice(0,4), 16) % 100 diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 7d5ac144e7..6b4010b305 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -12,7 +12,7 @@ MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Applying updates to a doc", -> - before (done) -> + before -> @lines = ["one", "two", "three"] @update = doc: @doc_id @@ -22,8 +22,6 @@ describe "Applying updates to a doc", -> }] v: 0 @result = ["one", "one and a half", "two", "three"] - rclient.set "HistoryLoadManagerThreshold", 100, (error) => - done() describe "when the document is not loaded", -> before (done) -> @@ -51,8 +49,13 @@ describe "Applying updates to a doc", -> it "should push the applied updates to the track changes api", (done) -> rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + throw error if error? JSON.parse(updates[0]).op.should.deep.equal @update.op - done() + rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + throw error if error? 
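# A note on what the new assertion checks: pushUncompressedHistoryOp now runs
# RPUSH and SADD inside a single MULTI, and multi.exec returns one reply per
# queued command in order, i.e.
#   results = [<length of UncompressedHistoryOps:doc_id after the push>, <sadd reply>]
# The [length, _] destructuring keeps the list length that drives the
# FLUSH_EVERY_N_OPS check; SISMEMBER returning 1 here verifies the SADD half.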
+ result.should.equal 1 + done() + describe "when the document is loaded", -> before (done) -> @@ -81,7 +84,9 @@ describe "Applying updates to a doc", -> it "should push the applied updates to the track changes api", (done) -> rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => JSON.parse(updates[0]).op.should.deep.equal @update.op - done() + rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + result.should.equal 1 + done() describe "when the document has been deleted", -> describe "when the ops come in a single linear order", -> @@ -131,7 +136,10 @@ describe "Applying updates to a doc", -> updates = (JSON.parse(u) for u in updates) for appliedUpdate, i in @updates appliedUpdate.op.should.deep.equal updates[i].op - done() + + rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + result.should.equal 1 + done() describe "when older ops come in after the delete", -> before -> @@ -235,7 +243,7 @@ describe "Applying updates to a doc", -> done() describe "with enough updates to flush to the track changes api", -> - beforeEach -> + beforeEach (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines @@ -249,27 +257,13 @@ describe "Applying updates to a doc", -> sinon.spy MockTrackChangesApi, "flushDoc" + DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => + throw error if error? + setTimeout done, 200 + afterEach -> MockTrackChangesApi.flushDoc.restore() - describe "when under the load manager threshold", -> - beforeEach (done) -> - rclient.set "HistoryLoadManagerThreshold", 100, (error) => - throw error if error? - DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => - throw error if error? - setTimeout done, 200 + it "should flush the doc twice", -> + MockTrackChangesApi.flushDoc.calledTwice.should.equal true - it "should flush the doc twice", -> - MockTrackChangesApi.flushDoc.calledTwice.should.equal true - - describe "when over the load manager threshold", -> - beforeEach (done) -> - rclient.set "HistoryLoadManagerThreshold", 0, (error) => - throw error if error? - DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => - throw error if error? 
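# Worked numbers behind the "should flush the doc twice" assertion in this
# file: with FLUSH_EVERY_N_OPS = 50, the background flush fires whenever the
# history list length lands on a positive multiple of 50, so enough updates to
# take the list to length 100 produce flushes at lengths 50 and 100, i.e.
# exactly two calls to the mock track changes api.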
- setTimeout done, 200 - - it "should not flush the doc", -> - MockTrackChangesApi.flushDoc.called.should.equal false diff --git a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee index 676d454167..016d96a2ae 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee @@ -12,6 +12,7 @@ describe "RedisManager.clearDocFromPendingUpdatesSet", -> @RedisManager = SandboxedModule.require modulePath, requires: "redis" : createClient: () => @rclient = auth:-> + "logger-sharelatex": {} @rclient.srem = sinon.stub().callsArg(2) @RedisManager.clearDocFromPendingUpdatesSet(@project_id, @doc_id, @callback) diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee index 602197ad57..d179b45f9d 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee @@ -10,6 +10,7 @@ describe "RedisManager.getDocsWithPendingUpdates", -> @RedisManager = SandboxedModule.require modulePath, requires: "redis" : createClient: () => @rclient = auth:-> + "logger-sharelatex": {} @docs = [{ doc_id: "doc-id-1" diff --git a/services/document-updater/test/unit/coffee/RedisManager/getHistoryLoadManagerThreshold.coffee b/services/document-updater/test/unit/coffee/RedisManager/getHistoryLoadManagerThreshold.coffee deleted file mode 100644 index d69cec370c..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/getHistoryLoadManagerThreshold.coffee +++ /dev/null @@ -1,43 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager.js" -SandboxedModule = require('sandboxed-module') - -describe "RedisManager.getHistoryLoadManagerThreshold", -> - beforeEach -> - @RedisManager = SandboxedModule.require modulePath, requires: - "redis": createClient: () => - @rclient = - auth: () -> - "logger-sharelatex": @logger = {log: sinon.stub()} - @callback = sinon.stub() - - describe "with no value", -> - beforeEach -> - @rclient.get = sinon.stub().callsArgWith(1, null, null) - @RedisManager.getHistoryLoadManagerThreshold @callback - - it "should get the value", -> - @rclient.get - .calledWith("HistoryLoadManagerThreshold") - .should.equal true - - it "should call the callback with 0", -> - @callback.calledWith(null, 0).should.equal true - - describe "with a value", -> - beforeEach -> - @rclient.get = sinon.stub().callsArgWith(1, null, "42") - @RedisManager.getHistoryLoadManagerThreshold @callback - - it "should get the value", -> - @rclient.get - .calledWith("HistoryLoadManagerThreshold") - .should.equal true - - it "should call the callback with the numeric value", -> - @callback.calledWith(null, 42).should.equal true - - - diff --git a/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee index b4a8192d12..dee8b2f435 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee +++ 
b/services/document-updater/test/unit/coffee/RedisManager/prependDocOpsTests.coffee @@ -4,13 +4,14 @@ should = chai.should() modulePath = "../../../../app/js/RedisManager" SandboxedModule = require('sandboxed-module') -describe "RedisManager.clearDocFromPendingUpdatesSet", -> +describe "RedisManager.prependDocOps", -> beforeEach -> @doc_id = "document-id" @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: "redis" : createClient: () => @rclient = auth:-> + "logger-sharelatex": {} @rclient.lpush = sinon.stub().callsArg(2) @ops = [ diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee index 0c76730437..d911af16bd 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee @@ -4,7 +4,7 @@ should = chai.should() modulePath = "../../../../app/js/RedisManager" SandboxedModule = require('sandboxed-module') -describe "RedisManager.getPreviousDocOpsTests", -> +describe "RedisManager.pushDocOp", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: @@ -12,6 +12,7 @@ describe "RedisManager.getPreviousDocOpsTests", -> @rclient = auth: -> multi: => @rclient + "logger-sharelatex": {} @doc_id = "doc-id-123" beforeEach -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index 415f8e4572..d6e19f163e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -13,20 +13,27 @@ describe "RedisManager.pushUncompressedHistoryOp", -> multi: () => @rclient "logger-sharelatex": @logger = {log: sinon.stub()} @doc_id = "doc-id-123" + @project_id = "project-id-123" @callback = sinon.stub() - @rclient.rpush = sinon.stub() describe "successfully", -> beforeEach -> @op = { op: [{ i: "foo", p: 4 }] } - @rclient.rpush = sinon.stub().callsArgWith(2, null, @length = 42) - @RedisManager.pushUncompressedHistoryOp @doc_id, @op, @callback + @rclient.rpush = sinon.stub() + @rclient.sadd = sinon.stub() + @rclient.exec = sinon.stub().callsArgWith(0, null, [@length = 42, "1"]) + @RedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should push the doc op into the doc ops list", -> @rclient.rpush .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) .should.equal true + it "should add the doc_id to the set of which records the project docs", -> + @rclient.sadd + .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) + .should.equal true + it "should call the callback with the length", -> @callback.calledWith(null, @length).should.equal true diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index 148eeb33bb..8fad5322e2 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -45,69 +45,48 @@ describe "TrackChangesManager", -> @op = "mock-op" 
@TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) - describe "when the doc is under the load manager threshold", -> + describe "pushing the op", -> beforeEach -> - @RedisManager.getHistoryLoadManagerThreshold = sinon.stub().callsArgWith(0, null, 40) - @TrackChangesManager.getLoadManagerBucket = sinon.stub().returns(30) - - describe "pushing the op", -> - beforeEach -> - @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 1) - @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback - - it "should push the op into redis", -> - @RedisManager.pushUncompressedHistoryOp - .calledWith(@doc_id, @op) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - - it "should not try to flush the op", -> - @TrackChangesManager.flushDocChanges.called.should.equal false - - describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> - beforeEach -> - @RedisManager.pushUncompressedHistoryOp = - sinon.stub().callsArgWith(2, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) - @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback - - it "should tell the track changes api to flush", -> - @TrackChangesManager.flushDocChanges - .calledWith(@project_id, @doc_id) - .should.equal true - - describe "when TrackChangesManager errors", -> - beforeEach -> - @RedisManager.pushUncompressedHistoryOp = - sinon.stub().callsArgWith(2, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) - @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) - @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback - - it "should log out the error", -> - @logger.error - .calledWith( - err: @error - doc_id: @doc_id - project_id: @project_id - "error flushing doc to track changes api" - ) - .should.equal true - - describe "when the doc is over the load manager threshold", -> - beforeEach -> - @RedisManager.getHistoryLoadManagerThreshold = sinon.stub().callsArgWith(0, null, 40) - @TrackChangesManager.getLoadManagerBucket = sinon.stub().returns(50) - @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(2, null, 1) + @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(3, null, 1) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback - it "should not push the op", -> - @RedisManager.pushUncompressedHistoryOp.called.should.equal false - - it "should not try to flush the op", -> - @TrackChangesManager.flushDocChanges.called.should.equal false + it "should push the op into redis", -> + @RedisManager.pushUncompressedHistoryOp + .calledWith(@project_id, @doc_id, @op) + .should.equal true it "should call the callback", -> @callback.called.should.equal true + it "should not try to flush the op", -> + @TrackChangesManager.flushDocChanges.called.should.equal false + + describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> + beforeEach -> + @RedisManager.pushUncompressedHistoryOp = + sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback + + it "should tell the track changes api to flush", -> + @TrackChangesManager.flushDocChanges + .calledWith(@project_id, @doc_id) + .should.equal true + + describe "when TrackChangesManager errors", -> + beforeEach -> + @RedisManager.pushUncompressedHistoryOp = + sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) 
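# A note on the stub indices in these tests: sinon's callsArgWith(n, ...)
# invokes the nth (zero-based) argument, so the callback of the widened
# pushUncompressedHistoryOp (project_id, doc_id, op, callback) sits at
# index 3, and the callback of flushDocChanges (project_id, doc_id, callback)
# at index 2.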
+ @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) + @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback + + it "should log out the error", -> + @logger.error + .calledWith( + err: @error + doc_id: @doc_id + project_id: @project_id + "error flushing doc to track changes api" + ) + .should.equal true + From 375427bf5eb1cfbe78a8eeb968379052a721f2c4 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 21 Mar 2014 13:15:42 +0000 Subject: [PATCH 023/769] Remove extraneous logging --- services/document-updater/app/coffee/TrackChangesManager.coffee | 1 - 1 file changed, 1 deletion(-) diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 2d887a4f9c..90cba86b36 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -23,7 +23,6 @@ module.exports = TrackChangesManager = FLUSH_EVERY_N_OPS: 50 pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error) ->) -> - logger.log project_id: project_id, doc_id: doc_id, "pushing history op" RedisManager.pushUncompressedHistoryOp project_id, doc_id, op, (error, length) -> return callback(error) if error? if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 From c2ebaaa338b3d546bf422ddb37058f3f505f85ef Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 10 Apr 2014 12:44:46 +0100 Subject: [PATCH 024/769] Split lines on Windows line endings too --- .../app/coffee/ShareJsUpdateManager.coffee | 5 +- .../coffee/ShareJsUpdateManagerTests.coffee | 151 +++++++----------- 2 files changed, 57 insertions(+), 99 deletions(-) diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 9cde95492b..5f3cba4fbc 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -44,10 +44,7 @@ module.exports = ShareJsUpdateManager = if error? 
@_sendError(project_id, doc_id, error) return callback(error) - if typeof data.snapshot == "string" - docLines = data.snapshot.split("\n") - else - docLines = data.snapshot.lines + docLines = data.snapshot.split(/\r\n|\n|\r/) callback(null, docLines, data.v) _listenForOps: (model) -> diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee index af5a475836..20e737fc97 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee @@ -29,113 +29,74 @@ describe "ShareJsUpdateManager", -> @ShareJsUpdateManager.getNewShareJsModel = sinon.stub().returns(@model) @ShareJsUpdateManager._listenForOps = sinon.stub() @ShareJsUpdateManager.removeDocFromCache = sinon.stub().callsArg(1) + @updates = [ + {p: 4, t: "foo"} + {p: 6, t: "bar"} + ] + @updatedDocLines = ["one", "two"] - describe "with a text document", -> - beforeEach -> - @updates = [ - {p: 4, t: "foo"} - {p: 6, t: "bar"} - ] - @updatedDocLines = ["one", "two"] + describe "successfully", -> + beforeEach (done) -> + @model.getSnapshot.callsArgWith(1, null, {snapshot: @updatedDocLines.join("\n"), v: @version}) + @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @callback(err, docLines, version) + done() - describe "successfully", -> - beforeEach (done) -> - @model.getSnapshot.callsArgWith(1, null, {snapshot: @updatedDocLines.join("\n"), v: @version}) - @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => - @callback(err, docLines, version) - done() + it "should create a new ShareJs model", -> + @ShareJsUpdateManager.getNewShareJsModel + .called.should.equal true - it "should create a new ShareJs model", -> - @ShareJsUpdateManager.getNewShareJsModel - .called.should.equal true + it "should listen for ops on the model", -> + @ShareJsUpdateManager._listenForOps + .calledWith(@model) + .should.equal true - it "should listen for ops on the model", -> - @ShareJsUpdateManager._listenForOps - .calledWith(@model) - .should.equal true + it "should send each update to ShareJs", -> + for update in @updates + @model.applyOp + .calledWith("#{@project_id}:#{@doc_id}", update).should.equal true - it "should send each update to ShareJs", -> - for update in @updates - @model.applyOp - .calledWith("#{@project_id}:#{@doc_id}", update).should.equal true + it "should get the updated doc lines", -> + @model.getSnapshot + .calledWith("#{@project_id}:#{@doc_id}") + .should.equal true - it "should get the updated doc lines", -> - @model.getSnapshot - .calledWith("#{@project_id}:#{@doc_id}") - .should.equal true + it "should return the updated doc lines", -> + @callback.calledWith(null, @updatedDocLines, @version).should.equal true - it "should return the updated doc lines", -> - @callback.calledWith(null, @updatedDocLines, @version).should.equal true + describe "when applyOp fails", -> + beforeEach (done) -> + @error = new Error("Something went wrong") + @ShareJsUpdateManager._sendError = sinon.stub() + @model.applyOp = sinon.stub().callsArgWith(2, @error) + @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @callback(err, docLines, version) + done() - describe "when applyOp fails", -> - beforeEach (done) -> - @error = new Error("Something went wrong") - @ShareJsUpdateManager._sendError = sinon.stub() - @model.applyOp = 
sinon.stub().callsArgWith(2, @error) - @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => - @callback(err, docLines, version) - done() + it "should call sendError with the error", -> + @ShareJsUpdateManager._sendError + .calledWith(@project_id, @doc_id, @error) + .should.equal true - it "should call sendError with the error", -> - @ShareJsUpdateManager._sendError - .calledWith(@project_id, @doc_id, @error) - .should.equal true + it "should call the callback with the error", -> + @callback.calledWith(@error).should.equal true - it "should call the callback with the error", -> - @callback.calledWith(@error).should.equal true + describe "when getSnapshot fails", -> + beforeEach (done) -> + @error = new Error("Something went wrong") + @ShareJsUpdateManager._sendError = sinon.stub() + @model.getSnapshot.callsArgWith(1, @error) + @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @callback(err, docLines, version) + done() - describe "when getSnapshot fails", -> - beforeEach (done) -> - @error = new Error("Something went wrong") - @ShareJsUpdateManager._sendError = sinon.stub() - @model.getSnapshot.callsArgWith(1, @error) - @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => - @callback(err, docLines, version) - done() + it "should call sendError with the error", -> + @ShareJsUpdateManager._sendError + .calledWith(@project_id, @doc_id, @error) + .should.equal true - it "should call sendError with the error", -> - @ShareJsUpdateManager._sendError - .calledWith(@project_id, @doc_id, @error) - .should.equal true - - it "should call the callback with the error", -> - @callback.calledWith(@error).should.equal true - - describe "with a JSON document", -> - beforeEach -> - @updates = [ - {p: ["lines", 0], dl: { foo: "bar "}} - ] - @docLines = [text: "one", text: "two"] - - describe "successfully", -> - beforeEach (done) -> - @model.getSnapshot.callsArgWith(1, null, {snapshot: {lines: @docLines}, v: @version}) - @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => - @callback(err, docLines, version) - done() - - it "should create a new ShareJs model", -> - @ShareJsUpdateManager.getNewShareJsModel - .called.should.equal true - - it "should listen for ops on the model", -> - @ShareJsUpdateManager._listenForOps - .calledWith(@model) - .should.equal true - - it "should send each update to ShareJs", -> - for update in @updates - @model.applyOp - .calledWith("#{@project_id}:#{@doc_id}", update).should.equal true - - it "should get the updated doc lines", -> - @model.getSnapshot - .calledWith("#{@project_id}:#{@doc_id}") - .should.equal true - - it "should return the updated doc lines", -> - @callback.calledWith(null, @docLines, @version).should.equal true + it "should call the callback with the error", -> + @callback.calledWith(@error).should.equal true describe "_listenForOps", -> beforeEach -> From d1434f764614223fcb03958442882a2604ad64de Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 7 May 2014 09:48:29 +0100 Subject: [PATCH 025/769] Increase redis lock expiry time to 30 seconds --- services/document-updater/app/coffee/LockManager.coffee | 4 ++-- .../test/unit/coffee/LockManager/tryLockTests.coffee | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 97c06ad721..a43bd84a1b 100644 --- 
a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -10,10 +10,10 @@ logger = require "logger-sharelatex" module.exports = LockManager = LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock MAX_LOCK_WAIT_TIME: 10000 # 10s maximum time to spend trying to get the lock + REDIS_LOCK_EXPIRY: 30 # seconds. Time until lock auto expires in redis. tryLock : (doc_id, callback = (err, isFree)->)-> - tenSeconds = 10 - rclient.set keys.blockingKey(doc_id: doc_id), "locked", "EX", 10, "NX", (err, gotLock)-> + rclient.set keys.blockingKey(doc_id: doc_id), "locked", "EX", LockManager.REDIS_LOCK_EXPIRY, "NX", (err, gotLock)-> return callback(err) if err? if gotLock == "OK" metrics.inc "doc-not-blocking" diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index cff2b9538b..6c2c8972af 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -21,7 +21,7 @@ describe 'LockManager - trying the lock', -> @LockManager.tryLock @doc_id, @callback it "should set the lock key with an expiry if it is not set", -> - @set.calledWith("Blocking:#{@doc_id}", "locked", "EX", 10, "NX") + @set.calledWith("Blocking:#{@doc_id}", "locked", "EX", 30, "NX") .should.equal true it "should return the callback with true", -> From f511ebd4b63069ca7b1a15151df91ea9b13cbb47 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 7 May 2014 10:05:07 +0100 Subject: [PATCH 026/769] Exit cleanly on SIGINT et al --- services/document-updater/app.coffee | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 7168017790..ddc438f752 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -22,10 +22,13 @@ app.configure -> app.use app.router rclient.subscribe("pending-updates") -rclient.on "message", (channel, doc_key)-> +rclient.on "message", (channel, doc_key) -> [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) - UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> - logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? + if !Settings.shuttingDown + UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> + logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? 
+ else + logger.log project_id: project_id, doc_id: doc_id, "ignoring incoming update" UpdateManager.resumeProcessing() @@ -47,7 +50,10 @@ app.get '/total', (req, res)-> res.send {total:count} app.get '/status', (req, res)-> - res.send('document updater is alive') + if Settings.shuttingDown + res.send 503 # Service unavailable + else + res.send('document updater is alive') app.use (error, req, res, next) -> logger.error err: error, "request errored" @@ -56,6 +62,19 @@ app.use (error, req, res, next) -> else res.send(500, "Oops, something went wrong") +shutdownCleanly = (signal) -> + return () -> + logger.log signal: signal, "received interrupt, cleaning up" + Settings.shuttingDown = true + setTimeout () -> + logger.log signal: signal, "shutting down" + process.exit() + , 10000 + + port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 3003 app.listen port, "localhost", -> logger.log("documentupdater-sharelatex server listening on port #{port}") + +for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT'] + process.on signal, shutdownCleanly(signal) \ No newline at end of file From 6011ce47837fc086dfa8d7f38889c195a37f1a56 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 8 May 2014 09:28:13 +0100 Subject: [PATCH 027/769] Use new metrics module --- services/document-updater/app.coffee | 15 ++++++------ .../app/coffee/Metrics.coffee | 24 +------------------ services/document-updater/package.json | 1 + 3 files changed, 9 insertions(+), 31 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index ddc438f752..a52b26f500 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -7,7 +7,6 @@ RedisManager = require('./app/js/RedisManager.js') UpdateManager = require('./app/js/UpdateManager.js') Keys = require('./app/js/RedisKeyBuilder') redis = require('redis') -metrics = require('./app/js/Metrics') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" @@ -15,9 +14,14 @@ redisConf = Settings.redis.web rclient = redis.createClient(redisConf.port, redisConf.host) rclient.auth(redisConf.password) +Path = require "path" +Metrics = require "metrics-sharelatex" +Metrics.initialize("doc-updater") +Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger) + app = express() app.configure -> - app.use(express.logger(':remote-addr - [:date] - :user-agent ":method :url" :status - :response-time ms')); + app.use(Metrics.http.monitor(logger)); app.use express.bodyParser() app.use app.router @@ -32,10 +36,6 @@ rclient.on "message", (channel, doc_key) -> UpdateManager.resumeProcessing() -app.use (req, res, next)-> - metrics.inc "http-request" - next() - app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded @@ -44,7 +44,7 @@ app.delete '/project/:project_id', HttpController.deleteProjec app.post '/project/:project_id/flush', HttpController.flushProject app.get '/total', (req, res)-> - timer = new metrics.Timer("http.allDocList") + timer = new Metrics.Timer("http.allDocList") RedisManager.getCountOfDocsInMemory (err, count)-> timer.done() res.send {total:count} @@ -71,7 +71,6 @@ shutdownCleanly = (signal) -> process.exit() , 10000 - port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 
3003 app.listen port, "localhost", -> logger.log("documentupdater-sharelatex server listening on port #{port}") diff --git a/services/document-updater/app/coffee/Metrics.coffee b/services/document-updater/app/coffee/Metrics.coffee index 0b98550c0e..4bf5c6dba5 100644 --- a/services/document-updater/app/coffee/Metrics.coffee +++ b/services/document-updater/app/coffee/Metrics.coffee @@ -1,23 +1 @@ -StatsD = require('lynx') -statsd = new StatsD('localhost', 8125, {on_error:->}) - -buildKey = (key)-> "doc-updater.#{process.env.NODE_ENV}.#{key}" - -module.exports = - set : (key, value, sampleRate = 1)-> - statsd.set buildKey(key), value, sampleRate - - inc : (key, sampleRate = 1)-> - statsd.increment buildKey(key), sampleRate - - Timer : class - constructor :(key, sampleRate = 1)-> - this.start = new Date() - this.key = buildKey(key) - done:-> - timeSpan = new Date - this.start - statsd.timing(this.key, timeSpan, this.sampleRate) - - gauge : (key, value, sampleRate = 1)-> - statsd.gauge key, value, sampleRate - +module.exports = require "metrics-sharelatex" \ No newline at end of file diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fbcad4abc1..25feab87d8 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -14,6 +14,7 @@ "coffee-script": "1.4.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#master", "sinon": "~1.5.2", "mongojs": "0.9.11" }, From 0199f2e1297bde87106db5760b39de867fe93395 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 14 May 2014 13:28:17 +0100 Subject: [PATCH 028/769] Use version from web, with fallback to old mongo collection --- services/document-updater/Gruntfile.coffee | 3 +- .../app/coffee/DocOpsManager.coffee | 1 + .../app/coffee/PersistenceManager.coffee | 29 +++++- .../app/coffee/mongojs.coffee | 6 ++ .../coffee/ApplyingUpdatesToADocTests.coffee | 90 +++++++++++++++--- .../DocOpsManager/DocOpsManagerTests.coffee | 8 +- .../getDocFromWebTests.coffee | 86 +++++++++++++++++ .../PersistenceManager/getDocTests.coffee | 94 ++++++++----------- .../getDocVersionInMongoTests.coffee | 46 +++++++++ .../test/unit/js/module-loader.js | 29 ------ 10 files changed, 284 insertions(+), 108 deletions(-) create mode 100644 services/document-updater/app/coffee/mongojs.coffee create mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee create mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee delete mode 100644 services/document-updater/test/unit/js/module-loader.js diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 8c96ea0650..3497455a57 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -52,6 +52,7 @@ module.exports = (grunt) -> clean: app: ["app/js"] acceptance_tests: ["test/acceptance/js"] + unit_tests: ["test/unit/js"] mochaTest: unit: @@ -102,7 +103,7 @@ module.exports = (grunt) -> grunt.registerTask 'help', 'Display this help list', 'availabletasks' grunt.registerTask 'compile:server', 'Compile the server side coffee script', ['clean:app', 'coffee:app', 'coffee:app_dir'] - grunt.registerTask 'compile:unit_tests', 'Compile the unit tests', ['coffee:unit_tests'] + 
grunt.registerTask 'compile:unit_tests', 'Compile the unit tests', ['clean:unit_tests', 'coffee:unit_tests'] grunt.registerTask 'compile:acceptance_tests', 'Compile the acceptance tests', ['clean:acceptance_tests', 'coffee:acceptance_tests'] grunt.registerTask 'compile:tests', 'Compile all the tests', ['compile:acceptance_tests', 'compile:unit_tests'] grunt.registerTask 'compile', 'Compiles everything need to run document-updater-sharelatex', ['compile:server'] diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee b/services/document-updater/app/coffee/DocOpsManager.coffee index a8896f8b12..a85a1e18ee 100644 --- a/services/document-updater/app/coffee/DocOpsManager.coffee +++ b/services/document-updater/app/coffee/DocOpsManager.coffee @@ -14,3 +14,4 @@ module.exports = DocOpsManager = return callback(error) if error? callback null, version + diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 03cbe78cbe..6ac999629a 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -2,9 +2,35 @@ request = require "request" Settings = require "settings-sharelatex" Errors = require "./Errors" Metrics = require "./Metrics" +{db, ObjectId} = require("./mongojs") module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines) ->) -> + getDoc: (project_id, doc_id, callback = (error, lines, version) ->) -> + PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines, version) -> + return callback(error) if error? + if version? + callback null, lines, version + else + PersistenceManager.getDocVersionInMongo doc_id, (error, version) -> + return callback(error) if error? + if version? + callback null, lines, version + else + callback null, lines, 0 + + getDocVersionInMongo: (doc_id, callback = (error, version) ->) -> + db.docOps.find { + doc_id: ObjectId(doc_id) + }, { + version: 1 + }, (error, docs) -> + return callback(error) if error? + if docs.length < 1 or !docs[0].version? + return callback null, null + else + return callback null, docs[0].version + + getDocFromWeb: (project_id, doc_id, _callback = (error, lines, version) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) 
-> timer.done() @@ -62,6 +88,5 @@ module.exports = PersistenceManager = return callback(new Errors.NotFoundError("doc not not found: #{url}")) else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - diff --git a/services/document-updater/app/coffee/mongojs.coffee b/services/document-updater/app/coffee/mongojs.coffee new file mode 100644 index 0000000000..cf9f5fec86 --- /dev/null +++ b/services/document-updater/app/coffee/mongojs.coffee @@ -0,0 +1,6 @@ +Settings = require "settings-sharelatex" +mongojs = require "mongojs" +db = mongojs.connect(Settings.mongo.url, ["docOps"]) +module.exports = + db: db + ObjectId: mongojs.ObjectId diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 1810d222e5..94730b406c 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -3,6 +3,7 @@ chai = require("chai") chai.should() async = require "async" rclient = require("redis").createClient() +{db, ObjectId} = require "../../../app/js/mongojs" MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" @@ -92,9 +93,9 @@ describe "Applying updates to a doc", -> describe "when the ops come in a single linear order", -> before -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - @lines = ["", "", ""] + lines = ["", "", ""] MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines + lines: lines version: 0 } @@ -111,7 +112,7 @@ describe "Applying updates to a doc", -> { doc_id: @doc_id, v: 9, op: [i: "l", p: 9 ] } { doc_id: @doc_id, v: 10, op: [i: "d", p: 10] } ] - @result = ["hello world", "", ""] + @my_result = ["hello world", "", ""] it "should be able to continue applying updates when the project has been deleted", (done) -> actions = [] @@ -126,7 +127,7 @@ describe "Applying updates to a doc", -> async.series actions, (error) => throw error if error? DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result + doc.lines.should.deep.equal @my_result done() it "should push the applied updates to the track changes api", (done) -> @@ -142,9 +143,9 @@ describe "Applying updates to a doc", -> describe "when older ops come in after the delete", -> before -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - @lines = ["", "", ""] + lines = ["", "", ""] MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines + lines: lines version: 0 } @@ -156,7 +157,7 @@ describe "Applying updates to a doc", -> { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } { doc_id: @doc_id, v: 0, op: [i: "world", p: 1 ] } ] - @result = ["hello", "world", ""] + @my_result = ["hello", "world", ""] it "should be able to continue applying updates when the project has been deleted", (done) -> actions = [] @@ -171,7 +172,7 @@ describe "Applying updates to a doc", -> async.series actions, (error) => throw error if error? 
DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result + doc.lines.should.deep.equal @my_result done() describe "with a broken update", -> @@ -191,28 +192,91 @@ describe "Applying updates to a doc", -> done() describe "with enough updates to flush to the track changes api", -> - beforeEach (done) -> + before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines version: 0 } - @updates = [] + updates = [] for v in [0..99] # Should flush after 50 ops - @updates.push + updates.push doc_id: @doc_id, op: [i: v.toString(), p: 0] v: v sinon.spy MockTrackChangesApi, "flushDoc" - DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => + DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) => throw error if error? setTimeout done, 200 - afterEach -> + after -> MockTrackChangesApi.flushDoc.restore() it "should flush the doc twice", -> MockTrackChangesApi.flushDoc.calledTwice.should.equal true + describe "when the document does not have a version in the web api but does in Mongo", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, { + lines: @lines + } + + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + + it "should update the doc (using the mongo version)", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + + describe "when the document version in the web api is ahead of Mongo", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, { + lines: @lines + version: @version + } + + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version - 20 + }, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + + it "should update the doc (using the web version)", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + + describe "when there is no version yet", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, { + lines: @lines + } + + update = + doc: @doc_id + op: @update.op + v: 0 + DocUpdaterClient.sendUpdate @project_id, @doc_id, update, (error) -> + throw error if error? 
+ setTimeout done, 200 + + it "should update the doc (using version = 0)", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee index 04814241e4..6f6094c855 100644 --- a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee @@ -6,15 +6,11 @@ SandboxedModule = require('sandboxed-module') describe "DocOpsManager", -> beforeEach -> - @doc_id = "doc-id" - @project_id = "project-id" + @doc_id = ObjectId().toString() + @project_id = ObjectId().toString() @callback = sinon.stub() @DocOpsManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() "./TrackChangesManager": @TrackChangesManager = {} describe "getPreviousDocOps", -> diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee new file mode 100644 index 0000000000..b2aebdd84d --- /dev/null +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee @@ -0,0 +1,86 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/PersistenceManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "PersistenceManager.getDocFromWeb", -> + beforeEach -> + @PersistenceManager = SandboxedModule.require modulePath, requires: + "request": @request = sinon.stub() + "settings-sharelatex": @Settings = {} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @version = 42 + @callback = sinon.stub() + @Settings.apis = + web: + url: @url = "www.example.com" + user: @user = "sharelatex" + pass: @pass = "password" + + describe "with a successful response from the web api", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) + @PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) + + it "should call the web api", -> + @request + .calledWith({ + url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" + method: "GET" + headers: + "accept": "application/json" + auth: + user: @user + pass: @pass + sendImmediately: true + jar: false + }) + .should.equal true + + it "should call the callback with the doc lines and version", -> + @callback.calledWith(null, @lines, @version).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when request returns an error", -> + beforeEach -> + @request.callsArgWith(1, @error = new Error("oops"), null, null) + @PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) + + it "should return the error", -> + @callback.calledWith(@error).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns 404", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 404}, "") + 
@PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) + + it "should return a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error status code", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 500}, "") + @PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web api error")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee index 0bb881b3ee..50ba0984ac 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee @@ -3,7 +3,7 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/PersistenceManager.js" SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" +{ObjectId} = require("mongojs") describe "PersistenceManager.getDoc", -> beforeEach -> @@ -13,74 +13,54 @@ describe "PersistenceManager.getDoc", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 + "./mongojs": + db: @db = { docOps: {} } + ObjectId: ObjectId + + @project_id = ObjectId().toString() + @doc_id = ObjectId().toString() @callback = sinon.stub() - @Settings.apis = - web: - url: @url = "www.example.com" - user: @user = "sharelatex" - pass: @pass = "password" + @lines = ["mock", "doc", "lines"] + @version = 42 - describe "with a successful response from the web api", -> + describe "when the version is set in the web api", -> beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, @version) + @PersistenceManager.getDocVersionInMongo = sinon.stub() + @PersistenceManager.getDoc @project_id, @doc_id, @callback - it "should call the web api", -> - @request - .calledWith({ - url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" - method: "GET" - headers: - "accept": "application/json" - auth: - user: @user - pass: @pass - sendImmediately: true - jar: false - }) + it "should look up the doc in the web api", -> + @PersistenceManager.getDocFromWeb + .calledWith(@project_id, @doc_id) .should.equal true - it "should call the callback with the doc lines and version", -> + it "should not look up the version in Mongo", -> + @PersistenceManager.getDocVersionInMongo + .called.should.equal false + + it "should call the callback with the lines and version", -> @callback.calledWith(null, @lines, @version).should.equal true - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when request returns an error", -> + describe "when the version is not set in the web api, but is in Mongo", -> beforeEach -> - @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + @PersistenceManager.getDocFromWeb = 
sinon.stub().callsArgWith(2, null, @lines, null) + @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) + @PersistenceManager.getDoc @project_id, @doc_id, @callback - it "should return the error", -> - @callback.calledWith(@error).should.equal true + it "should look up the version in Mongo", -> + @PersistenceManager.getDocVersionInMongo + .calledWith(@doc_id) + .should.equal true - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it "should call the callback with the lines and version", -> + @callback.calledWith(null, @lines, @version).should.equal true - describe "when the request returns 404", -> + describe "when the version is not set", -> beforeEach -> - @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - - it "should return a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, null) + @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, null) + @PersistenceManager.getDoc @project_id, @doc_id, @callback - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it "should call the callback with the lines and version = 0", -> + @callback.calledWith(null, @lines, 0).should.equal true - describe "when the request returns an error status code", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - - it "should return an error", -> - @callback.calledWith(new Error("web api error")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee new file mode 100644 index 0000000000..bbe6c43c48 --- /dev/null +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee @@ -0,0 +1,46 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/PersistenceManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" +{ObjectId} = require("mongojs") + +describe "PersistenceManager.getDocVersionInMongo", -> + beforeEach -> + @PersistenceManager = SandboxedModule.require modulePath, requires: + "request": @request = sinon.stub() + "settings-sharelatex": @Settings = {} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + "./mongojs": + db: @db = { docOps: {} } + ObjectId: ObjectId + + @doc_id = ObjectId().toString() + @callback = sinon.stub() + + describe "getDocVersionInMongo", -> + describe "when the doc exists", -> + beforeEach -> + @doc = + version: @version = 42 + @db.docOps.find = sinon.stub().callsArgWith(2, null, [@doc]) + @PersistenceManager.getDocVersionInMongo @doc_id, @callback + + it "should look for the doc in the database", -> + @db.docOps.find + .calledWith({ doc_id: ObjectId(@doc_id) }, {version: 1}) + .should.equal true + + it "should call the callback with the version", -> + @callback.calledWith(null, @version).should.equal true + + describe "when the doc doesn't exist", -> + beforeEach -> + @db.docOps.find = sinon.stub().callsArgWith(2, null, []) + @PersistenceManager.getDocVersionInMongo 
@doc_id, @callback + + it "should call the callback with null", -> + @callback.calledWith(null, null).should.equal true \ No newline at end of file diff --git a/services/document-updater/test/unit/js/module-loader.js b/services/document-updater/test/unit/js/module-loader.js deleted file mode 100644 index ac4cae7601..0000000000 --- a/services/document-updater/test/unit/js/module-loader.js +++ /dev/null @@ -1,29 +0,0 @@ -var vm = require('vm'); -var fs = require('fs'); -var path = require('path'); - -module.exports.loadModule = function(filePath, mocks) { - mocks = mocks || {}; - - // this is necessary to allow relative path modules within loaded file - // i.e. requiring ./some inside file /a/b.js needs to be resolved to /a/some - var resolveModule = function(module) { - if (module.charAt(0) !== '.') return module; - return path.resolve(path.dirname(filePath), module); - }; - - var exports = {}; - var context = { - require: function(name) { - return mocks[name] || require(resolveModule(name)); - }, - console: console, - exports: exports, - module: { - exports: exports - } - }; - file = fs.readFileSync(filePath); - vm.runInNewContext(file, context); - return context; -}; From bdfe018cadf93e73a572d6881830a71d375d4b8c Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 14 May 2014 14:16:27 +0100 Subject: [PATCH 029/769] Log out warning when using Mongo for version still --- .../document-updater/app/coffee/PersistenceManager.coffee | 2 ++ .../test/unit/coffee/AddingDocsToMemory.coffee | 4 ++-- .../unit/coffee/DocOpsManager/DocOpsManagerTests.coffee | 1 + .../test/unit/coffee/GettingListOfPendingUpdates.coffee | 4 ++-- .../test/unit/coffee/GettingTotalNumberOfDocs.coffee | 4 ++-- .../test/unit/coffee/LockManager/CheckingTheLock.coffee | 4 ++-- .../test/unit/coffee/LockManager/ReleasingTheLock.coffee | 4 ++-- .../test/unit/coffee/PersistenceManager/getDocTests.coffee | 6 ++++++ .../test/unit/coffee/RemovingSingleDocFromMemory.coffee | 4 ++-- 9 files changed, 21 insertions(+), 12 deletions(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 6ac999629a..4b084f1574 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -3,6 +3,7 @@ Settings = require "settings-sharelatex" Errors = require "./Errors" Metrics = require "./Metrics" {db, ObjectId} = require("./mongojs") +logger = require "logger-sharelatex" module.exports = PersistenceManager = getDoc: (project_id, doc_id, callback = (error, lines, version) ->) -> @@ -11,6 +12,7 @@ module.exports = PersistenceManager = if version? callback null, lines, version else + logger.warn project_id: project_id, doc_id: doc_id, "loading doc version from mongo - deprecated" PersistenceManager.getDocVersionInMongo doc_id, (error, version) -> return callback(error) if error? if version? 
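[Annotation: the deprecation warning added above fires on the middle branch of a three-way version fallback that getDoc walks through at this point in the series. A condensed sketch of that order, using an illustrative helper name (in the actual code the logic is inlined in the callbacks above):

    # Hypothetical summary of the fallback order in the diff above:
    resolveVersion = (webVersion, mongoVersion) ->
      return webVersion if webVersion?      # 1. version supplied by the web API wins
      return mongoVersion if mongoVersion?  # 2. deprecated docOps fallback (now logged)
      return 0                              # 3. doc has never been versioned anywhere

The remaining diffs in this patch switch the older unit tests from the custom module-loader to sandboxed-module.]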
diff --git a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee index 019f32bc74..ffed682321 100644 --- a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee +++ b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee @@ -5,7 +5,7 @@ modulePath = path.join __dirname, '../../../app/js/RedisManager.js' keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') project_id = 1234 doc_id = 5678 -loadModule = require('./module-loader').loadModule +SandboxedModule = require('sandboxed-module') describe 'putting a doc into memory', ()-> lines = ["this is one line", "and another line"] @@ -44,7 +44,7 @@ describe 'putting a doc into memory', ()-> exec:(callback)-> callback() - redisManager = loadModule(modulePath, mocks).module.exports + redisManager = SandboxedModule.require(modulePath, requires: mocks) it 'should put a all data into memory', (done)-> redisManager.putDocInMemory project_id, doc_id, lines, version, ()-> diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee index 6f6094c855..a215b0ccd4 100644 --- a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee @@ -3,6 +3,7 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/DocOpsManager.js" SandboxedModule = require('sandboxed-module') +{ObjectId} = require "mongojs" describe "DocOpsManager", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee index cb98e8f601..14f81f6052 100644 --- a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee @@ -3,7 +3,7 @@ should = require('chai').should() path = require('path') modulePath = path.join __dirname, '../../../app/js/RedisManager.js' _ = require('underscore') -loadModule = require('./module-loader').loadModule +SandboxedModule = require('sandboxed-module') keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') describe 'getting entire list of pending updates', ()-> @@ -33,7 +33,7 @@ describe 'getting entire list of pending updates', ()-> exec: (callback)-> callback(null, redisReturn) - redisManager = loadModule(modulePath, mocks).module.exports + redisManager = SandboxedModule.require(modulePath, requires: mocks) it 'should have 3 elements in array', (done)-> redisManager.getPendingUpdatesForDoc doc_id, (err, listOfUpdates)-> diff --git a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee index ae4af4825d..3b4f449f94 100644 --- a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee +++ b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee @@ -4,7 +4,7 @@ should = require('chai').should() path = require('path') modulePath = path.join __dirname, '../../../app/js/RedisManager.js' keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') -loadModule = require('./module-loader').loadModule +SandboxedModule = require('sandboxed-module') describe 'getting cound of docs from memory', 
()->
 
@@ -35,7 +35,7 @@ describe 'getting cound of docs from memory', ()->
       exec:(callback)->
         callback()
 
-  redisManager = loadModule(modulePath, mocks).module.exports
+  redisManager = SandboxedModule.require(modulePath, requires: mocks)
   redisManager.putDocInMemory project_id, doc_id1, 0, ["line"], ->
   redisManager.putDocInMemory project_id, doc_id2, 0, ["ledf"], ->
   redisManager.putDocInMemory project_id, doc_id3, 0, ["ledf"], ->
diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee
index ac72cbae93..fe744d2379 100644
--- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee
+++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee
@@ -7,7 +7,7 @@ keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js')
 project_id = 1234
 doc_id = 5678
 blockingKey = "Blocking:#{doc_id}"
-loadModule = require('../module-loader').loadModule
+SandboxedModule = require('sandboxed-module')
 
 describe 'Lock Manager - checking the lock', ()->
 
@@ -27,7 +27,7 @@
     expire: exireStub
     set: setStub
     exec: execStub
-  LockManager = loadModule(modulePath, mocks).module.exports
+  LockManager = SandboxedModule.require(modulePath, requires: mocks)
 
   it 'should check if lock exists but not set or expire', (done)->
     execStub.callsArgWith(0, null, ["1"])
diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee
index 81f42a3f59..39f16b09b9 100644
--- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee
+++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee
@@ -6,7 +6,7 @@ modulePath = path.join __dirname, '../../../../app/js/LockManager.js'
 keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js')
 project_id = 1234
 doc_id = 5678
-loadModule = require('../module-loader').loadModule
+SandboxedModule = require('sandboxed-module')
 
 describe 'LockManager - releasing the lock', ()->
 
@@ -19,7 +19,7 @@
     auth:->
     del:deleteStub
 
-  LockManager = loadModule(modulePath, mocks).module.exports
+  LockManager = SandboxedModule.require(modulePath, requires: mocks)
 
   it 'should put a all data into memory', (done)->
     LockManager.releaseLock doc_id, ->
diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee
index 50ba0984ac..5b4efe402f 100644
--- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee
+++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee
@@ -13,6 +13,7 @@ describe "PersistenceManager.getDoc", ->
       "./Metrics": @Metrics =
         Timer: class Timer
           done: sinon.stub()
+      "logger-sharelatex": @logger = {warn: sinon.stub()}
       "./mongojs":
         db: @db = { docOps: {} }
         ObjectId: ObjectId
@@ -52,6 +53,11 @@ describe "PersistenceManager.getDoc", ->
         .calledWith(@doc_id)
         .should.equal true
 
+    it "should log a warning", ->
+      @logger.warn
+        .calledWith(project_id: @project_id, doc_id: @doc_id, "loading doc version from mongo - deprecated")
+        .should.equal true
+
     it "should call the callback with the lines and version", ->
       @callback.calledWith(null, @lines, @version).should.equal true
 
diff --git
a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee index 9fd0136aad..be18d41c57 100644 --- a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee +++ b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee @@ -5,7 +5,7 @@ sinon = require('sinon') path = require('path') modulePath = path.join __dirname, '../../../app/js/RedisManager.js' keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') -loadModule = require('./module-loader').loadModule +SandboxedModule = require('sandboxed-module') describe 'removing single doc from memory', ()-> @@ -43,7 +43,7 @@ describe 'removing single doc from memory', ()-> exec:(callback)-> callback(null, []) - redisManager = loadModule(modulePath, mocks).module.exports + redisManager = SandboxedModule.require(modulePath, requires: mocks) redisManager.putDocInMemory project_id, doc_id1, 0, ["line"], -> redisManager.putDocInMemory project_id, doc_id2, 0, ["ledf"], -> redisManager.putDocInMemory project_id, doc_id3, 0, ["ledf"], -> From e25fb7e4357cbdee5b3c7086ecec98f82b685b1c Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 14 May 2014 16:04:25 +0100 Subject: [PATCH 030/769] Keeping writing and reading updates from Mongo to cover the deploy period --- .../app/coffee/PersistenceManager.coffee | 65 +++++++++----- .../coffee/ApplyingUpdatesToADocTests.coffee | 22 +++++ .../coffee/FlushingDocsTests.coffee | 12 +++ .../PersistenceManager/getDocTests.coffee | 32 +++++-- .../setDocInWebTests.coffee | 88 +++++++++++++++++++ .../PersistenceManager/setDocTests.coffee | 87 ++++-------------- .../setDocVersionInMongo.coffee | 43 +++++++++ 7 files changed, 250 insertions(+), 99 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee create mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 4b084f1574..bd119234fc 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -7,30 +7,29 @@ logger = require "logger-sharelatex" module.exports = PersistenceManager = getDoc: (project_id, doc_id, callback = (error, lines, version) ->) -> - PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines, version) -> + PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines, webVersion) -> return callback(error) if error? - if version? + PersistenceManager.getDocVersionInMongo doc_id, (error, mongoVersion) -> + return callback(error) if error? + if !webVersion? and !mongoVersion? + version = 0 + else if !webVersion? + logger.warn project_id: project_id, doc_id: doc_id, "loading doc version from mongo - deprecated" + version = mongoVersion + else if !mongoVersion? + version = webVersion + else if webVersion > mongoVersion + version = webVersion + else + version = mongoVersion callback null, lines, version - else - logger.warn project_id: project_id, doc_id: doc_id, "loading doc version from mongo - deprecated" - PersistenceManager.getDocVersionInMongo doc_id, (error, version) -> - return callback(error) if error? - if version? 
- callback null, lines, version - else - callback null, lines, 0 - - getDocVersionInMongo: (doc_id, callback = (error, version) ->) -> - db.docOps.find { - doc_id: ObjectId(doc_id) - }, { - version: 1 - }, (error, docs) -> + + setDoc: (project_id, doc_id, lines, version, callback = (error) ->) -> + PersistenceManager.setDocInWeb project_id, doc_id, lines, version, (error) -> return callback(error) if error? - if docs.length < 1 or !docs[0].version? - return callback null, null - else - return callback null, docs[0].version + PersistenceManager.setDocVersionInMongo doc_id, version, (error) -> + return callback(error) if error? + callback() getDocFromWeb: (project_id, doc_id, _callback = (error, lines, version) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") @@ -62,7 +61,7 @@ module.exports = PersistenceManager = else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - setDoc: (project_id, doc_id, lines, version, _callback = (error) ->) -> + setDocInWeb: (project_id, doc_id, lines, version, _callback = (error) ->) -> timer = new Metrics.Timer("persistenceManager.setDoc") callback = (args...) -> timer.done() @@ -90,5 +89,27 @@ module.exports = PersistenceManager = return callback(new Errors.NotFoundError("doc not not found: #{url}")) else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) + + getDocVersionInMongo: (doc_id, callback = (error, version) ->) -> + db.docOps.find { + doc_id: ObjectId(doc_id) + }, { + version: 1 + }, (error, docs) -> + return callback(error) if error? + if docs.length < 1 or !docs[0].version? + return callback null, null + else + return callback null, docs[0].version + + setDocVersionInMongo: (doc_id, version, callback = (error) ->) -> + db.docOps.update { + doc_id: ObjectId(doc_id) + }, { + $set: version: version + }, { + upsert: true + }, callback + diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 94730b406c..f312dbbd9e 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -260,6 +260,28 @@ describe "Applying updates to a doc", -> doc.lines.should.deep.equal @result done() + describe "when the document version in Mongo is ahead of the web api", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, { + lines: @lines + version: @version - 20 + } + + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? 
+ setTimeout done, 200 + + it "should update the doc (using the web version)", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + describe "when there is no version yet", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index 04cb52c478..4f4f2f04e2 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -2,6 +2,7 @@ sinon = require "sinon" chai = require("chai") chai.should() async = require "async" +{db, ObjectId} = require "../../../app/js/mongojs" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -53,6 +54,17 @@ describe "Flushing a doc to Mongo", -> .calledWith(@project_id, @doc_id, @version + 1) .should.equal true + it "should store the updated doc version into mongo", (done) -> + db.docOps.find { + doc_id: ObjectId(@doc_id) + }, { + version: 1 + }, (error, docs) => + doc = docs[0] + doc.version.should.equal @version + 1 + done() + + describe "when the doc does not exist in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee index 5b4efe402f..e9088fac30 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee @@ -24,10 +24,10 @@ describe "PersistenceManager.getDoc", -> @lines = ["mock", "doc", "lines"] @version = 42 - describe "when the version is set in the web api", -> + describe "when the version is set in the web api but not Mongo", -> beforeEach -> @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, @version) - @PersistenceManager.getDocVersionInMongo = sinon.stub() + @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, null) @PersistenceManager.getDoc @project_id, @doc_id, @callback it "should look up the doc in the web api", -> @@ -35,9 +35,10 @@ describe "PersistenceManager.getDoc", -> .calledWith(@project_id, @doc_id) .should.equal true - it "should not look up the version in Mongo", -> + it "should look up the version in Mongo", -> @PersistenceManager.getDocVersionInMongo - .called.should.equal false + .calledWith(@doc_id) + .should.equal true it "should call the callback with the lines and version", -> @callback.calledWith(null, @lines, @version).should.equal true @@ -48,11 +49,6 @@ describe "PersistenceManager.getDoc", -> @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) @PersistenceManager.getDoc @project_id, @doc_id, @callback - it "should look up the version in Mongo", -> - @PersistenceManager.getDocVersionInMongo - .calledWith(@doc_id) - .should.equal true - it "shoud log a warning", -> @logger.warn .calledWith(project_id: @project_id, doc_id: @doc_id, "loading doc version from mongo - deprecated") @@ -61,6 +57,24 @@ describe "PersistenceManager.getDoc", -> it "should call the callback with the lines and version", -> @callback.calledWith(null, @lines, 
@version).should.equal true + describe "when the version in the web api is ahead of Mongo", -> + beforeEach -> + @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, @version) + @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version - 20) + @PersistenceManager.getDoc @project_id, @doc_id, @callback + + it "should call the callback with the web version", -> + @callback.calledWith(null, @lines, @version).should.equal true + + describe "when the version in the web api is behind Mongo", -> + beforeEach -> + @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, @version - 20) + @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) + @PersistenceManager.getDoc @project_id, @doc_id, @callback + + it "should call the callback with the Mongo version", -> + @callback.calledWith(null, @lines, @version).should.equal true + describe "when the version is not set", -> beforeEach -> @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, null) diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee new file mode 100644 index 0000000000..915e72affe --- /dev/null +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee @@ -0,0 +1,88 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/PersistenceManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "PersistenceManager.setDocInWeb", -> + beforeEach -> + @PersistenceManager = SandboxedModule.require modulePath, requires: + "request": @request = sinon.stub() + "settings-sharelatex": @Settings = {} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @version = 42 + @callback = sinon.stub() + @Settings.apis = + web: + url: @url = "www.example.com" + user: @user = "sharelatex" + pass: @pass = "password" + + describe "with a successful response from the web api", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) + @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @version, @callback) + + it "should call the web api", -> + @request + .calledWith({ + url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" + body: JSON.stringify + lines: @lines + version: @version + method: "POST" + headers: + "content-type": "application/json" + auth: + user: @user + pass: @pass + sendImmediately: true + jar: false + }) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when request returns an error", -> + beforeEach -> + @request.callsArgWith(1, @error = new Error("oops"), null, null) + @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @version, @callback) + + it "should return the error", -> + @callback.calledWith(@error).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns 404", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 404}, "") + @PersistenceManager.setDocInWeb(@project_id, 
@doc_id, @lines, @version, @callback) + + it "should return a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error status code", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 500}, "") + @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @version, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web api error")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee index 82850e3074..7c8cacd095 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee @@ -3,7 +3,6 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/PersistenceManager.js" SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" describe "PersistenceManager.setDoc", -> beforeEach -> @@ -13,76 +12,28 @@ describe "PersistenceManager.setDoc", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 + "logger-sharelatex": @logger = {warn: sinon.stub()} + + @project_id = "mock-project-id" + @doc_id = "mock-doc-id" @callback = sinon.stub() - @Settings.apis = - web: - url: @url = "www.example.com" - user: @user = "sharelatex" - pass: @pass = "password" + @lines = ["mock", "doc", "lines"] + @version = 42 - describe "with a successful response from the web api", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) + @PersistenceManager.setDocInWeb = sinon.stub().callsArg(4) + @PersistenceManager.setDocVersionInMongo = sinon.stub().callsArg(2) - it "should call the web api", -> - @request - .calledWith({ - url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" - body: JSON.stringify - lines: @lines - version: @version - method: "POST" - headers: - "content-type": "application/json" - auth: - user: @user - pass: @pass - sendImmediately: true - jar: false - }) - .should.equal true + @PersistenceManager.setDoc @project_id, @doc_id, @lines, @version, @callback - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true + it "should set the doc in the web api", -> + @PersistenceManager.setDocInWeb + .calledWith(@project_id, @doc_id, @lines, @version) + .should.equal true - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when request returns an error", -> - beforeEach -> - @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) - - it "should return the error", -> - @callback.calledWith(@error).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns 404", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDoc(@project_id, 
@doc_id, @lines, @version, @callback) - - it "should return a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns an error status code", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) - - it "should return an error", -> - @callback.calledWith(new Error("web api error")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it "should set the doc version in mongo", -> + @PersistenceManager.setDocVersionInMongo + .calledWith(@doc_id, @version) + .should.equal true + it "should call the callback", -> + @callback.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee new file mode 100644 index 0000000000..7f228fc341 --- /dev/null +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee @@ -0,0 +1,43 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/PersistenceManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" +{ObjectId} = require("mongojs") + +describe "PersistenceManager.setDocVersionInMongo", -> + beforeEach -> + @PersistenceManager = SandboxedModule.require modulePath, requires: + "request": @request = sinon.stub() + "settings-sharelatex": @Settings = {} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + "./mongojs": + db: @db = { docOps: {} } + ObjectId: ObjectId + + @doc_id = ObjectId().toString() + @callback = sinon.stub() + + describe "setDocVersionInMongo", -> + beforeEach -> + @version = 42 + @db.docOps.update = sinon.stub().callsArg(3) + @PersistenceManager.setDocVersionInMongo @doc_id, @version, @callback + + it "should update the doc version", -> + @db.docOps.update + .calledWith({ + doc_id: ObjectId(@doc_id) + }, { + $set: + version: @version + }, { + upsert: true + }) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true From ffd10a8439df6d5bfd5cc77197004aa7cec56a94 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 15 May 2014 11:13:16 +0100 Subject: [PATCH 031/769] Don't send or get versioning info to/from web The version number is only used by the doc updater and so can be cleanly encapsulated in a collection that only the doc updater knows about. The version is already stored under docOps, so continue to store it there.
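For illustration, the resulting split of responsibilities looks roughly like the following minimal CoffeeScript sketch. It is simplified from the diff below (metrics and logging omitted); the docOps collection and the mongojs wrapper are the ones this service already uses:

    # Doc lines are fetched from and flushed to the web API; the version is
    # read and written only here, in the doc updater's private docOps collection.
    {db, ObjectId} = require "./mongojs"

    getDocVersionInMongo = (doc_id, callback = (error, version) ->) ->
      db.docOps.find {doc_id: ObjectId(doc_id)}, {version: 1}, (error, docs) ->
        return callback(error) if error?
        if docs.length < 1 or !docs[0].version?
          # No entry yet: treat the doc as version 0 so old docs need no migration
          callback null, 0
        else
          callback null, docs[0].version

    setDocVersionInMongo = (doc_id, version, callback = (error) ->) ->
      # Upsert so that the first flush of a doc creates its docOps entry
      db.docOps.update {doc_id: ObjectId(doc_id)}, {$set: {version: version}}, {upsert: true}, callback

Defaulting missing entries to version 0 and upserting on write means existing documents keep working without any data migration.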
--- .../app/coffee/PersistenceManager.coffee | 26 +-- .../coffee/ApplyingUpdatesToADocTests.coffee | 188 ++++++------------ .../coffee/DeletingADocumentTests.coffee | 37 ++-- .../coffee/FlushingDocsTests.coffee | 34 +--- .../coffee/GettingADocumentTests.coffee | 36 ++-- .../coffee/SettingADocumentTests.coffee | 21 +- .../coffee/helpers/MockWebApi.coffee | 14 +- .../getDocFromWebTests.coffee | 7 +- .../PersistenceManager/getDocTests.coffee | 46 +---- .../getDocVersionInMongoTests.coffee | 4 +- .../setDocInWebTests.coffee | 10 +- .../PersistenceManager/setDocTests.coffee | 5 +- 12 files changed, 155 insertions(+), 273 deletions(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index bd119234fc..089700f23d 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -7,31 +7,20 @@ logger = require "logger-sharelatex" module.exports = PersistenceManager = getDoc: (project_id, doc_id, callback = (error, lines, version) ->) -> - PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines, webVersion) -> + PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines) -> return callback(error) if error? - PersistenceManager.getDocVersionInMongo doc_id, (error, mongoVersion) -> + PersistenceManager.getDocVersionInMongo doc_id, (error, version) -> return callback(error) if error? - if !webVersion? and !mongoVersion? - version = 0 - else if !webVersion? - logger.warn project_id: project_id, doc_id: doc_id, "loading doc version from mongo - deprecated" - version = mongoVersion - else if !mongoVersion? - version = webVersion - else if webVersion > mongoVersion - version = webVersion - else - version = mongoVersion callback null, lines, version setDoc: (project_id, doc_id, lines, version, callback = (error) ->) -> - PersistenceManager.setDocInWeb project_id, doc_id, lines, version, (error) -> + PersistenceManager.setDocInWeb project_id, doc_id, lines, (error) -> return callback(error) if error? PersistenceManager.setDocVersionInMongo doc_id, version, (error) -> return callback(error) if error? callback() - getDocFromWeb: (project_id, doc_id, _callback = (error, lines, version) ->) -> + getDocFromWeb: (project_id, doc_id, _callback = (error, lines) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -55,13 +44,13 @@ module.exports = PersistenceManager = body = JSON.parse body catch e return callback(e) - return callback null, body.lines, body.version + return callback null, body.lines else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - setDocInWeb: (project_id, doc_id, lines, version, _callback = (error) ->) -> + setDocInWeb: (project_id, doc_id, lines, _callback = (error) ->) -> timer = new Metrics.Timer("persistenceManager.setDoc") callback = (args...) -> timer.done() @@ -73,7 +62,6 @@ module.exports = PersistenceManager = method: "POST" body: JSON.stringify lines: lines - version: parseInt(version, 10) headers: "content-type": "application/json" auth: @@ -98,7 +86,7 @@ module.exports = PersistenceManager = }, (error, docs) -> return callback(error) if error? if docs.length < 1 or !docs[0].version? 
- return callback null, null + return callback null, 0 else return callback null, docs[0].version diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index f312dbbd9e..1e9c2e2689 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -25,14 +25,17 @@ describe "Applying updates to a doc", -> describe "when the document is not loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - } sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => throw error if error? - setTimeout done, 200 + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 after -> MockWebApi.getDocument.restore() @@ -60,16 +63,16 @@ describe "Applying updates to a doc", -> describe "when the document is loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - } - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert doc_id: ObjectId(@doc_id), version: @version, (error) => throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? - setTimeout done, 200 + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 after -> MockWebApi.getDocument.restore() @@ -91,28 +94,27 @@ describe "Applying updates to a doc", -> describe "when the document has been deleted", -> describe "when the ops come in a single linear order", -> - before -> + before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] lines = ["", "", ""] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: lines - version: 0 - } - - @updates = [ - { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } - { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } - { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } - { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } - { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } - { doc_id: @doc_id, v: 5, op: [i: " ", p: 5 ] } - { doc_id: @doc_id, v: 6, op: [i: "w", p: 6 ] } - { doc_id: @doc_id, v: 7, op: [i: "o", p: 7 ] } - { doc_id: @doc_id, v: 8, op: [i: "r", p: 8 ] } - { doc_id: @doc_id, v: 9, op: [i: "l", p: 9 ] } - { doc_id: @doc_id, v: 10, op: [i: "d", p: 10] } - ] - @my_result = ["hello world", "", ""] + MockWebApi.insertDoc @project_id, @doc_id, lines: lines + db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) => + throw error if error? 
+ @updates = [ + { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } + { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } + { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } + { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } + { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } + { doc_id: @doc_id, v: 5, op: [i: " ", p: 5 ] } + { doc_id: @doc_id, v: 6, op: [i: "w", p: 6 ] } + { doc_id: @doc_id, v: 7, op: [i: "o", p: 7 ] } + { doc_id: @doc_id, v: 8, op: [i: "r", p: 8 ] } + { doc_id: @doc_id, v: 9, op: [i: "l", p: 9 ] } + { doc_id: @doc_id, v: 10, op: [i: "d", p: 10] } + ] + @my_result = ["hello world", "", ""] + done() it "should be able to continue applying updates when the project has been deleted", (done) -> actions = [] @@ -141,23 +143,24 @@ describe "Applying updates to a doc", -> done() describe "when older ops come in after the delete", -> - before -> + before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] lines = ["", "", ""] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: lines - version: 0 - } + MockWebApi.insertDoc @project_id, @doc_id, lines: lines + db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) => + throw error if error? - @updates = [ - { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } - { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } - { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } - { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } - { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } - { doc_id: @doc_id, v: 0, op: [i: "world", p: 1 ] } - ] - @my_result = ["hello", "world", ""] + @updates = [ + { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } + { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } + { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } + { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } + { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } + { doc_id: @doc_id, v: 0, op: [i: "world", p: 1 ] } + ] + @my_result = ["hello", "world", ""] + + done() it "should be able to continue applying updates when the project has been deleted", (done) -> actions = [] @@ -178,13 +181,12 @@ describe "Applying updates to a doc", -> describe "with a broken update", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - } - DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) -> + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert doc_id: ObjectId(@doc_id), version: @version, (error) => throw error if error? - setTimeout done, 200 + DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) -> + throw error if error? + setTimeout done, 200 it "should not update the doc", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => @@ -194,10 +196,6 @@ describe "Applying updates to a doc", -> describe "with enough updates to flush to the track changes api", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: 0 - } updates = [] for v in [0..99] # Should flush after 50 ops updates.push @@ -207,9 +205,12 @@ describe "Applying updates to a doc", -> sinon.spy MockTrackChangesApi, "flushDoc" - DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) => + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) => throw error if error? 
- setTimeout done, 200 + DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) => + throw error if error? + setTimeout done, 200 after -> MockTrackChangesApi.flushDoc.restore() @@ -217,72 +218,7 @@ describe "Applying updates to a doc", -> it "should flush the doc twice", -> MockTrackChangesApi.flushDoc.calledTwice.should.equal true - describe "when the document does not have a version in the web api but does in Mongo", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - } - - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? - setTimeout done, 200 - - it "should update the doc (using the mongo version)", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - - describe "when the document version in the web api is ahead of Mongo", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - } - - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - 20 - }, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? - setTimeout done, 200 - - it "should update the doc (using the web version)", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - - describe "when the document version in Mongo is ahead of the web api", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - 20 - } - - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? 
- setTimeout done, 200 - - it "should update the doc (using the web version)", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - - describe "when there is no version yet", -> + describe "when there is no version in Mongo", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee index 171dfcc6e2..139ba9bbed 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee @@ -1,6 +1,7 @@ sinon = require "sinon" chai = require("chai") chai.should() +{db, ObjectId} = require "../../../app/js/mongojs" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -21,21 +22,24 @@ describe "Deleting a document", -> describe "when the updated doc exists in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - } sinon.spy MockWebApi, "setDocumentLines" sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? - setTimeout () => - DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => - @statusCode = res.statusCode - done() - , 200 + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => + throw error if error? 
+ setTimeout () => + DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => + @statusCode = res.statusCode + done() + , 200 after -> MockWebApi.setDocumentLines.restore() @@ -49,6 +53,16 @@ describe "Deleting a document", -> .calledWith(@project_id, @doc_id, @result) .should.equal true + it "should write the version to mongo", (done) -> + db.docOps.find { + doc_id: ObjectId(@doc_id) + }, { + version: 1 + }, (error, docs) => + doc = docs[0] + doc.version.should.equal @version + 1 + done() + it "should need to reload the doc if read again", (done) -> MockWebApi.getDocument.called.should.equal.false DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => @@ -62,7 +76,6 @@ describe "Deleting a document", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines - version: @version } sinon.spy MockWebApi, "setDocumentLines" sinon.spy MockWebApi, "getDocument" diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index 4f4f2f04e2..4513fd7d5c 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -19,41 +19,32 @@ describe "Flushing a doc to Mongo", -> }] v: @version @result = ["one", "one and a half", "two", "three"] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - } describe "when the updated doc exists in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - } sinon.spy MockWebApi, "setDocumentLines" - sinon.spy MockWebApi, "setDocumentVersion" - DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) => + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => throw error if error? - setTimeout () => - DocUpdaterClient.flushDoc @project_id, @doc_id, done - , 200 + DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) => + throw error if error? 
+ setTimeout () => + DocUpdaterClient.flushDoc @project_id, @doc_id, done + , 200 after -> MockWebApi.setDocumentLines.restore() - MockWebApi.setDocumentVersion.restore() it "should flush the updated doc lines to the web api", -> MockWebApi.setDocumentLines .calledWith(@project_id, @doc_id, @result) .should.equal true - it "should flush the updated doc version to the web api", -> - MockWebApi.setDocumentVersion - .calledWith(@project_id, @doc_id, @version + 1) - .should.equal true - it "should store the updated doc version into mongo", (done) -> db.docOps.find { doc_id: ObjectId(@doc_id) @@ -70,18 +61,13 @@ describe "Flushing a doc to Mongo", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines - version: @version } sinon.spy MockWebApi, "setDocumentLines" - sinon.spy MockWebApi, "setDocumentVersion" DocUpdaterClient.flushDoc @project_id, @doc_id, done after -> MockWebApi.setDocumentLines.restore() - MockWebApi.setDocumentVersion.restore() it "should not flush the doc to the web api", -> MockWebApi.setDocumentLines.called.should.equal false - MockWebApi.setDocumentVersion.called.should.equal false - diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee index 43c039a802..980d73fa93 100644 --- a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee @@ -1,20 +1,28 @@ sinon = require "sinon" chai = require("chai") chai.should() +{db, ObjectId} = require "../../../app/js/mongojs" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Getting a document", -> + beforeEach -> + @lines = ["one", "two", "three"] + @version = 42 + describe "when the document is not loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines = ["one", "two", "three"] - version: @version = 42 - } sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => + throw error if error? + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() after -> MockWebApi.getDocument.restore() @@ -33,14 +41,17 @@ describe "Getting a document", -> describe "when the document is already loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines = ["one", "two", "three"] - } - - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? 
+ sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() after -> MockWebApi.getDocument.restore() @@ -56,7 +67,6 @@ describe "Getting a document", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines = ["one", "two", "three"] - version: 0 } @updates = for v in [0..99] diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 143fbc868e..5dfc5d95f0 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -1,12 +1,13 @@ sinon = require "sinon" chai = require("chai") chai.should() +{db, ObjectId} = require "../../../app/js/mongojs" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Setting a document", -> - before -> + before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] @lines = ["one", "two", "three"] @version = 42 @@ -21,15 +22,19 @@ describe "Setting a document", -> @newLines = ["these", "are", "the", "new", "lines"] @source = "dropbox" @user_id = "user-id-123" - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) version: @version - } + }, (error) => + throw error if error? + done() + describe "when the updated doc exists in the doc updater", -> before (done) -> sinon.spy MockWebApi, "setDocumentLines" - sinon.spy MockWebApi, "setDocumentVersion" DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? 
DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => @@ -42,7 +47,6 @@ describe "Setting a document", -> after -> MockWebApi.setDocumentLines.restore() - MockWebApi.setDocumentVersion.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -52,11 +56,6 @@ describe "Setting a document", -> .calledWith(@project_id, @doc_id, @newLines) .should.equal true - it "should send the updated doc version to the web api", -> - MockWebApi.setDocumentVersion - .calledWith(@project_id, @doc_id, @version + 2) - .should.equal true - it "should update the lines in the doc updater", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @newLines diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index 7c7dd1e211..7bab5b9b9f 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -14,11 +14,6 @@ module.exports = MockWebApi = @docs["#{project_id}:#{doc_id}"].lines = lines callback null - setDocumentVersion: (project_id, doc_id, version, callback = (error) ->) -> - @docs["#{project_id}:#{doc_id}"] ||= {} - @docs["#{project_id}:#{doc_id}"].version = version - callback null - getDocument: (project_id, doc_id, callback = (error, doc) ->) -> callback null, @docs["#{project_id}:#{doc_id}"] @@ -34,11 +29,10 @@ module.exports = MockWebApi = app.post "/project/:project_id/doc/:doc_id", express.bodyParser(), (req, res, next) => MockWebApi.setDocumentLines req.params.project_id, req.params.doc_id, req.body.lines, (error) -> - MockWebApi.setDocumentVersion req.params.project_id, req.params.doc_id, req.body.version, (error) -> - if error1? or error2? - res.send 500 - else - res.send 204 + if error? + res.send 500 + else + res.send 204 app.listen 3000, (error) -> throw error if error? 
diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee index b2aebdd84d..82ee937591 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee @@ -16,7 +16,6 @@ describe "PersistenceManager.getDocFromWeb", -> @project_id = "project-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] - @version = 42 @callback = sinon.stub() @Settings.apis = web: @@ -26,7 +25,7 @@ describe "PersistenceManager.getDocFromWeb", -> describe "with a successful response from the web api", -> beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) @PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) it "should call the web api", -> @@ -44,8 +43,8 @@ describe "PersistenceManager.getDocFromWeb", -> }) .should.equal true - it "should call the callback with the doc lines and version", -> - @callback.calledWith(null, @lines, @version).should.equal true + it "should call the callback with the doc lines", -> + @callback.calledWith(null, @lines).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee index e9088fac30..ae3e476ec4 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee @@ -24,10 +24,10 @@ describe "PersistenceManager.getDoc", -> @lines = ["mock", "doc", "lines"] @version = 42 - describe "when the version is set in the web api but not Mongo", -> + describe "successfully", -> beforeEach -> - @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, @version) - @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, null) + @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines) + @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) @PersistenceManager.getDoc @project_id, @doc_id, @callback it "should look up the doc in the web api", -> @@ -43,44 +43,4 @@ describe "PersistenceManager.getDoc", -> it "should call the callback with the lines and version", -> @callback.calledWith(null, @lines, @version).should.equal true - describe "when the version is not set in the web api, but is in Mongo", -> - beforeEach -> - @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, null) - @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) - @PersistenceManager.getDoc @project_id, @doc_id, @callback - - it "shoud log a warning", -> - @logger.warn - .calledWith(project_id: @project_id, doc_id: @doc_id, "loading doc version from mongo - deprecated") - .should.equal true - - it "should call the callback with the lines and version", -> - @callback.calledWith(null, @lines, @version).should.equal true - - describe "when the version in the web api is ahead of Mongo", -> - beforeEach -> - @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, @version) - 
@PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version - 20) - @PersistenceManager.getDoc @project_id, @doc_id, @callback - - it "should call the callback with the web version", -> - @callback.calledWith(null, @lines, @version).should.equal true - - describe "when the version in the web api is behind Mongo", -> - beforeEach -> - @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, @version - 20) - @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) - @PersistenceManager.getDoc @project_id, @doc_id, @callback - - it "should call the callback with the Mongo version", -> - @callback.calledWith(null, @lines, @version).should.equal true - - describe "when the version is not set", -> - beforeEach -> - @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines, null) - @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, null) - @PersistenceManager.getDoc @project_id, @doc_id, @callback - - it "should call the callback with the lines and version = 0", -> - @callback.calledWith(null, @lines, 0).should.equal true diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee index bbe6c43c48..a5015279fe 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee @@ -42,5 +42,5 @@ describe "PersistenceManager.getDocVersionInMongo", -> @db.docOps.find = sinon.stub().callsArgWith(2, null, []) @PersistenceManager.getDocVersionInMongo @doc_id, @callback - it "should call the callback with null", -> - @callback.calledWith(null, null).should.equal true \ No newline at end of file + it "should call the callback with 0", -> + @callback.calledWith(null, 0).should.equal true \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee index 915e72affe..ad218caa10 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee @@ -16,7 +16,6 @@ describe "PersistenceManager.setDocInWeb", -> @project_id = "project-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] - @version = 42 @callback = sinon.stub() @Settings.apis = web: @@ -27,7 +26,7 @@ describe "PersistenceManager.setDocInWeb", -> describe "with a successful response from the web api", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) - @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @version, @callback) + @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @callback) it "should call the web api", -> @request @@ -35,7 +34,6 @@ describe "PersistenceManager.setDocInWeb", -> url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" body: JSON.stringify lines: @lines - version: @version method: "POST" headers: "content-type": "application/json" @@ -56,7 +54,7 @@ describe "PersistenceManager.setDocInWeb", -> describe "when request returns an error", -> beforeEach -> @request.callsArgWith(1, @error = new Error("oops"), null, null) - 
@PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @version, @callback) + @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @callback) it "should return the error", -> @callback.calledWith(@error).should.equal true @@ -67,7 +65,7 @@ describe "PersistenceManager.setDocInWeb", -> describe "when the request returns 404", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @version, @callback) + @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @callback) it "should return a NotFoundError", -> @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true @@ -78,7 +76,7 @@ describe "PersistenceManager.setDocInWeb", -> describe "when the request returns an error status code", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @version, @callback) + @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @callback) it "should return an error", -> @callback.calledWith(new Error("web api error")).should.equal true diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee index 7c8cacd095..80c0a5e18f 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee @@ -18,16 +18,15 @@ describe "PersistenceManager.setDoc", -> @doc_id = "mock-doc-id" @callback = sinon.stub() @lines = ["mock", "doc", "lines"] - @version = 42 - @PersistenceManager.setDocInWeb = sinon.stub().callsArg(4) + @PersistenceManager.setDocInWeb = sinon.stub().callsArg(3) @PersistenceManager.setDocVersionInMongo = sinon.stub().callsArg(2) @PersistenceManager.setDoc @project_id, @doc_id, @lines, @version, @callback it "should set the doc in the web api", -> @PersistenceManager.setDocInWeb - .calledWith(@project_id, @doc_id, @lines, @version) + .calledWith(@project_id, @doc_id, @lines) .should.equal true it "should set the doc version in mongo", -> From 946f45115506408a264af2e30ea142f9e16913df Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 15 May 2014 16:22:06 +0100 Subject: [PATCH 032/769] Move to default settings file --- .../{settings.development.coffee => settings.defaults.coffee} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/config/{settings.development.coffee => settings.defaults.coffee} (100%) diff --git a/services/document-updater/config/settings.development.coffee b/services/document-updater/config/settings.defaults.coffee similarity index 100% rename from services/document-updater/config/settings.development.coffee rename to services/document-updater/config/settings.defaults.coffee From cab5509076942975a61ea4190dae9ea0438f75b4 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 7 Aug 2014 11:45:19 +0100 Subject: [PATCH 033/769] Listen on queue via BLPOP rather than pub/sub --- services/document-updater/app.coffee | 9 ++- .../app/coffee/WorkersManager.coffee | 43 +++++++++++ .../coffee/helpers/DocUpdaterClient.coffee | 2 +- .../WorkersManager/WorkersManagerTests.coffee | 73 +++++++++++++++++++ 4 files changed, 123 insertions(+), 4 deletions(-) create mode 100644 services/document-updater/app/coffee/WorkersManager.coffee create mode 100644 services/document-updater/test/unit/coffee/WorkersManager/WorkersManagerTests.coffee diff 
--git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index a52b26f500..79db012081 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -3,8 +3,9 @@ http = require("http") Settings = require('settings-sharelatex') logger = require('logger-sharelatex') logger.initialize("documentupdater") -RedisManager = require('./app/js/RedisManager.js') -UpdateManager = require('./app/js/UpdateManager.js') +RedisManager = require('./app/js/RedisManager') +UpdateManager = require('./app/js/UpdateManager') +WorkersManager = require('./app/js/WorkersManager') Keys = require('./app/js/RedisKeyBuilder') redis = require('redis') Errors = require "./app/js/Errors" @@ -32,7 +33,9 @@ rclient.on "message", (channel, doc_key) -> UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? else - logger.log project_id: project_id, doc_id: doc_id, "ignoring incoming update" + logger.log project_id: project_id, doc_id: doc_id, "ignoring incoming update" + +WorkersManager.createAndStartWorkers(Settings.workerCount || 10) UpdateManager.resumeProcessing() diff --git a/services/document-updater/app/coffee/WorkersManager.coffee b/services/document-updater/app/coffee/WorkersManager.coffee new file mode 100644 index 0000000000..bfc2d705e4 --- /dev/null +++ b/services/document-updater/app/coffee/WorkersManager.coffee @@ -0,0 +1,43 @@ +Settings = require('settings-sharelatex') +logger = require('logger-sharelatex') +Keys = require('./RedisKeyBuilder') +redis = require('redis') +UpdateManager = require('./UpdateManager') + +module.exports = WorkersManager = + createWorker: () -> + redisConf = Settings.redis.web + client = redis.createClient(redisConf.port, redisConf.host) + client.auth(redisConf.password) + + worker = { + client: client + waitForAndProcessUpdate: (callback = (error) ->) -> + worker.client.blpop "pending-updates-list", 0, (error, result) -> + return callback(error) if error? + return callback() if !result? + [list_name, doc_key] = result + [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) + UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> + logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? + return callback(error) if error? + return callback() + + run: () -> + return if Settings.shuttingDown + worker.waitForAndProcessUpdate (error) => + if error? + logger.error err: error, "Error in worker process, waiting 1 second before continuing" + setTimeout () -> + worker.run() + , 1000 + else + worker.run() + } + + return worker + + createAndStartWorkers: (number) -> + for i in [1..number] + worker = WorkersManager.createWorker() + worker.run() \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 53fc274f58..e9fd0b0c34 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -14,7 +14,7 @@ module.exports = DocUpdaterClient = doc_key = "#{project_id}:#{doc_id}" rclient.sadd "DocsWithPendingUpdates", doc_key, (error) -> return callback(error) if error? 
- rclient.publish "pending-updates", doc_key, callback + rclient.rpush "pending-updates-list", doc_key, callback sendUpdates: (project_id, doc_id, updates, callback = (error) ->) -> DocUpdaterClient.preloadDoc project_id, doc_id, (error) -> diff --git a/services/document-updater/test/unit/coffee/WorkersManager/WorkersManagerTests.coffee b/services/document-updater/test/unit/coffee/WorkersManager/WorkersManagerTests.coffee new file mode 100644 index 0000000000..4a86d7cdd0 --- /dev/null +++ b/services/document-updater/test/unit/coffee/WorkersManager/WorkersManagerTests.coffee @@ -0,0 +1,73 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/WorkersManager.js" +SandboxedModule = require('sandboxed-module') + +describe "WorkersManager", -> + beforeEach -> + @WorkersManager = SandboxedModule.require modulePath, requires: + "./UpdateManager" : @UpdateManager = {} + "logger-sharelatex": @logger = { log: sinon.stub() } + "settings-sharelatex": @settings = + redis: + web: {} + "redis": @redis = {} + @callback = sinon.stub() + + describe "each worker", -> + beforeEach -> + @client = + auth: sinon.stub() + @redis.createClient = sinon.stub().returns @client + + @worker = @WorkersManager.createWorker() + + it "should create a new redis client", -> + @redis.createClient.called.should.equal true + + describe "waitForAndProcessUpdate", -> + beforeEach -> + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @doc_key = "#{@project_id}:#{@doc_id}" + @client.blpop = sinon.stub().callsArgWith(2, null, ["pending-updates-list", @doc_key]) + @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) + + @worker.waitForAndProcessUpdate @callback + + it "should call redis with BLPOP", -> + @client.blpop + .calledWith("pending-updates-list", 0) + .should.equal true + + it "should call processOutstandingUpdatesWithLock", -> + @UpdateManager.processOutstandingUpdatesWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + describe "run", -> + it "should call waitForAndProcessUpdate until shutting down", (done) -> + callCount = 0 + @worker.waitForAndProcessUpdate = (callback = (error) ->) => + callCount++ + if callCount == 3 + @settings.shuttingDown = true + setTimeout () -> + callback() + , 10 + sinon.spy @worker, "waitForAndProcessUpdate" + + + @worker.run() + + setTimeout () => + @worker.waitForAndProcessUpdate.callCount.should.equal 3 + done() + , 100 + + + \ No newline at end of file From 773e17df66f702e600ef9b57adc4808aef0c903c Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 7 Aug 2014 13:16:11 +0100 Subject: [PATCH 034/769] Send worker idle waiting times to statsd --- services/document-updater/app/coffee/WorkersManager.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/app/coffee/WorkersManager.coffee b/services/document-updater/app/coffee/WorkersManager.coffee index bfc2d705e4..b421422f29 100644 --- a/services/document-updater/app/coffee/WorkersManager.coffee +++ b/services/document-updater/app/coffee/WorkersManager.coffee @@ -3,6 +3,7 @@ logger = require('logger-sharelatex') Keys = require('./RedisKeyBuilder') redis = require('redis') UpdateManager = require('./UpdateManager') +Metrics = require('./Metrics') module.exports = WorkersManager = createWorker: () -> @@ -13,7 +14,9 @@ module.exports = WorkersManager = worker = { client: client waitForAndProcessUpdate: (callback = (error) ->) -> + timer = new 
Metrics.Timer "worker.waiting" worker.client.blpop "pending-updates-list", 0, (error, result) -> + timer.done() return callback(error) if error? return callback() if !result? [list_name, doc_key] = result From 9da118c809c08904b7469acc303aafc6caa93983 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 11 Aug 2014 14:16:05 +0100 Subject: [PATCH 035/769] Process updates in the background so that workers only act as dispatchers --- services/document-updater/app.coffee | 4 ++-- ...sManager.coffee => DispatchManager.coffee} | 22 +++++++++---------- .../DispatchManagerTests.coffee} | 20 ++++++++--------- 3 files changed, 22 insertions(+), 24 deletions(-) rename services/document-updater/app/coffee/{WorkersManager.coffee => DispatchManager.coffee} (70%) rename services/document-updater/test/unit/coffee/{WorkersManager/WorkersManagerTests.coffee => DispatchManager/DispatchManagerTests.coffee} (72%) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 79db012081..e246d366e8 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -5,7 +5,7 @@ logger = require('logger-sharelatex') logger.initialize("documentupdater") RedisManager = require('./app/js/RedisManager') UpdateManager = require('./app/js/UpdateManager') -WorkersManager = require('./app/js/WorkersManager') +DispatchManager = require('./app/js/DispatchManager') Keys = require('./app/js/RedisKeyBuilder') redis = require('redis') Errors = require "./app/js/Errors" @@ -35,7 +35,7 @@ rclient.on "message", (channel, doc_key) -> else logger.log project_id: project_id, doc_id: doc_id, "ignoring incoming update" -WorkersManager.createAndStartWorkers(Settings.workerCount || 10) +DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) UpdateManager.resumeProcessing() diff --git a/services/document-updater/app/coffee/WorkersManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee similarity index 70% rename from services/document-updater/app/coffee/WorkersManager.coffee rename to services/document-updater/app/coffee/DispatchManager.coffee index b421422f29..afb1a5d176 100644 --- a/services/document-updater/app/coffee/WorkersManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -5,15 +5,15 @@ redis = require('redis') UpdateManager = require('./UpdateManager') Metrics = require('./Metrics') -module.exports = WorkersManager = - createWorker: () -> +module.exports = DispatchManager = + createDispatcher: () -> redisConf = Settings.redis.web client = redis.createClient(redisConf.port, redisConf.host) client.auth(redisConf.password) worker = { client: client - waitForAndProcessUpdate: (callback = (error) ->) -> + _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> timer = new Metrics.Timer "worker.waiting" worker.client.blpop "pending-updates-list", 0, (error, result) -> timer.done() @@ -21,26 +21,24 @@ module.exports = WorkersManager = return callback() if !result? [list_name, doc_key] = result [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) + # Dispatch this in the background UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? - return callback(error) if error? - return callback() + callback() run: () -> return if Settings.shuttingDown - worker.waitForAndProcessUpdate (error) => + worker._waitForUpdateThenDispatchWorker (error) => if error? 
- logger.error err: error, "Error in worker process, waiting 1 second before continuing" - setTimeout () -> - worker.run() - , 1000 + logger.error err: error, "Error in worker process" + throw error else worker.run() } return worker - createAndStartWorkers: (number) -> + createAndStartDispatchers: (number) -> for i in [1..number] - worker = WorkersManager.createWorker() + worker = DispatchManager.createDispatcher() worker.run() \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/WorkersManager/WorkersManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee similarity index 72% rename from services/document-updater/test/unit/coffee/WorkersManager/WorkersManagerTests.coffee rename to services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index 4a86d7cdd0..3d4be1cfec 100644 --- a/services/document-updater/test/unit/coffee/WorkersManager/WorkersManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -1,12 +1,12 @@ sinon = require('sinon') chai = require('chai') should = chai.should() -modulePath = "../../../../app/js/WorkersManager.js" +modulePath = "../../../../app/js/DispatchManager.js" SandboxedModule = require('sandboxed-module') -describe "WorkersManager", -> +describe "DispatchManager", -> beforeEach -> - @WorkersManager = SandboxedModule.require modulePath, requires: + @DispatchManager = SandboxedModule.require modulePath, requires: "./UpdateManager" : @UpdateManager = {} "logger-sharelatex": @logger = { log: sinon.stub() } "settings-sharelatex": @settings = @@ -21,12 +21,12 @@ describe "WorkersManager", -> auth: sinon.stub() @redis.createClient = sinon.stub().returns @client - @worker = @WorkersManager.createWorker() + @worker = @DispatchManager.createDispatcher() it "should create a new redis client", -> @redis.createClient.called.should.equal true - describe "waitForAndProcessUpdate", -> + describe "_waitForUpdateThenDispatchWorker", -> beforeEach -> @project_id = "project-id-123" @doc_id = "doc-id-123" @@ -34,7 +34,7 @@ describe "WorkersManager", -> @client.blpop = sinon.stub().callsArgWith(2, null, ["pending-updates-list", @doc_key]) @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) - @worker.waitForAndProcessUpdate @callback + @worker._waitForUpdateThenDispatchWorker @callback it "should call redis with BLPOP", -> @client.blpop @@ -50,22 +50,22 @@ describe "WorkersManager", -> @callback.called.should.equal true describe "run", -> - it "should call waitForAndProcessUpdate until shutting down", (done) -> + it "should call _waitForUpdateThenDispatchWorker until shutting down", (done) -> callCount = 0 - @worker.waitForAndProcessUpdate = (callback = (error) ->) => + @worker._waitForUpdateThenDispatchWorker = (callback = (error) ->) => callCount++ if callCount == 3 @settings.shuttingDown = true setTimeout () -> callback() , 10 - sinon.spy @worker, "waitForAndProcessUpdate" + sinon.spy @worker, "_waitForUpdateThenDispatchWorker" @worker.run() setTimeout () => - @worker.waitForAndProcessUpdate.callCount.should.equal 3 + @worker._waitForUpdateThenDispatchWorker.callCount.should.equal 3 done() , 100 From 48c5f66af38c53d1065e57cf67be38ed1a7c992a Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 15 Aug 2014 11:06:31 +0100 Subject: [PATCH 036/769] Remove acceptance test from travis --- services/document-updater/.travis.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git 
a/services/document-updater/.travis.yml b/services/document-updater/.travis.yml index 6adc08643a..5c181b1569 100644 --- a/services/document-updater/.travis.yml +++ b/services/document-updater/.travis.yml @@ -10,13 +10,5 @@ install: - npm install - grunt install -before_script: - - grunt forever:app:start - script: - grunt test:unit - - grunt test:acceptance - -services: - - redis-server - - mongodb From 3ffe96f5486723c17ad22d58472b2a4bbf0b8b1b Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 19 Aug 2014 14:06:56 +0100 Subject: [PATCH 037/769] Lock down module versions --- services/document-updater/package.json | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 25feab87d8..da4f09d5d8 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -1,6 +1,11 @@ { "name": "document-updater-sharelatex", "version": "0.0.1", + "description": "An API for applying incoming updates to documents in real-time", + "repository": { + "type": "git", + "url": "https://github.com/sharelatex/document-updater-sharelatex.git" + }, "dependencies": { "express": "3.3.4", "underscore": "1.2.2", @@ -12,9 +17,9 @@ "async": "", "lynx": "0.0.11", "coffee-script": "1.4.0", - "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#master", + "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11" }, "devDependencies": { "grunt-execute": "~0.1.5", From a4a0eef15383ff5d347269cbdb50c80fc40a7599 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 19 Aug 2014 14:07:14 +0100 Subject: [PATCH 038/769] Release version 0.1.0 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index da4f09d5d8..417627bbcf 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -1,6 +1,6 @@ { "name": "document-updater-sharelatex", - "version": "0.0.1", + "version": "0.1.0", "description": "An API for applying incoming updates to documents in real-time", "repository": { "type": "git", "url": "https://github.com/sharelatex/document-updater-sharelatex.git" }, From 114a9ba4d686241e265e0bc6012d7f4369934a1b Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 4 Sep 2014 12:40:17 +0100 Subject: [PATCH 039/769] bumped forever up to the same version as docstore as npm is complaining --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 25feab87d8..b3b3998859 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -27,6 +27,6 @@ "grunt-contrib-coffee": "~0.10.0", "bunyan": "~0.22.1", "grunt-bunyan": "~0.5.0", - "grunt-forever": "~0.4.2" + "grunt-forever": "~0.4.4" } } From 57fc710be64b0cf7208c7557d0dcc206a5c53c2f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 9 Sep 2014 12:12:04 +0100 Subject: [PATCH 040/769] tried downgrading forever --- services/document-updater/package.json | 2 +- 1 file changed, 1
insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 3380db46ce..d3af868c5f 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -32,6 +32,6 @@ "grunt-contrib-coffee": "~0.10.0", "bunyan": "~0.22.1", "grunt-bunyan": "~0.5.0", - "grunt-forever": "~0.4.4" + "grunt-forever": "0.4.1" } } From 672dd3de1bdbd44ce6ec4b95d4a757dfc6161e49 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 26 Sep 2014 17:22:02 +0100 Subject: [PATCH 041/769] works with sentinel v1 --- services/document-updater/app.coffee | 6 ++---- .../document-updater/app/coffee/DispatchManager.coffee | 8 +++----- services/document-updater/app/coffee/LockManager.coffee | 6 ++---- services/document-updater/app/coffee/RedisManager.coffee | 6 ++---- .../app/coffee/ShareJsUpdateManager.coffee | 6 ++---- services/document-updater/package.json | 3 ++- 6 files changed, 13 insertions(+), 22 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index e246d366e8..c7840f608a 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -7,13 +7,11 @@ RedisManager = require('./app/js/RedisManager') UpdateManager = require('./app/js/UpdateManager') DispatchManager = require('./app/js/DispatchManager') Keys = require('./app/js/RedisKeyBuilder') -redis = require('redis') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" -redisConf = Settings.redis.web -rclient = redis.createClient(redisConf.port, redisConf.host) -rclient.auth(redisConf.password) +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) Path = require "path" Metrics = require "metrics-sharelatex" diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index afb1a5d176..6078304f2a 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -1,16 +1,14 @@ Settings = require('settings-sharelatex') logger = require('logger-sharelatex') Keys = require('./RedisKeyBuilder') -redis = require('redis') UpdateManager = require('./UpdateManager') Metrics = require('./Metrics') +redis = require("redis-sharelatex") module.exports = DispatchManager = createDispatcher: () -> - redisConf = Settings.redis.web - client = redis.createClient(redisConf.port, redisConf.host) - client.auth(redisConf.password) - + client = redis.createClient(Settings.redis.web) + worker = { client: client _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index a43bd84a1b..0facb8519b 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -1,9 +1,7 @@ metrics = require('./Metrics') Settings = require('settings-sharelatex') -redis = require('redis') -redisConf = Settings.redis.web -rclient = redis.createClient(redisConf.port, redisConf.host) -rclient.auth(redisConf.password) +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) keys = require('./RedisKeyBuilder') logger = require "logger-sharelatex" diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 9f3ad1bca2..6a920b42c4
100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -1,8 +1,6 @@ Settings = require('settings-sharelatex') -redis = require('redis') -redisConf = Settings.redis.web -rclient = redis.createClient(redisConf.port, redisConf.host) -rclient.auth(redisConf.password) +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) async = require('async') _ = require('underscore') keys = require('./RedisKeyBuilder') diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 5f3cba4fbc..d842ccce2d 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -7,10 +7,8 @@ Keys = require "./RedisKeyBuilder" {EventEmitter} = require "events" util = require "util" -redis = require('redis') -redisConf = Settings.redis.web -rclient = redis.createClient(redisConf.port, redisConf.host) -rclient.auth(redisConf.password) +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) ShareJsModel:: = {} util.inherits ShareJsModel, EventEmitter diff --git a/services/document-updater/package.json b/services/document-updater/package.json index d3af868c5f..29799f47c4 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,7 +21,8 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", - "mongojs": "0.9.11" + "mongojs": "0.9.11", + "redis-sharelatex": "0.0.1" }, "devDependencies": { "grunt-execute": "~0.1.5", From d7894c4ea73bb1410c2c99f7870819fd625da76e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 26 Sep 2014 17:57:15 +0100 Subject: [PATCH 042/769] fixed tests --- .../document-updater/test/unit/coffee/AddingDocsToMemory.coffee | 2 +- .../test/unit/coffee/CheckingUpdatesLength.coffee | 2 +- .../unit/coffee/DispatchManager/DispatchManagerTests.coffee | 2 +- services/document-updater/test/unit/coffee/GettingDoc.coffee | 2 +- .../test/unit/coffee/GettingListOfPendingUpdates.coffee | 2 +- .../test/unit/coffee/GettingTotalNumberOfDocs.coffee | 2 +- .../test/unit/coffee/LockManager/CheckingTheLock.coffee | 2 +- .../test/unit/coffee/LockManager/ReleasingTheLock.coffee | 2 +- .../test/unit/coffee/LockManager/getLockTests.coffee | 2 +- .../test/unit/coffee/LockManager/tryLockTests.coffee | 2 +- .../RedisManager/clearDocFromPendingUpdatesSetTests.coffee | 2 +- .../coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee | 2 +- .../coffee/RedisManager/getPendingUpdatesForDocTests.coffee | 2 +- .../test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee | 2 +- .../test/unit/coffee/RedisManager/pushDocOpTests.coffee | 2 +- .../coffee/RedisManager/pushUncompressedHistoryOpTests.coffee | 2 +- .../test/unit/coffee/ShareJsUpdateManagerTests.coffee | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee index ffed682321..328eb13b8c 100644 --- a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee +++ b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee @@ -22,7 +22,7 @@ describe 'putting a doc into memory', ()-> mocks = 
"logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : ()-> auth:-> multi: ()-> diff --git a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee b/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee index 4f76c48ae7..de04724fef 100644 --- a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee +++ b/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee @@ -13,7 +13,7 @@ describe 'Document Manager - getUpdatesLength ', -> @llenStub = sinon.stub() @redisManager = SandboxedModule.require modulePath, requires: - redis: + "redis-sharelatex": createClient:=> auth:-> llen:@llenStub diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index 3d4be1cfec..2b38b8f077 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -12,7 +12,7 @@ describe "DispatchManager", -> "settings-sharelatex": @settings = redis: web: {} - "redis": @redis = {} + "redis-sharelatex": @redis = {} @callback = sinon.stub() describe "each worker", -> diff --git a/services/document-updater/test/unit/coffee/GettingDoc.coffee b/services/document-updater/test/unit/coffee/GettingDoc.coffee index 824fe14a3f..6796f4c8bb 100644 --- a/services/document-updater/test/unit/coffee/GettingDoc.coffee +++ b/services/document-updater/test/unit/coffee/GettingDoc.coffee @@ -11,7 +11,7 @@ describe 'RedisManager - getDoc', -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: - "redis": @redis = + "redis-sharelatex": @redis = createClient: () => @rclient @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee index 14f81f6052..10d1c39038 100644 --- a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee @@ -18,7 +18,7 @@ describe 'getting entire list of pending updates', ()-> redisReturn = [] mocks = - redis: + "redis-sharelatex": createClient: ()-> auth:-> multi: ()-> diff --git a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee index 3b4f449f94..bac04361c3 100644 --- a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee +++ b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee @@ -19,7 +19,7 @@ describe 'getting cound of docs from memory', ()-> beforeEach (done)-> mocks = "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : ()-> auth:-> smembers:(key, callback)-> diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee index fe744d2379..598c4903c2 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -19,7 +19,7 @@ describe 'Lock Manager - checking the lock', ()-> mocks = "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : ()-> auth:-> multi: -> diff --git 
a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 39f16b09b9..4ae75e7719 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -14,7 +14,7 @@ describe 'LockManager - releasing the lock', ()-> mocks = "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : ()-> auth:-> del:deleteStub diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index f378650d95..ee88dfb2db 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -8,7 +8,7 @@ describe 'LockManager - getting the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : () => auth:-> @callback = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index 6c2c8972af..c828399c12 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -8,7 +8,7 @@ describe 'LockManager - trying the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : () => auth:-> set: @set = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee index 016d96a2ae..81eb0bfefe 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee @@ -10,7 +10,7 @@ describe "RedisManager.clearDocFromPendingUpdatesSet", -> @doc_id = "document-id" @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis" : createClient: () => + "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": {} diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee index d179b45f9d..5bbb93a723 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee @@ -8,7 +8,7 @@ describe "RedisManager.getDocsWithPendingUpdates", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis" : createClient: () => + "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": {} diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee index 4910f1498f..9c70033eb4 100644 --- 
a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee @@ -7,7 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.getPendingUpdatesForDoc", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis": createClient: () => + "redis-sharelatex": createClient: () => @rclient = auth: () -> multi: () => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee index 775418313e..6cd4980fd8 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee @@ -8,7 +8,7 @@ describe "RedisManager.getPreviousDocOpsTests", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis" : createClient: () => + "redis-sharelatex" : createClient: () => @rclient = auth: -> multi: => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee index 247862a257..71a36bb4f3 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee @@ -7,7 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.pushDocOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis": createClient: () => + "redis-sharelatex": createClient: () => @rclient = auth: () -> multi: () => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index d6e19f163e..621a3b1a3b 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -7,7 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.pushUncompressedHistoryOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis": createClient: () => + "redis-sharelatex": createClient: () => @rclient = auth: () -> multi: () => @rclient diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee index 20e737fc97..ecccc91a7e 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee @@ -15,7 +15,7 @@ describe "ShareJsUpdateManager", -> class Model constructor: (@db) -> "./ShareJsDB" : @ShareJsDB = { mockDB: true } - "redis" : createClient: () => @rclient = auth:-> + "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": @logger = { log: sinon.stub() } globals: clearTimeout: @clearTimeout = sinon.stub() From 7b1b6d27f3c4cef5bc1ea51b0435866261ed09ba Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Sun, 28 Sep 2014 20:14:14 +0100 Subject: [PATCH 043/769] fixed missing new redis dependency --- 
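
[Patches 041-047 in this stretch all make the same move back and forth: every Redis connection switches between the stock redis driver, configured field by field, and the redis-sharelatex wrapper, which accepts the whole settings block in one call. A minimal CoffeeScript sketch of the before-and-after connection pattern, assuming Settings.redis.web carries the host, port and password fields the old code reads:

Settings = require "settings-sharelatex"

# Before: the plain "redis" client needs port, host and auth set separately
# redis = require "redis"
# rclient = redis.createClient(Settings.redis.web.port, Settings.redis.web.host)
# rclient.auth(Settings.redis.web.password)

# After: "redis-sharelatex" takes the whole settings object in one call
redis = require "redis-sharelatex"
rclient = redis.createClient(Settings.redis.web)

Centralising the connection logic in the wrapper is what later lets Sentinel support be toggled without touching every caller.]
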
.../test/unit/coffee/RemovingSingleDocFromMemory.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee index be18d41c57..aad1e94804 100644 --- a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee +++ b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee @@ -24,7 +24,7 @@ describe 'removing single doc from memory', ()-> "logger-sharelatex": error:-> log:-> - redis: + "redis-sharelatex": createClient : -> auth:-> multi: -> From 228045667ce0d313094229127df58d82f4b3fd85 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Sun, 28 Sep 2014 20:35:05 +0100 Subject: [PATCH 044/769] increment redis-sharelatex --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 29799f47c4..9b80b65242 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -22,7 +22,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11", - "redis-sharelatex": "0.0.1" + "redis-sharelatex": "0.0.2" }, "devDependencies": { "grunt-execute": "~0.1.5", From fd6c2b56161848c2975c1ef69c5e6305c5fbb1c7 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 29 Sep 2014 11:41:26 +0100 Subject: [PATCH 045/769] bump redis-sharelatex --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 9b80b65242..a56b05372c 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -22,7 +22,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11", - "redis-sharelatex": "0.0.2" + "redis-sharelatex": "0.0.3" }, "devDependencies": { "grunt-execute": "~0.1.5", From 9c85e9b848ac469128a5a9b78342645e71ac29e6 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 29 Sep 2014 12:07:32 +0100 Subject: [PATCH 046/769] moved back to not include sentinel while we upgrade to 0.10.x node --- services/document-updater/app.coffee | 6 ++++-- .../document-updater/app/coffee/DispatchManager.coffee | 8 +++++--- services/document-updater/app/coffee/LockManager.coffee | 6 ++++-- services/document-updater/app/coffee/RedisManager.coffee | 6 ++++-- .../app/coffee/ShareJsUpdateManager.coffee | 6 ++++-- services/document-updater/package.json | 3 +-- .../test/unit/coffee/AddingDocsToMemory.coffee | 2 +- .../test/unit/coffee/CheckingUpdatesLength.coffee | 2 +- .../coffee/DispatchManager/DispatchManagerTests.coffee | 2 +- .../document-updater/test/unit/coffee/GettingDoc.coffee | 2 +- .../test/unit/coffee/GettingListOfPendingUpdates.coffee | 2 +- .../test/unit/coffee/GettingTotalNumberOfDocs.coffee | 2 +- .../test/unit/coffee/LockManager/CheckingTheLock.coffee | 2 +- .../test/unit/coffee/LockManager/ReleasingTheLock.coffee | 2 +- .../test/unit/coffee/LockManager/getLockTests.coffee | 2 +- .../test/unit/coffee/LockManager/tryLockTests.coffee | 2 +- .../clearDocFromPendingUpdatesSetTests.coffee | 2 +- .../RedisManager/getDocsWithPendingUpdatesTests.coffee | 2 +- .../RedisManager/getPendingUpdatesForDocTests.coffee | 2 +- 
.../coffee/RedisManager/getPreviousDocOpsTests.coffee | 2 +- .../test/unit/coffee/RedisManager/pushDocOpTests.coffee | 2 +- .../RedisManager/pushUncompressedHistoryOpTests.coffee | 2 +- .../test/unit/coffee/RemovingSingleDocFromMemory.coffee | 2 +- .../test/unit/coffee/ShareJsUpdateManagerTests.coffee | 2 +- 24 files changed, 40 insertions(+), 31 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index c7840f608a..e246d366e8 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -7,11 +7,13 @@ RedisManager = require('./app/js/RedisManager') UpdateManager = require('./app/js/UpdateManager') DispatchManager = require('./app/js/DispatchManager') Keys = require('./app/js/RedisKeyBuilder') +redis = require('redis') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" -redis = require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.web) +redisConf = Settings.redis.web +rclient = redis.createClient(redisConf.port, redisConf.host) +rclient.auth(redisConf.password) Path = require "path" Metrics = require "metrics-sharelatex" diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 6078304f2a..afb1a5d176 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -1,14 +1,16 @@ Settings = require('settings-sharelatex') logger = require('logger-sharelatex') Keys = require('./RedisKeyBuilder') +redis = require('redis') UpdateManager = require('./UpdateManager') Metrics = require('./Metrics') -redis = require("redis-sharelatex") module.exports = DispatchManager = createDispatcher: () -> - client = redis.createClient(Settings.redis.web) - + redisConf = Settings.redis.web + client = redis.createClient(redisConf.port, redisConf.host) + client.auth(redisConf.password) + worker = { client: client _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 0facb8519b..a43bd84a1b 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -1,7 +1,9 @@ metrics = require('./Metrics') Settings = require('settings-sharelatex') -redis = require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.web) +redis = require('redis') +redisConf = Settings.redis.web +rclient = redis.createClient(redisConf.port, redisConf.host) +rclient.auth(redisConf.password) keys = require('./RedisKeyBuilder') logger = require "logger-sharelatex" diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 6a920b42c4..9f3ad1bca2 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -1,6 +1,8 @@ Settings = require('settings-sharelatex') -redis = require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.web) +redis = require('redis') +redisConf = Settings.redis.web +rclient = redis.createClient(redisConf.port, redisConf.host) +rclient.auth(redisConf.password) async = require('async') _ = require('underscore') keys = require('./RedisKeyBuilder') diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee 
b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index d842ccce2d..5f3cba4fbc 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -7,8 +7,10 @@ Keys = require "./RedisKeyBuilder" {EventEmitter} = require "events" util = require "util" -redis = require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.web) +redis = require('redis') +redisConf = Settings.redis.web +rclient = redis.createClient(redisConf.port, redisConf.host) +rclient.auth(redisConf.password) ShareJsModel:: = {} util.inherits ShareJsModel, EventEmitter diff --git a/services/document-updater/package.json b/services/document-updater/package.json index a56b05372c..d3af868c5f 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,8 +21,7 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", - "mongojs": "0.9.11", - "redis-sharelatex": "0.0.3" + "mongojs": "0.9.11" }, "devDependencies": { "grunt-execute": "~0.1.5", diff --git a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee index 328eb13b8c..ffed682321 100644 --- a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee +++ b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee @@ -22,7 +22,7 @@ describe 'putting a doc into memory', ()-> mocks = "logger-sharelatex": log:-> - "redis-sharelatex": + redis: createClient : ()-> auth:-> multi: ()-> diff --git a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee b/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee index de04724fef..4f76c48ae7 100644 --- a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee +++ b/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee @@ -13,7 +13,7 @@ describe 'Document Manager - getUpdatesLength ', -> @llenStub = sinon.stub() @redisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex": + redis: createClient:=> auth:-> llen:@llenStub diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index 2b38b8f077..3d4be1cfec 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -12,7 +12,7 @@ describe "DispatchManager", -> "settings-sharelatex": @settings = redis: web: {} - "redis-sharelatex": @redis = {} + "redis": @redis = {} @callback = sinon.stub() describe "each worker", -> diff --git a/services/document-updater/test/unit/coffee/GettingDoc.coffee b/services/document-updater/test/unit/coffee/GettingDoc.coffee index 6796f4c8bb..824fe14a3f 100644 --- a/services/document-updater/test/unit/coffee/GettingDoc.coffee +++ b/services/document-updater/test/unit/coffee/GettingDoc.coffee @@ -11,7 +11,7 @@ describe 'RedisManager - getDoc', -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex": @redis = + "redis": @redis = createClient: () => @rclient @doc_id = "doc-id-123" diff --git 
a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee index 10d1c39038..14f81f6052 100644 --- a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee @@ -18,7 +18,7 @@ describe 'getting entire list of pending updates', ()-> redisReturn = [] mocks = - "redis-sharelatex": + redis: createClient: ()-> auth:-> multi: ()-> diff --git a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee index bac04361c3..3b4f449f94 100644 --- a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee +++ b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee @@ -19,7 +19,7 @@ describe 'getting cound of docs from memory', ()-> beforeEach (done)-> mocks = "logger-sharelatex": log:-> - "redis-sharelatex": + redis: createClient : ()-> auth:-> smembers:(key, callback)-> diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee index 598c4903c2..fe744d2379 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -19,7 +19,7 @@ describe 'Lock Manager - checking the lock', ()-> mocks = "logger-sharelatex": log:-> - "redis-sharelatex": + redis: createClient : ()-> auth:-> multi: -> diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 4ae75e7719..39f16b09b9 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -14,7 +14,7 @@ describe 'LockManager - releasing the lock', ()-> mocks = "logger-sharelatex": log:-> - "redis-sharelatex": + redis: createClient : ()-> auth:-> del:deleteStub diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index ee88dfb2db..f378650d95 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -8,7 +8,7 @@ describe 'LockManager - getting the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - "redis-sharelatex": + redis: createClient : () => auth:-> @callback = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index c828399c12..6c2c8972af 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -8,7 +8,7 @@ describe 'LockManager - trying the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - "redis-sharelatex": + redis: createClient : () => auth:-> set: @set = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee 
b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee index 81eb0bfefe..016d96a2ae 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee @@ -10,7 +10,7 @@ describe "RedisManager.clearDocFromPendingUpdatesSet", -> @doc_id = "document-id" @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex" : createClient: () => + "redis" : createClient: () => @rclient = auth:-> "logger-sharelatex": {} diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee index 5bbb93a723..d179b45f9d 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee @@ -8,7 +8,7 @@ describe "RedisManager.getDocsWithPendingUpdates", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex" : createClient: () => + "redis" : createClient: () => @rclient = auth:-> "logger-sharelatex": {} diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee index 9c70033eb4..4910f1498f 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee @@ -7,7 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.getPendingUpdatesForDoc", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex": createClient: () => + "redis": createClient: () => @rclient = auth: () -> multi: () => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee index 6cd4980fd8..775418313e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee @@ -8,7 +8,7 @@ describe "RedisManager.getPreviousDocOpsTests", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex" : createClient: () => + "redis" : createClient: () => @rclient = auth: -> multi: => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee index 71a36bb4f3..247862a257 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee @@ -7,7 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.pushDocOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex": createClient: () => + "redis": createClient: () => @rclient = auth: () -> multi: () => @rclient diff --git 
a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index 621a3b1a3b..d6e19f163e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -7,7 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.pushUncompressedHistoryOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex": createClient: () => + "redis": createClient: () => @rclient = auth: () -> multi: () => @rclient diff --git a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee index aad1e94804..be18d41c57 100644 --- a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee +++ b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee @@ -24,7 +24,7 @@ describe 'removing single doc from memory', ()-> "logger-sharelatex": error:-> log:-> - "redis-sharelatex": + redis: createClient : -> auth:-> multi: -> diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee index ecccc91a7e..20e737fc97 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee @@ -15,7 +15,7 @@ describe "ShareJsUpdateManager", -> class Model constructor: (@db) -> "./ShareJsDB" : @ShareJsDB = { mockDB: true } - "redis-sharelatex" : createClient: () => @rclient = auth:-> + "redis" : createClient: () => @rclient = auth:-> "logger-sharelatex": @logger = { log: sinon.stub() } globals: clearTimeout: @clearTimeout = sinon.stub() From 5ff898352803fec07977305d7f6c330ee0a6cc72 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 7 Oct 2014 12:08:36 +0100 Subject: [PATCH 047/769] implemented redis-sharelatex --- services/document-updater/app.coffee | 7 +++---- .../document-updater/app/coffee/DispatchManager.coffee | 8 +++----- services/document-updater/app/coffee/LockManager.coffee | 6 ++---- services/document-updater/app/coffee/RedisManager.coffee | 6 ++---- .../app/coffee/ShareJsUpdateManager.coffee | 7 +++---- services/document-updater/package.json | 4 ++-- .../test/unit/coffee/AddingDocsToMemory.coffee | 2 +- .../test/unit/coffee/CheckingUpdatesLength.coffee | 2 +- .../coffee/DispatchManager/DispatchManagerTests.coffee | 2 +- .../document-updater/test/unit/coffee/GettingDoc.coffee | 4 ++-- .../test/unit/coffee/GettingListOfPendingUpdates.coffee | 2 +- .../test/unit/coffee/GettingTotalNumberOfDocs.coffee | 2 +- .../test/unit/coffee/LockManager/CheckingTheLock.coffee | 2 +- .../test/unit/coffee/LockManager/ReleasingTheLock.coffee | 2 +- .../test/unit/coffee/LockManager/getLockTests.coffee | 2 +- .../test/unit/coffee/LockManager/tryLockTests.coffee | 2 +- .../clearDocFromPendingUpdatesSetTests.coffee | 2 +- .../RedisManager/getDocsWithPendingUpdatesTests.coffee | 2 +- .../RedisManager/getPendingUpdatesForDocTests.coffee | 2 +- 
.../test/unit/coffee/RemovingSingleDocFromMemory.coffee | 2 +- .../test/unit/coffee/ShareJsUpdateManagerTests.coffee | 2 +- 24 files changed, 34 insertions(+), 42 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index e246d366e8..6ef7f478f1 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -7,13 +7,12 @@ RedisManager = require('./app/js/RedisManager') UpdateManager = require('./app/js/UpdateManager') DispatchManager = require('./app/js/DispatchManager') Keys = require('./app/js/RedisKeyBuilder') -redis = require('redis') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" -redisConf = Settings.redis.web -rclient = redis.createClient(redisConf.port, redisConf.host) -rclient.auth(redisConf.password) +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) + Path = require "path" Metrics = require "metrics-sharelatex" diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index afb1a5d176..4216a6dbcf 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -1,16 +1,14 @@ Settings = require('settings-sharelatex') logger = require('logger-sharelatex') Keys = require('./RedisKeyBuilder') -redis = require('redis') +redis = require("redis-sharelatex") + UpdateManager = require('./UpdateManager') Metrics = require('./Metrics') module.exports = DispatchManager = createDispatcher: () -> - redisConf = Settings.redis.web - client = redis.createClient(redisConf.port, redisConf.host) - client.auth(redisConf.password) - + client = redis.createClient(Settings.redis.web) worker = { client: client _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index a43bd84a1b..0facb8519b 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -1,9 +1,7 @@ metrics = require('./Metrics') Settings = require('settings-sharelatex') -redis = require('redis') -redisConf = Settings.redis.web -rclient = redis.createClient(redisConf.port, redisConf.host) -rclient.auth(redisConf.password) +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) keys = require('./RedisKeyBuilder') logger = require "logger-sharelatex" diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 9f3ad1bca2..6a920b42c4 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -1,8 +1,6 @@ Settings = require('settings-sharelatex') -redis = require('redis') -redisConf = Settings.redis.web -rclient = redis.createClient(redisConf.port, redisConf.host) -rclient.auth(redisConf.password) +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) async = require('async') _ = require('underscore') keys = require('./RedisKeyBuilder') diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 5f3cba4fbc..278a104beb 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ 
b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -7,10 +7,9 @@ Keys = require "./RedisKeyBuilder" {EventEmitter} = require "events" util = require "util" -redis = require('redis') -redisConf = Settings.redis.web -rclient = redis.createClient(redisConf.port, redisConf.host) -rclient.auth(redisConf.password) +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) + ShareJsModel:: = {} util.inherits ShareJsModel, EventEmitter diff --git a/services/document-updater/package.json b/services/document-updater/package.json index d3af868c5f..6318b7b060 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -9,7 +9,6 @@ "dependencies": { "express": "3.3.4", "underscore": "1.2.2", - "redis": "0.7.2", "chai": "", "request": "2.25.0", "sandboxed-module": "~0.2.0", @@ -21,7 +20,8 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", - "mongojs": "0.9.11" + "mongojs": "0.9.11", + "redis-sharelatex": "~0.0.4" }, "devDependencies": { "grunt-execute": "~0.1.5", diff --git a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee index ffed682321..328eb13b8c 100644 --- a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee +++ b/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee @@ -22,7 +22,7 @@ describe 'putting a doc into memory', ()-> mocks = "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : ()-> auth:-> multi: ()-> diff --git a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee b/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee index 4f76c48ae7..de04724fef 100644 --- a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee +++ b/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee @@ -13,7 +13,7 @@ describe 'Document Manager - getUpdatesLength ', -> @llenStub = sinon.stub() @redisManager = SandboxedModule.require modulePath, requires: - redis: + "redis-sharelatex": createClient:=> auth:-> llen:@llenStub diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index 3d4be1cfec..2b38b8f077 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -12,7 +12,7 @@ describe "DispatchManager", -> "settings-sharelatex": @settings = redis: web: {} - "redis": @redis = {} + "redis-sharelatex": @redis = {} @callback = sinon.stub() describe "each worker", -> diff --git a/services/document-updater/test/unit/coffee/GettingDoc.coffee b/services/document-updater/test/unit/coffee/GettingDoc.coffee index 824fe14a3f..0b4b466c24 100644 --- a/services/document-updater/test/unit/coffee/GettingDoc.coffee +++ b/services/document-updater/test/unit/coffee/GettingDoc.coffee @@ -4,14 +4,14 @@ should = chai.should() modulePath = "../../../app/js/RedisManager.js" SandboxedModule = require('sandboxed-module') -describe 'RedisManager - getDoc', -> +describe 'RedisManager.getDoc', -> beforeEach -> @rclient = {} @rclient.auth = () -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, 
requires: - "redis": @redis = + "redis-sharelatex": @redis = createClient: () => @rclient @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee index 14f81f6052..10d1c39038 100644 --- a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee @@ -18,7 +18,7 @@ describe 'getting entire list of pending updates', ()-> redisReturn = [] mocks = - redis: + "redis-sharelatex": createClient: ()-> auth:-> multi: ()-> diff --git a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee index 3b4f449f94..bac04361c3 100644 --- a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee +++ b/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee @@ -19,7 +19,7 @@ describe 'getting cound of docs from memory', ()-> beforeEach (done)-> mocks = "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : ()-> auth:-> smembers:(key, callback)-> diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee index fe744d2379..598c4903c2 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -19,7 +19,7 @@ describe 'Lock Manager - checking the lock', ()-> mocks = "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : ()-> auth:-> multi: -> diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 39f16b09b9..4ae75e7719 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -14,7 +14,7 @@ describe 'LockManager - releasing the lock', ()-> mocks = "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : ()-> auth:-> del:deleteStub diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index f378650d95..ee88dfb2db 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -8,7 +8,7 @@ describe 'LockManager - getting the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : () => auth:-> @callback = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index 6c2c8972af..c828399c12 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -8,7 +8,7 @@ describe 'LockManager - trying the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - redis: + "redis-sharelatex": createClient : () => auth:-> set: @set = 
sinon.stub() diff --git a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee index 016d96a2ae..81eb0bfefe 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee @@ -10,7 +10,7 @@ describe "RedisManager.clearDocFromPendingUpdatesSet", -> @doc_id = "document-id" @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis" : createClient: () => + "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": {} diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee index d179b45f9d..5bbb93a723 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee @@ -8,7 +8,7 @@ describe "RedisManager.getDocsWithPendingUpdates", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis" : createClient: () => + "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": {} diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee index 4910f1498f..9c70033eb4 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee @@ -7,7 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.getPendingUpdatesForDoc", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis": createClient: () => + "redis-sharelatex": createClient: () => @rclient = auth: () -> multi: () => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee index 775418313e..6cd4980fd8 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee @@ -8,7 +8,7 @@ describe "RedisManager.getPreviousDocOpsTests", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: - "redis" : createClient: () => + "redis-sharelatex" : createClient: () => @rclient = auth: -> multi: => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee index 247862a257..71a36bb4f3 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee @@ -7,7 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.pushDocOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis": createClient: () => + "redis-sharelatex": createClient: () => 
@rclient = auth: () -> multi: () => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index d6e19f163e..621a3b1a3b 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -7,7 +7,7 @@ describe "RedisManager.pushUncompressedHistoryOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: - "redis": createClient: () => + "redis-sharelatex": createClient: () => @rclient = auth: () -> multi: () => @rclient diff --git a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee index be18d41c57..aad1e94804 100644 --- a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee +++ b/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee @@ -24,7 +24,7 @@ describe 'removing single doc from memory', ()-> "logger-sharelatex": error:-> log:-> - redis: + "redis-sharelatex": createClient : -> auth:-> multi: -> diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee index 20e737fc97..ecccc91a7e 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee @@ -15,7 +15,7 @@ describe "ShareJsUpdateManager", -> class Model constructor: (@db) -> "./ShareJsDB" : @ShareJsDB = { mockDB: true } - "redis" : createClient: () => @rclient = auth:-> + "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": @logger = { log: sinon.stub() } globals: clearTimeout: @clearTimeout = sinon.stub() From c137e05bfd4881a3b5a31d96bf6466222b584453 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 7 Oct 2014 12:31:16 +0100 Subject: [PATCH 048/769] fixed acceptance tests --- .../test/acceptance/coffee/helpers/MockTrackChangesApi.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee index 43416e37fc..d6c2e05b3b 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee @@ -13,7 +13,7 @@ module.exports = MockTrackChangesApi = else res.send 204 - app.listen 3014, (error) -> + app.listen 3015, (error) -> throw error if error? 
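
[Nearly all of the unit-test churn in patches 042 and 046-047 above is one repeated edit: swapping which module name SandboxedModule stubs in place of the real Redis driver. A minimal sketch of that mocking pattern, with an illustrative module path and stub methods rather than any one test's exact shape:

sinon = require "sinon"
SandboxedModule = require "sandboxed-module"
modulePath = "../../../app/js/RedisManager.js"

describe "RedisManager", ->
  beforeEach ->
    # stub the client so the module under test never opens a real connection
    @rclient =
      auth: ->
      get: sinon.stub()
    @RedisManager = SandboxedModule.require modulePath, requires:
      "redis-sharelatex":
        createClient: => @rclient

Because the module under test calls createClient at require time, the stub has to be injected through SandboxedModule rather than patched afterwards, which is why every rename of the driver touches every test file.]
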
MockTrackChangesApi.run() From c822bb6ea4e1ba1d3082535e3bee19e30773242a Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 19 Nov 2014 12:51:19 +0000 Subject: [PATCH 049/769] Add in missed error catching --- services/document-updater/app/coffee/RedisManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 6a920b42c4..69d917815e 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -81,6 +81,7 @@ module.exports = RedisManager = multi.lrange keys.pendingUpdates(doc_id:doc_id), 0 , -1 multi.del keys.pendingUpdates(doc_id:doc_id) multi.exec (error, replys) -> + return callback(error) if error? jsonUpdates = replys[0] updates = [] for jsonUpdate in jsonUpdates From 0eab2d99b6ee6a8742e2295c15521e47b3956f85 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 19 Nov 2014 12:54:47 +0000 Subject: [PATCH 050/769] Use new 'robust' subscription scheme --- services/document-updater/app.coffee | 2 +- services/document-updater/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 6ef7f478f1..0f3e6a717b 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -11,7 +11,7 @@ Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" redis = require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.web) +rclient = redis.createRobustSubscriptionClient(Settings.redis.web) Path = require "path" diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 6318b7b060..fe2681c1a2 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,7 +21,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11", - "redis-sharelatex": "~0.0.4" + "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#master" }, "devDependencies": { "grunt-execute": "~0.1.5", From 5ae364e6c1f3bdd95285d23fb912702261917de5 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Nov 2014 16:48:43 +0000 Subject: [PATCH 051/769] lock down the redis version --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fe2681c1a2..03a2903523 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,7 +21,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11", - "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#master" + "redis-sharelatex": "0.0.6" }, "devDependencies": { "grunt-execute": "~0.1.5", From 566bf72bfdd9ce0dbb11c0bb923e7e735ee86115 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Nov 2014 16:53:50 +0000 Subject: [PATCH 052/769] Revert "lock down the redis version" This reverts commit 8c942ca7149afff241c968f66669b17c7e36ca4e. 
and dd4b938365d0f6196206fd7c70056c34477d13d5 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 03a2903523..fe2681c1a2 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,7 +21,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11", - "redis-sharelatex": "0.0.6" + "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#master" }, "devDependencies": { "grunt-execute": "~0.1.5", From 4f56d8d4e89f3d0fe65f6fd8287af6922883f6c7 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Nov 2014 16:54:02 +0000 Subject: [PATCH 053/769] Revert "Use new 'robust' subscription scheme" This reverts commit dd4b938365d0f6196206fd7c70056c34477d13d5 --- services/document-updater/app.coffee | 2 +- services/document-updater/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 0f3e6a717b..6ef7f478f1 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -11,7 +11,7 @@ Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" redis = require("redis-sharelatex") -rclient = redis.createRobustSubscriptionClient(Settings.redis.web) +rclient = redis.createClient(Settings.redis.web) Path = require "path" diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fe2681c1a2..6318b7b060 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,7 +21,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11", - "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#master" + "redis-sharelatex": "~0.0.4" }, "devDependencies": { "grunt-execute": "~0.1.5", From db0322e054c776ebb19ab7fbfabfb92bcea149e2 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 20 Nov 2014 11:40:52 +0000 Subject: [PATCH 054/769] added redis health check --- services/document-updater/app.coffee | 9 +++++++++ services/document-updater/package.json | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 6ef7f478f1..3cb2723b34 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -57,6 +57,15 @@ app.get '/status', (req, res)-> else res.send('document updater is alive') + +redisCheck = require("redis-sharelatex").activeHealthCheckRedis(Settings.redis.web) +app.get "/health_check/redis", (req, res, next)-> + if redisCheck.isAlive() + res.send 200 + else + res.send 500 + + app.use (error, req, res, next) -> logger.error err: error, "request errored" if error instanceof Errors.NotFoundError diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 6318b7b060..fd61774739 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,7 +21,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11", - "redis-sharelatex": "~0.0.4" + "redis-sharelatex": "0.0.8" }, "devDependencies": { "grunt-execute": "~0.1.5", From 
e23b0f0076ea205e7c41595d3d02fb859bfb8bf6 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 20 Nov 2014 12:53:41 +0000 Subject: [PATCH 055/769] bump redis package --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fd61774739..5b47ebf00d 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,7 +21,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", "mongojs": "0.9.11", - "redis-sharelatex": "0.0.8" + "redis-sharelatex": "0.0.9" }, "devDependencies": { "grunt-execute": "~0.1.5", From 5cf819cd44a98bd482f28687bf10330f2f0995af Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 10 Feb 2015 13:17:14 +0000 Subject: [PATCH 056/769] Release version 0.1.2 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 5b47ebf00d..e62ffef415 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -1,6 +1,6 @@ { "name": "document-updater-sharelatex", - "version": "0.1.0", + "version": "0.1.2", "description": "An API for applying incoming updates to documents in real-time", "repository": { "type": "git", From 4942038a237e457a758f79c454e60734d226eb1c Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 26 Feb 2015 11:23:45 +0000 Subject: [PATCH 057/769] Release version 0.1.3 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index e62ffef415..4799e83e6e 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -1,6 +1,6 @@ { "name": "document-updater-sharelatex", - "version": "0.1.2", + "version": "0.1.3", "description": "An API for applying incoming updates to documents in real-time", "repository": { "type": "git", From d12341da1dd0d3a4fdf3837bb1679d45bb19843d Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 20 Mar 2015 14:21:34 +0000 Subject: [PATCH 058/769] Release version 0.1.4 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 4799e83e6e..ac7e45147d 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -1,6 +1,6 @@ { "name": "document-updater-sharelatex", - "version": "0.1.3", + "version": "0.1.4", "description": "An API for applying incoming updates to documents in real-time", "repository": { "type": "git", From e3d73d445668edc5d848cd492abf12aa55a59f85 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 25 Mar 2015 16:53:20 +0000 Subject: [PATCH 059/769] add gzip support for large documents to reduce load on redis --- .../app/coffee/RedisManager.coffee | 32 ++++++++----- .../app/coffee/ZipManager.coffee | 47 +++++++++++++++++++ .../config/settings.defaults.coffee | 3 ++ .../clearDocFromPendingUpdatesSetTests.coffee | 2 +- .../getDocsWithPendingUpdatesTests.coffee | 2 +- .../getPreviousDocOpsTests.coffee | 2 +- .../coffee/RedisManager/pushDocOpTests.coffee | 2 +- .../pushUncompressedHistoryOpTests.coffee | 2 +- 8 files changed, 75 insertions(+), 17 deletions(-) create mode 100644 
services/document-updater/app/coffee/ZipManager.coffee diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 69d917815e..523d7057a9 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -6,6 +6,11 @@ _ = require('underscore') keys = require('./RedisKeyBuilder') logger = require('logger-sharelatex') metrics = require('./Metrics') +ZipManager = require('./ZipManager') + +redisOptions = _.clone(Settings.redis.web) +redisOptions.return_buffers = true +rclientBuffer = redis.createClient(redisOptions) # Make times easy to read minutes = 60 # seconds for Redis expire @@ -44,19 +49,21 @@ module.exports = RedisManager = getDoc : (doc_id, callback = (error, lines, version) ->)-> timer = new metrics.Timer("redis.get-doc") - multi = rclient.multi() + # use Buffer when retrieving data as it may be gzipped + multi = rclientBuffer.multi() linesKey = keys.docLines(doc_id:doc_id) multi.get linesKey multi.get keys.docVersion(doc_id:doc_id) multi.exec (error, result)-> timer.done() return callback(error) if error? - try - docLines = JSON.parse result[0] - catch e - return callback(e) - version = parseInt(result[1] or 0, 10) - callback null, docLines, version + ZipManager.uncompressIfNeeded doc_id, result, (error, result) -> + try + docLines = JSON.parse result[0] + catch e + return callback(e) + version = parseInt(result[1] or 0, 10) + callback null, docLines, version getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -70,11 +77,12 @@ module.exports = RedisManager = callback null, len setDocument : (doc_id, docLines, version, callback = (error) ->)-> - multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) - multi.set keys.docVersion(doc_id:doc_id), version - multi.incr keys.now("docsets") - multi.exec (error, replys) -> callback(error) + ZipManager.compressIfNeeded doc_id, JSON.stringify(docLines), (err, result) -> + multi = rclient.multi() + multi.set keys.docLines(doc_id:doc_id), result + multi.set keys.docVersion(doc_id:doc_id), version + multi.incr keys.now("docsets") + multi.exec (error, replys) -> callback(error) getPendingUpdatesForDoc : (doc_id, callback)-> multi = rclient.multi() diff --git a/services/document-updater/app/coffee/ZipManager.coffee b/services/document-updater/app/coffee/ZipManager.coffee new file mode 100644 index 0000000000..3809d1c7da --- /dev/null +++ b/services/document-updater/app/coffee/ZipManager.coffee @@ -0,0 +1,47 @@ +Settings = require('settings-sharelatex') +logger = require('logger-sharelatex') +metrics = require('./Metrics') +zlib = require('zlib') + +ZIP_WRITES_ENABLED = Settings.redis.zip?.writesEnabled? +ZIP_MINSIZE = Settings.redis.zip?.minsize || 64*1024 + +module.exports = ZipManager = + uncompressIfNeeded: (doc_id, result, callback) -> + # result is an array of [text, version]. Each entry is a node + # Buffer object which we need to convert to strings on output + + # first make sure the version (result[1]) is returned as a string + if result?[1]?.toString? + result[1] = result[1].toString() + + # now uncompress the text (result[0]) if needed + buf = result?[0] + + # Check if we have a GZIP file + if buf? and buf[0] == 0x1F and buf[1] == 0x8B + zlib.gunzip buf, (err, newbuf) -> + if err? 
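The gzip check in uncompressIfNeeded above (buf[0] == 0x1F and buf[1] == 0x8B) relies on the gzip container format: every gzip stream begins with those two magic bytes, so compressed and plain values can share a single Redis key with no extra flag, provided the value is read back as a Buffer (hence the return_buffers: true client above). A self-contained sketch of the same round-trip, with illustrative names (zlib is node core):

    zlib = require "zlib"

    isGzipped = (buf) ->
      Buffer.isBuffer(buf) and buf.length >= 2 and buf[0] == 0x1F and buf[1] == 0x8B

    readPossiblyCompressed = (buf, callback = (error, text) ->) ->
      if isGzipped(buf)
        zlib.gunzip buf, (err, plain) ->
          return callback(err) if err?
          callback null, plain.toString()
      else
        # not gzipped: just a Buffer of text, convert it back to a string
        callback null, buf.toString()
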
+ logger.err doc_id:doc_id, err:err, "error uncompressing doc" + callback(err, null) + else + logger.log doc_id:doc_id, fromBytes: buf.length, toChars: newbuf.length, factor: buf.length/newbuf.length, "uncompressed successfully" + result[0] = newbuf.toString() + callback(null, result) + else + # if we don't have a GZIP file it's just a buffer of text, convert it back to a string + if buf?.toString? + result[0] = buf.toString() + callback(null, result) + + compressIfNeeded: (doc_id, text, callback) -> + if ZIP_WRITES_ENABLED && ZIP_MINSIZE > 0 and text.length > ZIP_MINSIZE + zlib.gzip text, (err, buf) -> + if err? + logger.err doc_id:doc_id, err:err, "error compressing doc" + callback(err, null) + else + logger.log doc_id:doc_id, fromChars: text.length, toBytes: buf.length, factor: buf.length/text.length , "compressed successfully" + callback(null, buf) + else + callback(null, text) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index b4f12ed81c..37be211f6e 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -20,6 +20,9 @@ module.exports = port:"6379" host:"localhost" password:"" + zip: + minSize: 8*1024 + writesEnabled: true mongo: url: 'mongodb://127.0.0.1/sharelatex' diff --git a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee index 81eb0bfefe..86ab837a2f 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee @@ -11,7 +11,7 @@ describe "RedisManager.clearDocFromPendingUpdatesSet", -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex" : createClient: () => - @rclient = auth:-> + @rclient ?= auth:-> # only assign one rclient "logger-sharelatex": {} @rclient.srem = sinon.stub().callsArg(2) diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee index 5bbb93a723..2f54ba171e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee @@ -9,7 +9,7 @@ describe "RedisManager.getDocsWithPendingUpdates", -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex" : createClient: () => - @rclient = auth:-> + @rclient ?= auth:-> "logger-sharelatex": {} @docs = [{ diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee index 6cd4980fd8..4a6d42c1ab 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee @@ -9,7 +9,7 @@ describe "RedisManager.getPreviousDocOpsTests", -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex" : createClient: () => - @rclient = + @rclient ?= auth: -> multi: => @rclient "logger-sharelatex": @logger = 
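The recurring change from "@rclient =" to "@rclient ?=" in the test diffs here is load-bearing: RedisManager now calls createClient twice (the new buffer-mode rclientBuffer plus the original client), and CoffeeScript's ?= assigns only when the target is still null or undefined, so every createClient call inside the sandboxed module receives the same stub. The mechanism in isolation, with an assumed stub shape:

    rclient = null
    createClient = ->
      rclient ?= { auth: (->) }   # first call creates the stub, later calls reuse it

    a = createClient()
    b = createClient()
    console.log a is b   # true: both calls share one stubbed client
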
{ error: sinon.stub(), log: sinon.stub() } diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee index 71a36bb4f3..a90b20bced 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee @@ -8,7 +8,7 @@ describe "RedisManager.pushDocOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex": createClient: () => - @rclient = + @rclient ?= auth: () -> multi: () => @rclient "logger-sharelatex": @logger = {log: sinon.stub()} diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index 621a3b1a3b..82b28a25d2 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -8,7 +8,7 @@ describe "RedisManager.pushUncompressedHistoryOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex": createClient: () => - @rclient = + @rclient ?= auth: () -> multi: () => @rclient "logger-sharelatex": @logger = {log: sinon.stub()} From 6bffa4d9e0644bcaa93a795e35f796a79608200f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 25 Mar 2015 16:54:36 +0000 Subject: [PATCH 060/769] don't log docLines when document removed from redis they can now be binary gzipped data which messes up the logs --- services/document-updater/app/coffee/RedisManager.coffee | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 523d7057a9..5ca4e1c174 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -32,7 +32,6 @@ module.exports = RedisManager = removeDocFromMemory : (project_id, doc_id, callback)-> logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis" multi = rclient.multi() - multi.get keys.docLines(doc_id:doc_id) multi.del keys.docLines(doc_id:doc_id) multi.del keys.projectKey(doc_id:doc_id) multi.del keys.docVersion(doc_id:doc_id) @@ -43,8 +42,7 @@ module.exports = RedisManager = logger.err project_id:project_id, doc_id:doc_id, err:err, "error removing doc from redis" callback(err, null) else - docLines = replys[0] - logger.log project_id:project_id, doc_id:doc_id, docLines:docLines, "removed doc from redis" + logger.log project_id:project_id, doc_id:doc_id, "removed doc from redis" callback() getDoc : (doc_id, callback = (error, lines, version) ->)-> From 27d466aa85e6a7ba13792490afe9910586438b18 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 27 Mar 2015 13:17:38 +0000 Subject: [PATCH 061/769] added acceptance test for redis compression --- .../coffee/ApplyingUpdatesToADocTests.coffee | 150 ++++++++++++++++++ 1 file changed, 150 insertions(+) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 1e9c2e2689..f213458168 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ 
b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -238,3 +238,153 @@ describe "Applying updates to a doc", -> doc.lines.should.deep.equal @result done() + + + +describe "Applying updates to a large doc (uses compression)", -> + MIN_SIZE = 500000 + before -> + @lines = ["one", "two", "three"] + while @lines.join('').length < MIN_SIZE + @lines.push "this is a repeated long line which will create a large document which must be compressed #{@lines.length}" + @version = 42 + @update = + doc: @doc_id + op: [{ + i: "one and a half\n" + p: 4 + }] + v: @version + @result = @lines.slice() + @result.splice 1, 0, "one and a half" + + describe "when the document is not loaded", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + sinon.spy MockWebApi, "getDocument" + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + + after -> + MockWebApi.getDocument.restore() + + it "should load the document from the web API", -> + MockWebApi.getDocument + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should update the doc", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + + it "should push the applied updates to the track changes api", (done) -> + rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + throw error if error? + JSON.parse(updates[0]).op.should.deep.equal @update.op + rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + throw error if error? + result.should.equal 1 + done() + + + describe "when the document is loaded", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert doc_id: ObjectId(@doc_id), version: @version, (error) => + throw error if error? + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + + after -> + MockWebApi.getDocument.restore() + + it "should not need to call the web api", -> + MockWebApi.getDocument.called.should.equal false + + it "should update the doc", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + + it "should push the applied updates to the track changes api", (done) -> + rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + result.should.equal 1 + done() + + describe "with a broken update", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert doc_id: ObjectId(@doc_id), version: @version, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) -> + throw error if error? 
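Two conventions in these acceptance tests are worth noting. The fixture loop pads the document with unique numbered lines until it passes MIN_SIZE, guaranteeing it crosses the gzip threshold, and each before block ends by waiting rather than asserting immediately; the setTimeout done, 200 that follows here gives the updater's asynchronous pipeline time to settle before any assertion runs. The fixture loop on its own:

    MIN_SIZE = 500000
    lines = ["one", "two", "three"]
    while lines.join('').length < MIN_SIZE
      lines.push "this is a repeated long line which will create a large document which must be compressed #{lines.length}"
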
+ setTimeout done, 200 + + it "should not update the doc", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @lines + done() + + describe "with enough updates to flush to the track changes api", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + updates = [] + for v in [0..99] # Should flush after 50 ops + updates.push + doc_id: @doc_id, + op: [i: v.toString(), p: 0] + v: v + + sinon.spy MockTrackChangesApi, "flushDoc" + + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) => + throw error if error? + DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) => + throw error if error? + setTimeout done, 200 + + after -> + MockTrackChangesApi.flushDoc.restore() + + it "should flush the doc twice", -> + MockTrackChangesApi.flushDoc.calledTwice.should.equal true + + describe "when there is no version in Mongo", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, { + lines: @lines + } + + update = + doc: @doc_id + op: @update.op + v: 0 + DocUpdaterClient.sendUpdate @project_id, @doc_id, update, (error) -> + throw error if error? + setTimeout done, 200 + + it "should update the doc (using version = 0)", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + From 66fa170ac83ad4a43f8b7feba6454011ada45713 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 27 Mar 2015 15:32:13 +0000 Subject: [PATCH 062/769] disable compression by default --- services/document-updater/app/coffee/RedisManager.coffee | 2 +- services/document-updater/config/settings.defaults.coffee | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 5ca4e1c174..b08687a170 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -18,7 +18,7 @@ minutes = 60 # seconds for Redis expire module.exports = RedisManager = putDocInMemory : (project_id, doc_id, docLines, version, callback)-> timer = new metrics.Timer("redis.put-doc") - logger.log project_id:project_id, doc_id:doc_id, docLines:docLines, version: version, "putting doc in redis" + logger.log project_id:project_id, doc_id:doc_id, version: version, "putting doc in redis" multi = rclient.multi() multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) multi.set keys.projectKey({doc_id:doc_id}), project_id diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 37be211f6e..babdaafdc3 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -21,8 +21,8 @@ module.exports = host:"localhost" password:"" zip: - minSize: 8*1024 - writesEnabled: true + minSize: 10*1024 + writesEnabled: false mongo: url: 'mongodb://127.0.0.1/sharelatex' From 03564b21387fe3c76b2d05764b16a6795065fff4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 27 Mar 2015 17:00:30 +0000 Subject: [PATCH 063/769] fix variable zip.minsize to match config name zip.minSize --- services/document-updater/app/coffee/ZipManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 
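PATCH 063 below fixes a one-character bug that CoffeeScript's soak operator makes easy to miss: Settings.redis.zip?.minsize (wrong case) does not throw, it just evaluates to undefined, so the || 64*1024 fallback silently replaced the configured cutoff. A demonstration against an assumed settings object matching the defaults above:

    Settings = redis: zip: { minSize: 10 * 1024, writesEnabled: false }

    console.log Settings.redis.zip?.minsize || 64 * 1024   # 65536: the typo falls back silently
    console.log Settings.redis.zip?.minSize || 64 * 1024   # 10240: the intended value
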
deletion(-) diff --git a/services/document-updater/app/coffee/ZipManager.coffee b/services/document-updater/app/coffee/ZipManager.coffee index 3809d1c7da..2484a686ac 100644 --- a/services/document-updater/app/coffee/ZipManager.coffee +++ b/services/document-updater/app/coffee/ZipManager.coffee @@ -4,7 +4,7 @@ metrics = require('./Metrics') zlib = require('zlib') ZIP_WRITES_ENABLED = Settings.redis.zip?.writesEnabled? -ZIP_MINSIZE = Settings.redis.zip?.minsize || 64*1024 +ZIP_MINSIZE = Settings.redis.zip?.minSize || 64*1024 module.exports = ZipManager = uncompressIfNeeded: (doc_id, result, callback) -> From 86505047a3786fd23c4d4af64edc0f3362318562 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 27 Mar 2015 17:04:38 +0000 Subject: [PATCH 064/769] added unit test (work in progress) --- .../test/unit/coffee/Compression.coffee | 100 ++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100644 services/document-updater/test/unit/coffee/Compression.coffee diff --git a/services/document-updater/test/unit/coffee/Compression.coffee b/services/document-updater/test/unit/coffee/Compression.coffee new file mode 100644 index 0000000000..bba1dd5c3e --- /dev/null +++ b/services/document-updater/test/unit/coffee/Compression.coffee @@ -0,0 +1,100 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../app/js/RedisManager" +SandboxedModule = require('sandboxed-module') + +describe "RedisManager.setDocument and getDocument", -> + beforeEach -> + @zip_opts = + writesEnabled: true + minSize: 1000 + @doc_id = "document-id" + @version = 123 + @RedisManager = SandboxedModule.require modulePath, requires: + "settings-sharelatex" : + redis: + web: + host: 'none' + port: 'none' + zip: @zip_opts + "redis-sharelatex" : createClient: () => + @rclient ?= + auth:-> # only assign one rclient + multi: () => @rclient + set: (key, value) => @rclient.store[key] = value + get: (key) => @rclient.results.push @rclient.store[key] + incr: (key) => @rclient.store[key]++ + exec: (callback) => + callback.apply(null, [null, @rclient.results]) + @rclient.results = [] + store: {} + results: [] + "logger-sharelatex": {} + + @RedisManager.setDocument(@doc_id, @docLines, @version, @callback) + + describe "for a small document (uncompressed)", -> + before -> + @docLines = ["hello", "world"] + @callback = sinon.stub() + + it "should set the document", -> + @rclient.store['doclines:document-id'] + .should.equal JSON.stringify(@docLines) + + it "should return the callback", -> + @callback.called.should.equal true + + it "should get the document back again", (done) -> + @RedisManager.getDoc @doc_id, (err, lines, version) => + @docLines.should.eql lines + done() + + describe "for a large document (with compression enabled)", -> + before -> + @zip_opts = + writesEnabled: true + minSize: 1000 + @docLines = [] + while @docLines.join('').length <= @zip_opts.minSize + @docLines.push "this is a long line in a long document" + @callback = sinon.stub() + + it "should set the document as a gzipped blob", -> + @rclient.store['doclines:document-id'] + .should.not.equal JSON.stringify(@docLines) + + it "should return the callback", -> + @callback.called.should.equal true + + it "should get the uncompressed document back again", (done) -> + @RedisManager.getDoc @doc_id, (err, lines, version) => + @docLines.should.eql lines + done() + + describe "for a large document (with compression disabled)", -> + before -> + @zip_opts = + writesEnabled: false + minSize: 1000 + @docLines = [] + while 
@docLines.join('').length <= @zip_opts.minSize + @docLines.push "this is a long line in a long document" + @callback = sinon.stub() + + it "should set the document", -> + @rclient.store['doclines:document-id'] + .should.equal JSON.stringify(@docLines) + + it "should return the callback", -> + @callback.called.should.equal true + + it "should get the document back again", (done) -> + @RedisManager.getDoc @doc_id, (err, lines, version) => + @docLines.should.eql lines + done() + + + + From 8e8ee5b3dad873e3466553c8ae8954fa78c1e4d9 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 30 Mar 2015 16:41:23 +0100 Subject: [PATCH 065/769] fix tests --- .../test/unit/coffee/Compression.coffee | 100 ----------- .../unit/coffee/ZipManager/ZipManager.coffee | 158 ++++++++++++++++++ 2 files changed, 158 insertions(+), 100 deletions(-) delete mode 100644 services/document-updater/test/unit/coffee/Compression.coffee create mode 100644 services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee diff --git a/services/document-updater/test/unit/coffee/Compression.coffee b/services/document-updater/test/unit/coffee/Compression.coffee deleted file mode 100644 index bba1dd5c3e..0000000000 --- a/services/document-updater/test/unit/coffee/Compression.coffee +++ /dev/null @@ -1,100 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../app/js/RedisManager" -SandboxedModule = require('sandboxed-module') - -describe "RedisManager.setDocument and getDocument", -> - beforeEach -> - @zip_opts = - writesEnabled: true - minSize: 1000 - @doc_id = "document-id" - @version = 123 - @RedisManager = SandboxedModule.require modulePath, requires: - "settings-sharelatex" : - redis: - web: - host: 'none' - port: 'none' - zip: @zip_opts - "redis-sharelatex" : createClient: () => - @rclient ?= - auth:-> # only assign one rclient - multi: () => @rclient - set: (key, value) => @rclient.store[key] = value - get: (key) => @rclient.results.push @rclient.store[key] - incr: (key) => @rclient.store[key]++ - exec: (callback) => - callback.apply(null, [null, @rclient.results]) - @rclient.results = [] - store: {} - results: [] - "logger-sharelatex": {} - - @RedisManager.setDocument(@doc_id, @docLines, @version, @callback) - - describe "for a small document (uncompressed)", -> - before -> - @docLines = ["hello", "world"] - @callback = sinon.stub() - - it "should set the document", -> - @rclient.store['doclines:document-id'] - .should.equal JSON.stringify(@docLines) - - it "should return the callback", -> - @callback.called.should.equal true - - it "should get the document back again", (done) -> - @RedisManager.getDoc @doc_id, (err, lines, version) => - @docLines.should.eql lines - done() - - describe "for a large document (with compression enabled)", -> - before -> - @zip_opts = - writesEnabled: true - minSize: 1000 - @docLines = [] - while @docLines.join('').length <= @zip_opts.minSize - @docLines.push "this is a long line in a long document" - @callback = sinon.stub() - - it "should set the document as a gzipped blob", -> - @rclient.store['doclines:document-id'] - .should.not.equal JSON.stringify(@docLines) - - it "should return the callback", -> - @callback.called.should.equal true - - it "should get the uncompressed document back again", (done) -> - @RedisManager.getDoc @doc_id, (err, lines, version) => - @docLines.should.eql lines - done() - - describe "for a large document (with compression disabled)", -> - before -> - @zip_opts = - writesEnabled: false - minSize: 1000 - 
@docLines = [] - while @docLines.join('').length <= @zip_opts.minSize - @docLines.push "this is a long line in a long document" - @callback = sinon.stub() - - it "should set the document", -> - @rclient.store['doclines:document-id'] - .should.equal JSON.stringify(@docLines) - - it "should return the callback", -> - @callback.called.should.equal true - - it "should get the document back again", (done) -> - @RedisManager.getDoc @doc_id, (err, lines, version) => - @docLines.should.eql lines - done() - - - - diff --git a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee new file mode 100644 index 0000000000..11d1839994 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee @@ -0,0 +1,158 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +zipModulePath = "../../../../app/js/ZipManager" +redisModulePath = "../../../../app/js/RedisManager" +SandboxedModule = require('sandboxed-module') + +MIN_SIZE = 9999 + +describe "ZipManager with RedisManager", -> + describe "for a small document (uncompressed)", -> + rclient = null + beforeEach (done) -> + @ZipManager = SandboxedModule.require zipModulePath, requires: + 'settings-sharelatex': redis: + web: + host: 'none' + port: 'none' + zip: + writesEnabled: true + minSize: MIN_SIZE + @RedisManager = SandboxedModule.require redisModulePath, requires: + "./ZipManager" : @ZipManager + "redis-sharelatex" : createClient: () => + rclient ?= + auth:-> # only assign one rclient + multi: () => + console.log 'returning rclient', rclient + rclient + set: (key, value) => rclient.store[key] = value + get: (key) => + console.log 'GET', key + rclient.results.push rclient.store[key] + incr: (key) => rclient.store[key]++ + exec: (callback) => + callback.apply(null, [null, rclient.results]) + rclient.results = [] + store: {} + results: [] + "logger-sharelatex": {} + @doc_id = "document-id" + @version = 123 + + @docLines = ["hello", "world"] + @callback = sinon.stub() + + @RedisManager.setDocument @doc_id, @docLines, @version, () => + @callback() + done() + + it "should set the document", -> + rclient.store['doclines:document-id'] + .should.equal JSON.stringify(@docLines) + + it "should return the callback", -> + @callback.called.should.equal true + + it "should get the document back again", (done) -> + @RedisManager.getDoc @doc_id, (err, lines, version) => + @docLines.should.eql lines + done() + + describe "for a large document (with compression enabled)", -> + rclient = null + beforeEach (done) -> + @ZipManager = SandboxedModule.require zipModulePath, requires: + 'settings-sharelatex': redis: + web: + host: 'none' + port: 'none' + zip: + writesEnabled: true + minSize: MIN_SIZE + @RedisManager = SandboxedModule.require redisModulePath, requires: + "./ZipManager" : @ZipManager + "redis-sharelatex" : createClient: () => + rclient ?= + auth:-> # only assign one rclient + multi: () => rclient + set: (key, value) => rclient.store[key] = value + get: (key) => rclient.results.push rclient.store[key] + incr: (key) => rclient.store[key]++ + exec: (callback) => + callback.apply(null, [null, rclient.results]) + rclient.results = [] + store: {} + results: [] + "logger-sharelatex": {} + @doc_id = "document-id" + @version = 123 + + @docLines = [] + while @docLines.join('').length <= MIN_SIZE + @docLines.push "this is a long line in a long document" + @callback = sinon.stub() + @RedisManager.setDocument @doc_id, @docLines, @version, 
() => + @callback() + done() + + it "should set the document as a gzipped blob", -> + rclient.store['doclines:document-id'] + .should.not.equal JSON.stringify(@docLines) + + it "should return the callback", -> + @callback.called.should.equal true + + it "should get the uncompressed document back again", (done) -> + @RedisManager.getDoc @doc_id, (err, lines, version) => + @docLines.should.eql lines + done() + + describe "for a large document (with compression disabled)", -> + rclient = null + beforeEach (done) -> + @ZipManager = SandboxedModule.require zipModulePath, requires: + 'settings-sharelatex': redis: + web: + host: 'none' + port: 'none' + zip: + writesEnabled: false + minSize: MIN_SIZE + @RedisManager = SandboxedModule.require redisModulePath, requires: + "./ZipManager" : @ZipManager + "redis-sharelatex" : createClient: () => + rclient ?= + auth:-> # only assign one rclient + multi: () => rclient + set: (key, value) => rclient.store[key] = value + get: (key) => rclient.results.push rclient.store[key] + incr: (key) => rclient.store[key]++ + exec: (callback) => + callback.apply(null, [null, rclient.results]) + rclient.results = [] + store: {} + results: [] + "logger-sharelatex": {} + @doc_id = "document-id" + @version = 123 + @docLines = [] + while @docLines.join('').length <= MIN_SIZE + @docLines.push "this is a long line in a long document" + @callback = sinon.stub() + @RedisManager.setDocument @doc_id, @docLines, @version, () => + @callback() + done() + + it "should set the document", -> + rclient.store['doclines:document-id'] + .should.equal JSON.stringify(@docLines) + + it "should return the callback", -> + @callback.called.should.equal true + + it "should get the document back again", (done) -> + @RedisManager.getDoc @doc_id, (err, lines, version) => + @docLines.should.eql lines + done() From c8c12e8b410f257e3a1367d76105bac25586de34 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 30 Mar 2015 16:41:45 +0100 Subject: [PATCH 066/769] fix error in ZipManager writesEnabled setting --- services/document-updater/app/coffee/ZipManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/ZipManager.coffee b/services/document-updater/app/coffee/ZipManager.coffee index 2484a686ac..7009362a7e 100644 --- a/services/document-updater/app/coffee/ZipManager.coffee +++ b/services/document-updater/app/coffee/ZipManager.coffee @@ -3,7 +3,7 @@ logger = require('logger-sharelatex') metrics = require('./Metrics') zlib = require('zlib') -ZIP_WRITES_ENABLED = Settings.redis.zip?.writesEnabled? 
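The PATCH 066 fix in this hunk removes a single trailing question mark. As a postfix, ? is an existence test: writesEnabled? is true whenever the setting is present, even when its value is false, so under the old line compression could never be switched off once the zip block existed in settings. A two-line demonstration:

    writesEnabled = false
    console.log writesEnabled?, writesEnabled   # prints: true false
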
+ZIP_WRITES_ENABLED = Settings.redis.zip?.writesEnabled ZIP_MINSIZE = Settings.redis.zip?.minSize || 64*1024 module.exports = ZipManager = From 495af5d568fa34d6b1c9f12780fbf63b3d16a6b7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 30 Mar 2015 16:56:07 +0100 Subject: [PATCH 067/769] remove console.logs from tests --- .../test/unit/coffee/ZipManager/ZipManager.coffee | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee index 11d1839994..c491c29587 100644 --- a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee +++ b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee @@ -24,13 +24,9 @@ describe "ZipManager with RedisManager", -> "redis-sharelatex" : createClient: () => rclient ?= auth:-> # only assign one rclient - multi: () => - console.log 'returning rclient', rclient - rclient + multi: () => rclient set: (key, value) => rclient.store[key] = value - get: (key) => - console.log 'GET', key - rclient.results.push rclient.store[key] + get: (key) => rclient.results.push rclient.store[key] incr: (key) => rclient.store[key]++ exec: (callback) => callback.apply(null, [null, rclient.results]) From e61beed92f8175c857c6d5068d02a178633a87c5 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 30 Mar 2015 16:58:00 +0100 Subject: [PATCH 068/769] suppress logging in ZipManager tests --- .../test/unit/coffee/ZipManager/ZipManager.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee index c491c29587..0ae53ebc19 100644 --- a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee +++ b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee @@ -12,6 +12,7 @@ describe "ZipManager with RedisManager", -> rclient = null beforeEach (done) -> @ZipManager = SandboxedModule.require zipModulePath, requires: + "logger-sharelatex": log:-> 'settings-sharelatex': redis: web: host: 'none' @@ -60,6 +61,7 @@ describe "ZipManager with RedisManager", -> rclient = null beforeEach (done) -> @ZipManager = SandboxedModule.require zipModulePath, requires: + "logger-sharelatex": log:-> 'settings-sharelatex': redis: web: host: 'none' @@ -109,6 +111,7 @@ describe "ZipManager with RedisManager", -> rclient = null beforeEach (done) -> @ZipManager = SandboxedModule.require zipModulePath, requires: + "logger-sharelatex": log:-> 'settings-sharelatex': redis: web: host: 'none' From 9bb08d7ba50c465f9075ebb91e57ca1b8bf8832b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 31 Mar 2015 10:07:11 +0100 Subject: [PATCH 069/769] add ZipManager comments --- .../app/coffee/ZipManager.coffee | 27 ++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/ZipManager.coffee b/services/document-updater/app/coffee/ZipManager.coffee index 7009362a7e..6011fee659 100644 --- a/services/document-updater/app/coffee/ZipManager.coffee +++ b/services/document-updater/app/coffee/ZipManager.coffee @@ -3,6 +3,31 @@ logger = require('logger-sharelatex') metrics = require('./Metrics') zlib = require('zlib') +# Compress and uncompress data sent to Redis using the node 'zlib' +# module, to reduce load on Redis. +# +# Measurements show that most of the load on Redis comes from very +# large documents. 
We can shift some of that CPU load from redis to +# the docupdaters (which are scalable) by compressing the data in the +# docupdater first. +# +# To avoid overloading the docupdater clients we impose a minimum size +# on data we will compress, so we only catch the large ones. +# +# The optimum size for the cutoff is about 10K, below this we do +# more work but don't really gain any extra reduction in Redis CPU +# +# |--------------------+-----------+--------------------------| +# | Compression cutoff | Redis CPU | Extra doc updater CPU(*) | +# |--------------------+-----------+--------------------------| +# | N/A | 100% | 0% | +# | 100k | 80% | 10% | +# | 10k | 50% | 30% | +# |--------------------+-----------+--------------------------| +# +# (*) percentage of a single core, because node zlib runs in multiple +# threads. ZIP_WRITES_ENABLED = Settings.redis.zip?.writesEnabled ZIP_MINSIZE = Settings.redis.zip?.minSize || 64*1024 @@ -18,7 +43,7 @@ module.exports = ZipManager = # now uncompress the text (result[0]) if needed buf = result?[0] - # Check if we have a GZIP file + # Check if we have a GZIP file (magic numbers in header) if buf? and buf[0] == 0x1F and buf[1] == 0x8B zlib.gunzip buf, (err, newbuf) -> if err? From 36f60d5bce9bac735b2f6d53f5ace582b2fd0f97 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 31 Mar 2015 10:07:39 +0100 Subject: [PATCH 070/769] enforce minimum size of 1k for compression --- services/document-updater/app/coffee/ZipManager.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/ZipManager.coffee b/services/document-updater/app/coffee/ZipManager.coffee index 6011fee659..18482a5c69 100644 --- a/services/document-updater/app/coffee/ZipManager.coffee +++ b/services/document-updater/app/coffee/ZipManager.coffee @@ -60,7 +60,8 @@ module.exports = ZipManager = callback(null, result) compressIfNeeded: (doc_id, text, callback) -> - if ZIP_WRITES_ENABLED && ZIP_MINSIZE > 0 and text.length > ZIP_MINSIZE + if ZIP_WRITES_ENABLED and ZIP_MINSIZE > 1024 and text.length > ZIP_MINSIZE + # N.B. skip files of 1k or less, because gzip increases the size zlib.gzip text, (err, buf) -> if err? From 6cdf5615fc0c36df96b2df47eff7777be5eaa037 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 31 Mar 2015 10:24:09 +0100 Subject: [PATCH 071/769] remove old unused functions --- .../app/coffee/RedisManager.coffee | 22 ------------------- 1 file changed, 22 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b08687a170..d280de1cea 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -176,25 +176,3 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback - - -getDocumentsProjectId = (doc_id, callback)-> - rclient.get keys.projectKey({doc_id:doc_id}), (err, project_id)-> - callback err, {doc_id:doc_id, project_id:project_id} - -getAllProjectDocsIds = (project_id, callback)-> - rclient.SMEMBERS keys.docsInProject(project_id:project_id), (err, doc_ids)-> - if callback? 
- callback(err, doc_ids) - -getDocumentsAndExpire = (doc_ids, callback)-> - multi = rclient.multi() - oneDay = 86400 - doc_ids.forEach (doc_id)-> - # rclient.expire keys.docLines(doc_id:doc_id), oneDay, -> - doc_ids.forEach (doc_id)-> - multi.get keys.docLines(doc_id:doc_id) - multi.exec (err, docsLines)-> - callback err, docsLines - - From d862227314d69375fd568c92e7f9a083210e92a4 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 30 Apr 2015 15:04:43 +0100 Subject: [PATCH 072/769] make startup message consistent --- services/document-updater/app.coffee | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 3cb2723b34..3717d2f272 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -83,8 +83,9 @@ shutdownCleanly = (signal) -> , 10000 port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 3003 -app.listen port, "localhost", -> - logger.log("documentupdater-sharelatex server listening on port #{port}") +host = Settings.internal.documentupdater.host or "localhost" +app.listen port, host, -> + logger.info "Document-updater starting up, listening on #{host}:#{port}" for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT'] process.on signal, shutdownCleanly(signal) \ No newline at end of file From 57f691948526632a41c0ca501dc0456be92b5b9e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 18 May 2015 09:03:51 +0100 Subject: [PATCH 073/769] modify LockManager test to avoid dependence on timing --- .../test/unit/coffee/LockManager/getLockTests.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index ee88dfb2db..3147f9701a 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -35,9 +35,11 @@ describe 'LockManager - getting the lock', -> describe "when the lock is initially set", -> beforeEach (done) -> startTime = Date.now() + tries = 0 @LockManager.LOCK_TEST_INTERVAL = 5 @LockManager.tryLock = (doc_id, callback = (error, isFree) ->) -> - if Date.now() - startTime < 20 + if (Date.now() - startTime < 20) or (tries < 2) + tries = tries + 1 callback null, false else callback null, true From 85eab2e967300dbd03a4a2576353d22b0397ed06 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 19 May 2015 14:23:32 +0100 Subject: [PATCH 074/769] modify DispatchManager test to allow for slow shutdown --- .../DispatchManagerTests.coffee | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index 2b38b8f077..eddb1eaddb 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -63,11 +63,13 @@ describe "DispatchManager", -> @worker.run() - - setTimeout () => - @worker._waitForUpdateThenDispatchWorker.callCount.should.equal 3 - done() - , 100 - - - \ No newline at end of file + + checkStatus = () => + if not @settings.shuttingDown # retry until shutdown + setTimeout 
checkStatus, 100 + return + else + @worker._waitForUpdateThenDispatchWorker.callCount.should.equal 3 + done() + + checkStatus() From 85a4bf0da6a668663130debcf1864674d58a133c Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 19 May 2015 14:39:47 +0100 Subject: [PATCH 075/769] add tests calling zlib.gzip directly (for debugging) --- .../test/unit/coffee/ZipManager/ZipManager.coffee | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee index 0ae53ebc19..da7cdf4195 100644 --- a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee +++ b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee @@ -4,6 +4,7 @@ should = chai.should() zipModulePath = "../../../../app/js/ZipManager" redisModulePath = "../../../../app/js/RedisManager" SandboxedModule = require('sandboxed-module') +zlib = require('zlib') MIN_SIZE = 9999 @@ -57,6 +58,16 @@ describe "ZipManager with RedisManager", -> @docLines.should.eql lines done() + describe "calling node zlib.gzip directly", -> + it "should compress the string 'hello world' within the timeout", (done) -> + zlib.gzip "hello world", done + + it "should compress a 10k string within the timeout", (done) -> + text = "" + while text.length < 10*1024 + text = text + "helloworld" + zlib.gzip text, done + describe "for a large document (with compression enabled)", -> rclient = null beforeEach (done) -> @@ -90,6 +101,7 @@ describe "ZipManager with RedisManager", -> @docLines = [] while @docLines.join('').length <= MIN_SIZE @docLines.push "this is a long line in a long document" + console.log "length of doclines", @docLines.join('').length @callback = sinon.stub() @RedisManager.setDocument @doc_id, @docLines, @version, () => @callback() From 60db8bf5dec1cf25c80bd3ece340281127318021 Mon Sep 17 00:00:00 2001 From: Brian Date: Wed, 27 May 2015 16:21:50 +0100 Subject: [PATCH 076/769] flush ops when document is flushed --- services/document-updater/app/coffee/DocumentManager.coffee | 5 ++++- .../coffee/DocumentManager/flushAndDeleteDocTests.coffee | 1 + .../test/unit/coffee/DocumentManager/flushDocTests.coffee | 2 ++ .../coffee/DocumentManager/getDocAndRecentOpsTests.coffee | 1 + .../test/unit/coffee/DocumentManager/getDocTests.coffee | 1 + .../test/unit/coffee/DocumentManager/setDocTests.coffee | 1 + 6 files changed, 10 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 0faa8d2b8a..81fa0aa66c 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -4,6 +4,7 @@ DocOpsManager = require "./DocOpsManager" DiffCodec = require "./DiffCodec" logger = require "logger-sharelatex" Metrics = require "./Metrics" +TrackChangesManager = require "./TrackChangesManager" module.exports = DocumentManager = getDoc: (project_id, doc_id, _callback = (error, lines, version) ->) -> @@ -90,7 +91,9 @@ module.exports = DocumentManager = logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" PersistenceManager.setDoc project_id, doc_id, lines, version, (error) -> return callback(error) if error? - callback null + TrackChangesManager.flushDocChanges project_id, doc_id, (error) -> + return callback(error) if error? 
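PATCH 076 threads a second asynchronous step into the flush: the document only counts as flushed once the track-changes flush has also succeeded (the "callback null" continuing below is the success path of the new inner callback). These hand-rolled "return callback(error) if error?" ladders are the callback-style equivalent of an async.series chain; a sketch of the same two steps using the well-known async module, with the free variables standing in for the surrounding scope:

    async = require "async"

    flushDocAndHistory = (project_id, doc_id, lines, version, callback) ->
      async.series [
        (cb) -> PersistenceManager.setDoc project_id, doc_id, lines, version, cb
        (cb) -> TrackChangesManager.flushDocChanges project_id, doc_id, cb
      ], (error) -> callback(error)
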
+ callback null flushAndDeleteDoc: (project_id, doc_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.flushAndDeleteDoc") diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee index 85a25ee5a7..c4a6ef3d1a 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee @@ -8,6 +8,7 @@ describe "DocumentUpdater - flushAndDeleteDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} "./DocOpsManager" :{} diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee index 6bdba1a2b7..551b444f7f 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee @@ -8,6 +8,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "./DocOpsManager": @DocOpsManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} @@ -23,6 +24,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) + @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee index 7a296cc47d..1e5f521b36 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee @@ -8,6 +8,7 @@ describe "DocumentUpdater - getDocAndRecentOps", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "./DocOpsManager": @DocOpsManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee index ea68890199..9d95d46a87 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee @@ -8,6 +8,7 @@ describe "DocumentUpdater - getDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "./DocOpsManager": @DocOpsManager = {} 
"logger-sharelatex": @logger = {log: sinon.stub()} diff --git a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee index b827b584f8..a1667a6669 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee @@ -8,6 +8,7 @@ describe "DocumentManager - setDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "./DiffCodec": @DiffCodec = {} "./DocOpsManager":{} From e7e82fc89d97032d8b1094f4e57725462bf0a299 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 28 May 2015 16:43:58 +0100 Subject: [PATCH 077/769] update port to match default track changes configuration --- services/document-updater/config/settings.defaults.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index babdaafdc3..fbda700b59 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -13,7 +13,7 @@ module.exports = user: "sharelatex" pass: "password" trackchanges: - url: "http://localhost:3014" + url: "http://localhost:3015" redis: web: From a3847d21d5631209ae01abaeeca7106292494927 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 12 Jun 2015 10:14:35 +0100 Subject: [PATCH 078/769] Replace UTF-16 surrogate characters with 'replacement character' MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In Javascript, characters are 16-bits wide. It does not understand surrogates as characters. From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane): "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate and one Low Surrogate. A single surrogate code point will never be assigned a character."" The main offender seems to be \uD835 as a stand alone character, which would be the first 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). Something must be going on client side that is screwing up the encoding and splitting the two 16-bit characters so that \uD835 is standalone. --- .../app/coffee/UpdateManager.coffee | 21 ++++++++++- .../UpdateManager/ApplyingUpdates.coffee | 36 ++++++++++++++----- 2 files changed, 48 insertions(+), 9 deletions(-) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index a1db456457..feff628453 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -54,6 +54,8 @@ module.exports = UpdateManager = UpdateManager.applyUpdates project_id, doc_id, updates, callback applyUpdates: (project_id, doc_id, updates, callback = (error) ->) -> + for update in updates + UpdateManager._sanitizeUpdate update ShareJsUpdateManager.applyUpdates project_id, doc_id, updates, (error, updatedDocLines, version) -> return callback(error) if error? 
logger.log doc_id: doc_id, version: version, "updating doc via sharejs" @@ -75,5 +77,22 @@ module.exports = UpdateManager = _handleErrorInsideLock: (doc_id, original_error, callback = (error) ->) -> LockManager.releaseLock doc_id, (lock_error) -> callback(original_error) + + _sanitizeUpdate: (update) -> + # In Javascript, characters are 16-bits wide. It does not understand surrogates as characters. + # + # From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane): + # "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved + # for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate + # and one Low Surrogate. A single surrogate code point will never be assigned a character."" + # + # The main offender seems to be \uD835 as a stand alone character, which would be the first + # 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). + # Something must be going on client side that is screwing up the encoding and splitting the + # two 16-bit characters so that \uD835 is standalone. + for op in update.op or [] + if op.i? + # Replace high and low surrogate characters with 'replacement character' (\uFFFD) + op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") + return update - diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee index f421d545a7..70bab70c71 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee @@ -180,19 +180,39 @@ describe "UpdateManager", -> describe "applyUpdates", -> beforeEach -> - @updates = [{p: 1, t: "foo"}] + @updates = [{op: [{p: 42, i: "foo"}]}] @updatedDocLines = ["updated", "lines"] @version = 34 @ShareJsUpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) @RedisManager.setDocument = sinon.stub().callsArg(3) - @UpdateManager.applyUpdates @project_id, @doc_id, @updates, @callback + + describe "normally", -> + beforeEach -> + @UpdateManager.applyUpdates @project_id, @doc_id, @updates, @callback + + it "should apply the updates via ShareJS", -> + @ShareJsUpdateManager.applyUpdates + .calledWith(@project_id, @doc_id, @updates) + .should.equal true - it "should save the document", -> - @RedisManager.setDocument - .calledWith(@doc_id, @updatedDocLines, @version) - .should.equal true + it "should save the document", -> + @RedisManager.setDocument + .calledWith(@doc_id, @updatedDocLines, @version) + .should.equal true - it "should call the callback", -> - @callback.called.should.equal true + it "should call the callback", -> + @callback.called.should.equal true + describe "with UTF-16 surrogate pairs in the update", -> + beforeEach -> + @updates = [{op: [{p: 42, i: "\uD835\uDC00"}]}] + @UpdateManager.applyUpdates @project_id, @doc_id, @updates, @callback + + it "should apply the update but with surrogate pairs removed", -> + @ShareJsUpdateManager.applyUpdates + .calledWith(@project_id, @doc_id, @updates) + .should.equal true + + # \uFFFD is 'replacement character' + @updates[0].op[0].i.should.equal "\uFFFD\uFFFD" From 333591d0877f7feda21795e5ffe1eca70d72e8d6 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 12 Jun 2015 10:16:33 +0100 Subject: [PATCH 079/769] Extra null check --- services/document-updater/app/coffee/UpdateManager.coffee | 2 
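The sanitiser in PATCH 078 above is deliberately blunt: the character class [\uD800-\uDFFF] matches every UTF-16 surrogate code unit, so a lone \uD835 and a well-formed pair such as \uD835\uDC00 are both rewritten, which is exactly what the unit test above asserts ("\uFFFD\uFFFD"). The same transform as a standalone function:

    # Replace any surrogate code unit in inserted text with U+FFFD.
    sanitizeUpdate = (update) ->
      for op in update.op or []
        op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") if op.i?
      update

    update = sanitizeUpdate(op: [{p: 42, i: "\uD835\uDC00"}])
    console.log update.op[0].i   # two replacement characters
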
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee
index feff628453..8765c330ff 100644
--- a/services/document-updater/app/coffee/UpdateManager.coffee
+++ b/services/document-updater/app/coffee/UpdateManager.coffee
@@ -54,7 +54,7 @@ module.exports = UpdateManager =
       UpdateManager.applyUpdates project_id, doc_id, updates, callback

   applyUpdates: (project_id, doc_id, updates, callback = (error) ->) ->
-    for update in updates
+    for update in updates or []
       UpdateManager._sanitizeUpdate update
     ShareJsUpdateManager.applyUpdates project_id, doc_id, updates, (error, updatedDocLines, version) ->
       return callback(error) if error?

From 03e9d7390ffdd52971507696e926a50929bc78e9 Mon Sep 17 00:00:00 2001
From: James Allen
Date: Mon, 31 Aug 2015 15:57:26 +0100
Subject: [PATCH 080/769] Add in mongo health check endpoint

---
 services/document-updater/app.coffee               |  7 +++
 .../app/coffee/MongoHealthCheck.coffee             | 26 ++++++++++
 .../MongoHealthCheckTests.coffee                   | 52 +++++++++++++++++++
 3 files changed, 85 insertions(+)
 create mode 100644 services/document-updater/app/coffee/MongoHealthCheck.coffee
 create mode 100644 services/document-updater/test/unit/coffee/MongoHealthCheckTests/MongoHealthCheckTests.coffee

diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee
index 3717d2f272..b0813e47f9 100644
--- a/services/document-updater/app.coffee
+++ b/services/document-updater/app.coffee
@@ -9,6 +9,7 @@ DispatchManager = require('./app/js/DispatchManager')
 Keys = require('./app/js/RedisKeyBuilder')
 Errors = require "./app/js/Errors"
 HttpController = require "./app/js/HttpController"
+MongoHealthCheck = require('./app/js/MongoHealthCheck')

 redis = require("redis-sharelatex")
 rclient = redis.createClient(Settings.redis.web)
@@ -57,6 +58,12 @@ app.get '/status', (req, res)->
   else
     res.send('document updater is alive')

+app.get '/health_check/mongo', (req, res, next) ->
+  MongoHealthCheck.isAlive (error) ->
+    if error?
+      res.send 500, error.message
+    else
+      res.send 200

 redisCheck = require("redis-sharelatex").activeHealthCheckRedis(Settings.redis.web)
 app.get "/health_check/redis", (req, res, next)->

diff --git a/services/document-updater/app/coffee/MongoHealthCheck.coffee b/services/document-updater/app/coffee/MongoHealthCheck.coffee
new file mode 100644
index 0000000000..3872c051c4
--- /dev/null
+++ b/services/document-updater/app/coffee/MongoHealthCheck.coffee
@@ -0,0 +1,26 @@
+Settings = require "settings-sharelatex"
+PersistenceManager = require "./PersistenceManager"
+
+module.exports = MongoHealthCheck =
+  isAlive: (_callback = (error) ->) ->
+    # We've very occasionally seen the doc-updater lose its connection to Mongo.
+    # E.g. https://sharelatex.hackpad.com/29th-Aug-2015-0650-0740-fHlw8RL8zuN
+    # It seems that the mongo callbacks never returned.
+    # Mongo is only called from the persistence manager, so we make a read-only
+    # test call and check that it returns in a reasonable time.
+    callback = (args...) ->
+      _callback(args...)
+      _callback = () ->
+
+    doc_id = Settings.smokeTest?.doc_id
+    if !doc_id?
+      return callback(new Error("No test doc_id configured"))
+
+    PersistenceManager.getDocVersionInMongo doc_id, (error, version) ->
+      return callback(error) if error?
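+      # Mongo responded; the wrapper above makes _callback a no-op after this first
+      # call, so the timeout below firing later is harmless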
+      callback(null)
+
+    timeout = Settings.smokeTest?.timeout or 10000
+    setTimeout () ->
+      callback(new Error("Mongo did not return in #{timeout}ms"))
+    , timeout
\ No newline at end of file

diff --git a/services/document-updater/test/unit/coffee/MongoHealthCheckTests/MongoHealthCheckTests.coffee b/services/document-updater/test/unit/coffee/MongoHealthCheckTests/MongoHealthCheckTests.coffee
new file mode 100644
index 0000000000..b8da766a4c
--- /dev/null
+++ b/services/document-updater/test/unit/coffee/MongoHealthCheckTests/MongoHealthCheckTests.coffee
@@ -0,0 +1,52 @@
+SandboxedModule = require('sandboxed-module')
+sinon = require('sinon')
+require('chai').should()
+modulePath = require('path').join __dirname, '../../../../app/js/MongoHealthCheck'
+
+describe "MongoHealthCheck", ->
+  beforeEach ->
+    @MongoHealthCheck = SandboxedModule.require modulePath, requires:
+      "settings-sharelatex": @Settings = {}
+      "./PersistenceManager": @PersistenceManager = {}
+    @doc_id = "mock-doc-id"
+    @callback = sinon.stub()
+
+  describe "isAlive", ->
+    describe "with no configured doc_id", ->
+      beforeEach ->
+        @MongoHealthCheck.isAlive @callback
+
+      it "should call the callback with an error", ->
+        @callback.calledOnce.should.equal true
+        error = @callback.args[0][0]
+        error.message.should.equal "No test doc_id configured"
+
+    describe "when mongo returns within the timeout", ->
+      beforeEach ->
+        @Settings.smokeTest =
+          doc_id: @doc_id
+        @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArg(1)
+        @MongoHealthCheck.isAlive @callback
+
+      it "should call PersistenceManager.getDocVersionInMongo", ->
+        @PersistenceManager.getDocVersionInMongo
+          .calledWith(@doc_id)
+          .should.equal true
+
+      it "should call the callback without an error", ->
+        @callback.calledOnce.should.equal true
+        @callback.calledWith(null).should.equal true
+
+    describe "when mongo does not return within the timeout", ->
+      beforeEach (done) ->
+        @Settings.smokeTest =
+          doc_id: @doc_id
+          timeout: 50
+        @PersistenceManager.getDocVersionInMongo = (doc_id, callback) ->
+          setTimeout callback, 100
+        @MongoHealthCheck.isAlive (@error) =>
+          done()
+
+      it "should call the callback with an error", ->
+        @error.message.should.equal "Mongo did not return in 50ms"
+
\ No newline at end of file

From e73890bfc22b978712fb6ecc6a385d7a1fd89cc8 Mon Sep 17 00:00:00 2001
From: James Allen
Date: Fri, 6 Nov 2015 12:52:03 +0000
Subject: [PATCH 081/769] Error if update would make document too long

---
 .../document-updater/app/coffee/ShareJsUpdateManager.coffee | 2 +-
 .../document-updater/app/coffee/sharejs/server/model.coffee | 3 +++
 services/document-updater/config/settings.defaults.coffee   | 2 ++
 3 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
index 278a104beb..83786cbb96 100644
--- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
+++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
@@ -15,7 +15,7 @@ ShareJsModel:: = {}
 util.inherits ShareJsModel, EventEmitter

 module.exports = ShareJsUpdateManager =
-  getNewShareJsModel: () -> new ShareJsModel(ShareJsDB)
+  getNewShareJsModel: () -> new ShareJsModel(ShareJsDB, maxDocLength: Settings.max_doc_length)

   applyUpdates: (project_id, doc_id, updates, callback = (error, updatedDocLines) ->) ->
     logger.log project_id: project_id, doc_id: doc_id, updates: updates, "applying sharejs updates"

diff --git a/services/document-updater/app/coffee/sharejs/server/model.coffee b/services/document-updater/app/coffee/sharejs/server/model.coffee
index 284d6fd770..0e699cce92 100644
--- a/services/document-updater/app/coffee/sharejs/server/model.coffee
+++ b/services/document-updater/app/coffee/sharejs/server/model.coffee
@@ -136,6 +136,9 @@ module.exports = Model = (db, options) ->
       catch error
         console.error error.stack
         return callback error.message
+
+      if options.maxDocLength? and doc.snapshot.length > options.maxDocLength
+        return callback "Update takes doc over max doc size"

       # The op data should be at the current version, and the new document data should be at
       # the next version.

diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee
index fbda700b59..41d25b4ea2 100755
--- a/services/document-updater/config/settings.defaults.coffee
+++ b/services/document-updater/config/settings.defaults.coffee
@@ -23,6 +23,8 @@ module.exports =
     zip:
       minSize: 10*1024
       writesEnabled: false
+
+  max_doc_length: 2 * 1024 * 1024 # 2MB

   mongo:
     url: 'mongodb://127.0.0.1/sharelatex'

From 2589e2d417439642c1d0e58771be3cc5628c3b49 Mon Sep 17 00:00:00 2001
From: James Allen
Date: Thu, 19 Nov 2015 10:54:28 +0000
Subject: [PATCH 082/769] Gracefully return when an op has already been
 submitted

It is not a fatal error if an op has already been submitted. We just need
to send an ack back to the client that submitted it and continue.

If we detect a duplicate op, set dup: true on the op and pass it back to
real-time for distribution. The dup: true flag will ensure it only gets
acknowledged to the submitting client, not everyone.

---
 .../app/coffee/ShareJsUpdateManager.coffee | 23 ++++++++++++++-----
 1 file changed, 17 insertions(+), 6 deletions(-)

diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
index 83786cbb96..20de2d1dfd 100644
--- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
+++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
@@ -32,7 +32,14 @@ module.exports = ShareJsUpdateManager =
     for update in updates
       do (update) =>
         jobs.push (callback) =>
-          model.applyOp doc_key, update, callback
+          model.applyOp doc_key, update, (error) ->
+            if error == "Op already submitted"
+              logger.warn {project_id, doc_id, update}, "op has already been submitted"
+              update.dup = true
+              ShareJsUpdateManager._sendOp(project_id, doc_id, update)
+              callback()
+            else
+              callback(error)

     async.series jobs, (error) =>
       logger.log project_id: project_id, doc_id: doc_id, error: error, "applied updates"
@@ -49,11 +56,15 @@ module.exports = ShareJsUpdateManager =
   _listenForOps: (model) ->
     model.on "applyOp", (doc_key, opData) ->
       [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
-      data = JSON.stringify
-        project_id: project_id
-        doc_id: doc_id
-        op: opData
-      rclient.publish "applied-ops", data
+      ShareJsUpdateManager._sendOp(project_id, doc_id, opData)
+
+  _sendOp: (project_id, doc_id, opData) ->
+    data =
+      project_id: project_id
+      doc_id: doc_id
+      op: opData
+    data = JSON.stringify data
+    rclient.publish "applied-ops", data

   _sendError: (project_id, doc_id, error) ->
     data = JSON.stringify

From 7a9577e081e1a2ae6a160cd97af78ac322a6ac86 Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Mon, 30 Nov 2015 15:17:11 +0000
Subject: [PATCH 083/769] only flush to track-changes when ops are queued

---
 .../document-updater/app/coffee/DocumentManager.coffee     |  2 +-
.../document-updater/app/coffee/RedisManager.coffee | 3 +++ .../app/coffee/TrackChangesManager.coffee | 10 ++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 81fa0aa66c..c7aa1e565a 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -91,7 +91,7 @@ module.exports = DocumentManager = logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" PersistenceManager.setDoc project_id, doc_id, lines, version, (error) -> return callback(error) if error? - TrackChangesManager.flushDocChanges project_id, doc_id, (error) -> + TrackChangesManager.flushDocChangesIfNeeded project_id, doc_id, (error) -> return callback(error) if error? callback null diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index d280de1cea..b0a33aa734 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -171,6 +171,9 @@ module.exports = RedisManager = [length, _] = results callback(error, length) + getUncompressedHistoryOpLength: (doc_id, callback = (error, length) ->) -> + rclient.llen keys.uncompressedHistoryOp(doc_id: doc_id), callback + getDocOpsLength: (doc_id, callback = (error, length) ->) -> rclient.llen keys.docOps(doc_id: doc_id), callback diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 90cba86b36..86bb329f9b 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -5,6 +5,16 @@ RedisManager = require "./RedisManager" crypto = require("crypto") module.exports = TrackChangesManager = + + flushDocChangesIfNeeded: (project_id, doc_id, callback = (error) ->) -> + RedisManager.getUncompressedHistoryOpLength doc_id, (error, length) -> + return callback(error) if error? + if length > 0 + # only make request to track changes if there are queued ops + TrackChangesManager.flushDocChanges project_id, doc_id, callback + else + callback() + flushDocChanges: (project_id, doc_id, callback = (error) ->) -> if !settings.apis?.trackchanges? 
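+      # without a configured track-changes endpoint there is nothing to flush to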
logger.warn doc_id: doc_id, "track changes API is not configured, so not flushing" From 4bf90afe0e20138f0e85ee5b17e0a813ab160063 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 30 Nov 2015 15:22:21 +0000 Subject: [PATCH 084/769] update tests --- .../test/unit/coffee/DocumentManager/flushDocTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee index 551b444f7f..97085d58b3 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee @@ -24,7 +24,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) - @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) + @TrackChangesManager.flushDocChangesIfNeeded = sinon.stub().callsArg(2) @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback From 64f5d5526e822c729d792606cdac0cbdf0e4bf4d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Dec 2015 15:48:31 +0000 Subject: [PATCH 085/769] Revert "update tests" This reverts commit 501e891760fd5c5bd0e6ffc75f866fd565561f31. --- .../test/unit/coffee/DocumentManager/flushDocTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee index 97085d58b3..551b444f7f 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee @@ -24,7 +24,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) - @TrackChangesManager.flushDocChangesIfNeeded = sinon.stub().callsArg(2) + @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback From 13e22e1802c866dd884a4f2941239a8233c644f0 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Dec 2015 15:48:49 +0000 Subject: [PATCH 086/769] Revert "only flush to track-changes when ops are queued" This reverts commit 0baa8f989481ca263fa71e90af21f13b29c29504. --- .../document-updater/app/coffee/DocumentManager.coffee | 2 +- .../document-updater/app/coffee/RedisManager.coffee | 3 --- .../app/coffee/TrackChangesManager.coffee | 10 ---------- 3 files changed, 1 insertion(+), 14 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index c7aa1e565a..81fa0aa66c 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -91,7 +91,7 @@ module.exports = DocumentManager = logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" PersistenceManager.setDoc project_id, doc_id, lines, version, (error) -> return callback(error) if error? 
- TrackChangesManager.flushDocChangesIfNeeded project_id, doc_id, (error) -> + TrackChangesManager.flushDocChanges project_id, doc_id, (error) -> return callback(error) if error? callback null diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b0a33aa734..d280de1cea 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -171,9 +171,6 @@ module.exports = RedisManager = [length, _] = results callback(error, length) - getUncompressedHistoryOpLength: (doc_id, callback = (error, length) ->) -> - rclient.llen keys.uncompressedHistoryOp(doc_id: doc_id), callback - getDocOpsLength: (doc_id, callback = (error, length) ->) -> rclient.llen keys.docOps(doc_id: doc_id), callback diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 86bb329f9b..90cba86b36 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -5,16 +5,6 @@ RedisManager = require "./RedisManager" crypto = require("crypto") module.exports = TrackChangesManager = - - flushDocChangesIfNeeded: (project_id, doc_id, callback = (error) ->) -> - RedisManager.getUncompressedHistoryOpLength doc_id, (error, length) -> - return callback(error) if error? - if length > 0 - # only make request to track changes if there are queued ops - TrackChangesManager.flushDocChanges project_id, doc_id, callback - else - callback() - flushDocChanges: (project_id, doc_id, callback = (error) ->) -> if !settings.apis?.trackchanges? logger.warn doc_id: doc_id, "track changes API is not configured, so not flushing" From 6e97521971a38c262de7011043699c893b8623e1 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Dec 2015 15:58:40 +0000 Subject: [PATCH 087/769] Revert "Merge pull request #4 from sharelatex/flush-ops-from-redis" This reverts commit 84a9ad8b67b860a92271cf7bb669cf21cc0b5b86, reversing changes made to a6d4649f4f2db06862e8cbc5e132a073ce8c20b2. --- services/document-updater/app/coffee/DocumentManager.coffee | 5 +---- .../coffee/DocumentManager/flushAndDeleteDocTests.coffee | 1 - .../test/unit/coffee/DocumentManager/flushDocTests.coffee | 2 -- .../coffee/DocumentManager/getDocAndRecentOpsTests.coffee | 1 - .../test/unit/coffee/DocumentManager/getDocTests.coffee | 1 - .../test/unit/coffee/DocumentManager/setDocTests.coffee | 1 - 6 files changed, 1 insertion(+), 10 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 81fa0aa66c..0faa8d2b8a 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -4,7 +4,6 @@ DocOpsManager = require "./DocOpsManager" DiffCodec = require "./DiffCodec" logger = require "logger-sharelatex" Metrics = require "./Metrics" -TrackChangesManager = require "./TrackChangesManager" module.exports = DocumentManager = getDoc: (project_id, doc_id, _callback = (error, lines, version) ->) -> @@ -91,9 +90,7 @@ module.exports = DocumentManager = logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" PersistenceManager.setDoc project_id, doc_id, lines, version, (error) -> return callback(error) if error? 
- TrackChangesManager.flushDocChanges project_id, doc_id, (error) -> - return callback(error) if error? - callback null + callback null flushAndDeleteDoc: (project_id, doc_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.flushAndDeleteDoc") diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee index c4a6ef3d1a..85a25ee5a7 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee @@ -8,7 +8,6 @@ describe "DocumentUpdater - flushAndDeleteDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} - "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} "./DocOpsManager" :{} diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee index 551b444f7f..6bdba1a2b7 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee @@ -8,7 +8,6 @@ describe "DocumentUpdater - flushDocIfLoaded", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} - "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "./DocOpsManager": @DocOpsManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} @@ -24,7 +23,6 @@ describe "DocumentUpdater - flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) - @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee index 1e5f521b36..7a296cc47d 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee @@ -8,7 +8,6 @@ describe "DocumentUpdater - getDocAndRecentOps", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} - "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "./DocOpsManager": @DocOpsManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee index 9d95d46a87..ea68890199 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee @@ -8,7 +8,6 @@ describe "DocumentUpdater - getDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} - 
"./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "./DocOpsManager": @DocOpsManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} diff --git a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee index a1667a6669..b827b584f8 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee @@ -8,7 +8,6 @@ describe "DocumentManager - setDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} - "./TrackChangesManager": @TrackChangesManager = {} "./PersistenceManager": @PersistenceManager = {} "./DiffCodec": @DiffCodec = {} "./DocOpsManager":{} From e8f09f33577894e01141fa25909e71e190e2ccff Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Dec 2015 16:03:05 +0000 Subject: [PATCH 088/769] added comment about flushing to track changes --- services/document-updater/app/coffee/DocumentManager.coffee | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 0faa8d2b8a..258784a48c 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -100,6 +100,11 @@ module.exports = DocumentManager = DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> return callback(error) if error? + # We should flush pending ops to track-changes here but this is + # already done in the real-time WebsocketController.leaveProject + # method so we leave it there. Note, if you ever add the flush + # in here be sure to do it in the background because it can take + # a long time. RedisManager.removeDocFromMemory project_id, doc_id, (error) -> return callback(error) if error? callback null From 184c9031cd502b3d5b3b51a25935b749fb5782f1 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 20 Jan 2016 14:31:25 +0000 Subject: [PATCH 089/769] Delete doc from redis after a full set if it wasn't in redis before --- .../app/coffee/DocumentManager.coffee | 23 ++++-- .../coffee/ApplyingUpdatesToADocTests.coffee | 2 +- .../coffee/SettingADocumentTests.coffee | 80 ++++++++++++++----- .../coffee/DocumentManager/getDocTests.coffee | 4 +- .../coffee/DocumentManager/setDocTests.coffee | 38 +++++---- 5 files changed, 102 insertions(+), 45 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 258784a48c..1b1afeda3c 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -12,7 +12,7 @@ module.exports = DocumentManager = timer.done() _callback(args...) - RedisManager.getDoc doc_id, (error, lines, version) -> + RedisManager.getDoc doc_id, (error, lines, version, alreadyLoaded) -> return callback(error) if error? if !lines? or !version? logger.log project_id: project_id, doc_id: doc_id, "doc not in redis so getting from persistence API" @@ -21,9 +21,9 @@ module.exports = DocumentManager = logger.log project_id: project_id, doc_id: doc_id, lines: lines, version: version, "got doc from persistence API" RedisManager.putDocInMemory project_id, doc_id, lines, version, (error) -> return callback(error) if error? 
- callback null, lines, version + callback null, lines, version, false else - callback null, lines, version + callback null, lines, version, true getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") @@ -50,7 +50,7 @@ module.exports = DocumentManager = return callback(new Error("No lines were provided to setDoc")) UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version) -> + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, alreadyLoaded) -> return callback(error) if error? if oldLines? and oldLines.length > 0 and oldLines[0].text? @@ -70,9 +70,18 @@ module.exports = DocumentManager = user_id: user_id UpdateManager.applyUpdates project_id, doc_id, [update], (error) -> return callback(error) if error? - DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> - return callback(error) if error? - callback null + # If the document was loaded already, then someone has it open + # in a project, and the usual flushing mechanism will happen. + # Otherwise we should remove it immediately since nothing else + # is using it. + if alreadyLoaded + DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> + return callback(error) if error? + callback null + else + DocumentManager.flushAndDeleteDoc project_id, doc_id, (error) -> + return callback(error) if error? + callback null flushDocIfLoaded: (project_id, doc_id, _callback = (error) ->) -> diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index f213458168..e9b5da5b1d 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -360,7 +360,7 @@ describe "Applying updates to a large doc (uses compression)", -> throw error if error? DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) => throw error if error? - setTimeout done, 200 + setTimeout done, 500 after -> MockTrackChangesApi.flushDoc.restore() diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 5dfc5d95f0..cf0d224995 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -1,14 +1,16 @@ sinon = require "sinon" chai = require("chai") chai.should() +expect = require("chai").expect {db, ObjectId} = require "../../../app/js/mongojs" +rclient = require("redis").createClient() +MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Setting a document", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + before -> @lines = ["one", "two", "three"] @version = 42 @update = @@ -23,30 +25,31 @@ describe "Setting a document", -> @source = "dropbox" @user_id = "user-id-123" - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => - throw error if error? 
- done() + sinon.spy MockTrackChangesApi, "flushDoc" + sinon.spy MockWebApi, "setDocumentLines" + + after -> + MockWebApi.setDocumentLines.restore() + MockTrackChangesApi.flushDoc.restore() - describe "when the updated doc exists in the doc updater", -> before (done) -> - sinon.spy MockWebApi, "setDocumentLines" - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? - setTimeout () => - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => - @statusCode = res.statusCode - done() - , 200 - - after -> - MockWebApi.setDocumentLines.restore() + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => + throw error if error? + setTimeout () => + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => + @statusCode = res.statusCode + done() + , 200 it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -65,4 +68,39 @@ describe "Setting a document", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.version.should.equal @version + 2 done() + + it "should leave the document in redis", (done) -> + rclient.get "doclines:#{@doc_id}", (error, lines) => + throw error if error? + expect(JSON.parse(lines)).to.deep.equal @newLines + done() + + describe "when the updated doc does not exist in the doc updater", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => + throw error if error? + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => + @statusCode = res.statusCode + done() + + it "should return a 204 status code", -> + @statusCode.should.equal 204 + it "should send the updated doc lines to the web api", -> + MockWebApi.setDocumentLines + .calledWith(@project_id, @doc_id, @newLines) + .should.equal true + + it "should flush track changes"#, -> + # MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true + + it "should remove the document from redis", (done) -> + rclient.get "doclines:#{@doc_id}", (error, lines) => + throw error if error? 
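+        # the doc was flushed and deleted above, so its doclines key should be gone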
+ expect(lines).to.not.exist + done() diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee index ea68890199..0db51ddba8 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee @@ -32,7 +32,7 @@ describe "DocumentUpdater - getDoc", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version).should.equal true + @callback.calledWith(null, @lines, @version, true).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -60,7 +60,7 @@ describe "DocumentUpdater - getDoc", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version).should.equal true + @callback.calledWith(null, @lines, @version, false).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee index b827b584f8..76067f943e 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee @@ -29,13 +29,14 @@ describe "DocumentManager - setDoc", -> beforeEach -> @beforeLines = ["before", "lines"] @afterLines = ["after", "lines"] + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, true) + @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) + @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null) + @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) + @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(2) - describe "successfully", -> + describe "when already loaded", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version) - @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) - @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null) - @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, @callback it "should get the current doc lines", -> @@ -76,17 +77,26 @@ describe "DocumentManager - setDoc", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + + describe "when not already loaded", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, false) + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, @callback - describe "without new lines", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version) - @DocumentManager.setDoc @project_id, @doc_id, null, @callback + it "should flush and delete the doc from the doc updater", -> + @DocumentManager.flushAndDeleteDoc + .calledWith(@project_id, @doc_id) + .should.equal true - it "should return teh callback with an error", -> - @callback.calledWith(new Error("No lines were passed to setDoc")) - - it "should not try to get the doc lines", -> - @DocumentManager.getDoc.called.should.equal false + describe "without new lines", -> + beforeEach -> + @DocumentManager.setDoc @project_id, @doc_id, null, 
@source, @user_id, @callback
+
+      it "should return the callback with an error", ->
+        @callback.calledWith(new Error("No lines were passed to setDoc"))
+
+      it "should not try to get the doc lines", ->
+        @DocumentManager.getDoc.called.should.equal false

From af5d01e440d47106154c88492d357534daebc04c Mon Sep 17 00:00:00 2001
From: James Allen
Date: Wed, 20 Jan 2016 15:05:31 +0000
Subject: [PATCH 090/769] Flush track changes when unloading data from redis

---
 .../app/coffee/DocumentManager.coffee         | 13 ++++++++-----
 .../coffee/ApplyingUpdatesToADocTests.coffee  |  4 ++--
 .../coffee/DeletingADocumentTests.coffee      | 16 ++++++++++++++--
 .../coffee/DeletingAProjectTests.coffee       | 10 ++++++++++
 .../coffee/SettingADocumentTests.coffee       |  6 +++---
 .../flushAndDeleteDocTests.coffee             |  7 +++++++
 6 files changed, 44 insertions(+), 12 deletions(-)

diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee
index 1b1afeda3c..69311bd979 100644
--- a/services/document-updater/app/coffee/DocumentManager.coffee
+++ b/services/document-updater/app/coffee/DocumentManager.coffee
@@ -4,6 +4,7 @@ DocOpsManager = require "./DocOpsManager"
 DiffCodec = require "./DiffCodec"
 logger = require "logger-sharelatex"
 Metrics = require "./Metrics"
+TrackChangesManager = require "./TrackChangesManager"

 module.exports = DocumentManager =
   getDoc: (project_id, doc_id, _callback = (error, lines, version) ->) ->
@@ -109,11 +110,13 @@ module.exports = DocumentManager =
     DocumentManager.flushDocIfLoaded project_id, doc_id, (error) ->
       return callback(error) if error?
-      # We should flush pending ops to track-changes here but this is
-      # already done in the real-time WebsocketController.leaveProject
-      # method so we leave it there. Note, if you ever add the flush
-      # in here be sure to do it in the background because it can take
-      # a long time.
+
+      # Flush in the background since it requires an HTTP request
+      # to track changes
+      TrackChangesManager.flushDocChanges project_id, doc_id, (err) ->
+        if err?
+          logger.err {err, project_id, doc_id}, "error flushing to track changes"
+
       RedisManager.removeDocFromMemory project_id, doc_id, (error) ->
         return callback(error) if error?
callback null diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index e9b5da5b1d..a1c56ee41e 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -365,8 +365,8 @@ describe "Applying updates to a large doc (uses compression)", -> after -> MockTrackChangesApi.flushDoc.restore() - it "should flush the doc twice", -> - MockTrackChangesApi.flushDoc.calledTwice.should.equal true + it "should flush the doc", -> + MockTrackChangesApi.flushDoc.called.should.equal true describe "when there is no version in Mongo", -> before (done) -> diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee index 139ba9bbed..e08b7fc12f 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee @@ -3,6 +3,7 @@ chai = require("chai") chai.should() {db, ObjectId} = require "../../../app/js/mongojs" +MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -18,6 +19,11 @@ describe "Deleting a document", -> }] v: @version @result = ["one", "one and a half", "two", "three"] + + sinon.spy MockTrackChangesApi, "flushDoc" + + after -> + MockTrackChangesApi.flushDoc.restore() describe "when the updated doc exists in the doc updater", -> before (done) -> @@ -38,7 +44,7 @@ describe "Deleting a document", -> setTimeout () => DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => @statusCode = res.statusCode - done() + setTimeout done, 200 , 200 after -> @@ -70,6 +76,9 @@ describe "Deleting a document", -> .calledWith(@project_id, @doc_id) .should.equal true done() + + it "should flush track changes", -> + MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true describe "when the doc is not in the doc updater", -> before (done) -> @@ -81,7 +90,7 @@ describe "Deleting a document", -> sinon.spy MockWebApi, "getDocument" DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => @statusCode = res.statusCode - done() + setTimeout done, 200 after -> MockWebApi.setDocumentLines.restore() @@ -100,6 +109,9 @@ describe "Deleting a document", -> .calledWith(@project_id, @doc_id) .should.equal true done() + + it "should flush track changes", -> + MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 5bfc5a6ee8..2f7a47ff8b 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -3,6 +3,7 @@ chai = require("chai") chai.should() async = require "async" +MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -37,6 +38,11 @@ describe "Deleting a project", -> lines: doc.lines version: doc.update.v } + + sinon.spy MockTrackChangesApi, "flushDoc" + + after -> + MockTrackChangesApi.flushDoc.restore() 
describe "with documents which have been updated", -> before (done) -> @@ -78,5 +84,9 @@ describe "Deleting a project", -> ), () -> MockWebApi.getDocument.restore() done() + + it "should flush each doc in track changes", -> + for doc in @docs + MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal true diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index cf0d224995..0d05e30982 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -86,7 +86,7 @@ describe "Setting a document", -> throw error if error? DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => @statusCode = res.statusCode - done() + setTimeout done, 200 it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -96,8 +96,8 @@ describe "Setting a document", -> .calledWith(@project_id, @doc_id, @newLines) .should.equal true - it "should flush track changes"#, -> - # MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true + it "should flush track changes", -> + MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true it "should remove the document from redis", (done) -> rclient.get "doclines:#{@doc_id}", (error, lines) => diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee index 85a25ee5a7..3b9a4314a4 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee @@ -9,6 +9,7 @@ describe "DocumentUpdater - flushAndDeleteDoc", -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} "./PersistenceManager": @PersistenceManager = {} + "./TrackChangesManager": @TrackChangesManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} "./DocOpsManager" :{} "./Metrics": @Metrics = @@ -22,6 +23,7 @@ describe "DocumentUpdater - flushAndDeleteDoc", -> beforeEach -> @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2) + @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback it "should flush the doc", -> @@ -39,3 +41,8 @@ describe "DocumentUpdater - flushAndDeleteDoc", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + + it "should flush to track changes", -> + @TrackChangesManager.flushDocChanges + .calledWith(@project_id, @doc_id) + .should.equal true From 3c7c318ea01cfe88e1aff20e21f87939776bf478 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 20 Jan 2016 17:36:06 +0000 Subject: [PATCH 091/769] Clean up and speed up unit tests by making sure requires are mocked --- .../flushAndDeleteDocTests.coffee | 2 +- ...ts.coffee => flushDocIfLoadedTests.coffee} | 3 +- .../getDocAndRecentOpsTests.coffee | 3 +- .../coffee/DocumentManager/getDocTests.coffee | 3 +- .../coffee/DocumentManager/setDocTests.coffee | 3 +- .../coffee/GettingListOfPendingUpdates.coffee | 42 ------------------- .../HttpController/deleteProjectTests.coffee | 2 +- .../flushAndDeleteDocTests.coffee | 2 +- .../flushDocIfLoadedTests.coffee | 2 +- 
.../HttpController/flushProjectTests.coffee | 2 +- .../coffee/HttpController/getDocTests.coffee | 2 +- .../coffee/HttpController/setDocTests.coffee | 2 +- .../coffee/LockManager/CheckingTheLock.coffee | 1 + .../LockManager/ReleasingTheLock.coffee | 1 + .../coffee/LockManager/getLockTests.coffee | 1 + .../coffee/LockManager/tryLockTests.coffee | 2 + .../getDocFromWebTests.coffee | 1 + .../getDocVersionInMongoTests.coffee | 1 + .../setDocInWebTests.coffee | 1 + .../setDocVersionInMongo.coffee | 1 + .../clearDocFromPendingUpdatesSetTests.coffee | 1 + .../getCountOfDocsInMemoryTests.coffee} | 7 ++-- .../getDocTests.coffee} | 3 +- .../getDocsWithPendingUpdatesTests.coffee | 1 + .../getPendingUpdatesForDocTests.coffee | 1 + .../getPreviousDocOpsTests.coffee | 1 + .../getUpdatesLengthTests.coffee} | 6 ++- .../coffee/RedisManager/pushDocOpTests.coffee | 1 + .../pushUncompressedHistoryOpTests.coffee | 1 + .../putDocInMemoryTests.coffee} | 7 ++-- .../removeDocFromMemoryTests.coffee} | 7 ++-- .../unit/coffee/ShareJsDB/GetOpsTests.coffee | 1 + .../coffee/ShareJsDB/GetSnapshotTests.coffee | 1 + .../coffee/ShareJsDB/WriteOpsTests.coffee | 1 + .../ShareJsUpdateManagerTests.coffee | 2 +- .../unit/coffee/ZipManager/ZipManager.coffee | 2 +- 36 files changed, 53 insertions(+), 67 deletions(-) rename services/document-updater/test/unit/coffee/DocumentManager/{flushDocTests.coffee => flushDocIfLoadedTests.coffee} (96%) delete mode 100644 services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee rename services/document-updater/test/unit/coffee/{GettingTotalNumberOfDocs.coffee => RedisManager/getCountOfDocsInMemoryTests.coffee} (83%) rename services/document-updater/test/unit/coffee/{GettingDoc.coffee => RedisManager/getDocTests.coffee} (92%) rename services/document-updater/test/unit/coffee/{CheckingUpdatesLength.coffee => RedisManager/getUpdatesLengthTests.coffee} (80%) rename services/document-updater/test/unit/coffee/{AddingDocsToMemory.coffee => RedisManager/putDocInMemoryTests.coffee} (87%) rename services/document-updater/test/unit/coffee/{RemovingSingleDocFromMemory.coffee => RedisManager/removeDocFromMemoryTests.coffee} (89%) rename services/document-updater/test/unit/coffee/{ => ShareJsUpdateManager}/ShareJsUpdateManagerTests.coffee (98%) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee index 3b9a4314a4..911efce1ba 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee @@ -4,7 +4,7 @@ should = chai.should() modulePath = "../../../../app/js/DocumentManager.js" SandboxedModule = require('sandboxed-module') -describe "DocumentUpdater - flushAndDeleteDoc", -> +describe "DocumentUpdater.flushAndDeleteDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee similarity index 96% rename from services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee rename to services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee index 6bdba1a2b7..bda914999b 100644 --- 
a/services/document-updater/test/unit/coffee/DocumentManager/flushDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee @@ -4,7 +4,7 @@ should = chai.should() modulePath = "../../../../app/js/DocumentManager.js" SandboxedModule = require('sandboxed-module') -describe "DocumentUpdater - flushDocIfLoaded", -> +describe "DocumentManager.flushDocIfLoaded", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} @@ -14,6 +14,7 @@ describe "DocumentUpdater - flushDocIfLoaded", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + "./TrackChangesManager": {} @project_id = "project-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee index 7a296cc47d..8c54b2b854 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee @@ -4,7 +4,7 @@ should = chai.should() modulePath = "../../../../app/js/DocumentManager.js" SandboxedModule = require('sandboxed-module') -describe "DocumentUpdater - getDocAndRecentOps", -> +describe "DocumentUpdater.getDocAndRecentOps", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} @@ -14,6 +14,7 @@ describe "DocumentUpdater - getDocAndRecentOps", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + "./TrackChangesManager": {} @project_id = "project-id-123" @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee index 0db51ddba8..b11686ac3c 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee @@ -4,7 +4,7 @@ should = chai.should() modulePath = "../../../../app/js/DocumentManager.js" SandboxedModule = require('sandboxed-module') -describe "DocumentUpdater - getDoc", -> +describe "DocumentUpdater.getDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} @@ -14,6 +14,7 @@ describe "DocumentUpdater - getDoc", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + "./TrackChangesManager": {} @project_id = "project-id-123" @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee index 76067f943e..9307c42feb 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee @@ -4,7 +4,7 @@ should = chai.should() modulePath = "../../../../app/js/DocumentManager.js" SandboxedModule = require('sandboxed-module') -describe "DocumentManager - setDoc", -> +describe "DocumentManager.setDoc", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} @@ -16,6 +16,7 @@ describe "DocumentManager - setDoc", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + "./TrackChangesManager": {} 
@project_id = "project-id-123" @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee b/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee deleted file mode 100644 index 10d1c39038..0000000000 --- a/services/document-updater/test/unit/coffee/GettingListOfPendingUpdates.coffee +++ /dev/null @@ -1,42 +0,0 @@ -assert = require('assert') -should = require('chai').should() -path = require('path') -modulePath = path.join __dirname, '../../../app/js/RedisManager.js' -_ = require('underscore') -SandboxedModule = require('sandboxed-module') -keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') - -describe 'getting entire list of pending updates', ()-> - - doc_id = 123 - redisMemory = {} - correctUpdates = [{"update1"}, {"update2"}, {"update3"}] - jsonCorrectUpdates = _.map correctUpdates, (d)-> JSON.stringify d - redisMemory[keys.pendingUpdates(doc_id:doc_id)] = jsonCorrectUpdates - redisMemory[keys.pendingUpdates(doc_id:"notThis")] = JSON.stringify([{"updatex"}, {"updatez"}]) - - redisReturn = [] - - mocks = - "redis-sharelatex": - createClient: ()-> - auth:-> - multi: ()-> - lrange:(key, start, end)-> - key.should.equal(keys.pendingUpdates(doc_id:doc_id)) - start.should.equal(0) - end.should.equal(-1) - redisReturn.push(redisMemory[key]) - del : (key)-> - key.should.equal(keys.pendingUpdates(doc_id:doc_id)) - redisReturn.push(1) - exec: (callback)-> - callback(null, redisReturn) - - redisManager = SandboxedModule.require(modulePath, requires: mocks) - - it 'should have 3 elements in array', (done)-> - redisManager.getPendingUpdatesForDoc doc_id, (err, listOfUpdates)-> - listOfUpdates.length.should.equal(3) - done() - diff --git a/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee b/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee index e3c6eda35c..796df52e80 100644 --- a/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee @@ -5,7 +5,7 @@ modulePath = "../../../../app/js/HttpController.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors.js" -describe "HttpController - deleteProject", -> +describe "HttpController.deleteProject", -> beforeEach -> @HttpController = SandboxedModule.require modulePath, requires: "./DocumentManager": @DocumentManager = {} diff --git a/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee index f586b6c4f8..af09c2c1bd 100644 --- a/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee @@ -5,7 +5,7 @@ modulePath = "../../../../app/js/HttpController.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors.js" -describe "HttpController - flushAndDeleteDoc", -> +describe "HttpController.flushAndDeleteDoc", -> beforeEach -> @HttpController = SandboxedModule.require modulePath, requires: "./DocumentManager": @DocumentManager = {} diff --git a/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee index 69c0137676..3321030624 100644 --- 
a/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee @@ -5,7 +5,7 @@ modulePath = "../../../../app/js/HttpController.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors.js" -describe "HttpController - flushDocIfLoaded", -> +describe "HttpController.flushDocIfLoaded", -> beforeEach -> @HttpController = SandboxedModule.require modulePath, requires: "./DocumentManager": @DocumentManager = {} diff --git a/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee index 5175cd4280..e45269ce6d 100644 --- a/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee @@ -5,7 +5,7 @@ modulePath = "../../../../app/js/HttpController.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors.js" -describe "HttpController - flushProject", -> +describe "HttpController.flushProject", -> beforeEach -> @HttpController = SandboxedModule.require modulePath, requires: "./DocumentManager": @DocumentManager = {} diff --git a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee index 4ec493bc4b..17e5ad8e08 100644 --- a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee @@ -5,7 +5,7 @@ modulePath = "../../../../app/js/HttpController.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors.js" -describe "HttpController - getDoc", -> +describe "HttpController.getDoc", -> beforeEach -> @HttpController = SandboxedModule.require modulePath, requires: "./DocumentManager": @DocumentManager = {} diff --git a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee index dd2a7c1d59..b60549c137 100644 --- a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee @@ -5,7 +5,7 @@ modulePath = "../../../../app/js/HttpController.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors.js" -describe "HttpController - setDoc", -> +describe "HttpController.setDoc", -> beforeEach -> @HttpController = SandboxedModule.require modulePath, requires: "./DocumentManager": @DocumentManager = {} diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee index 598c4903c2..84c34f0725 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -27,6 +27,7 @@ describe 'Lock Manager - checking the lock', ()-> expire: exireStub set: setStub exec: execStub + "./Metrics": {inc: () ->} LockManager = SandboxedModule.require(modulePath, requires: mocks) it 'should check if lock exists but not set or expire', (done)-> diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee 
b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 4ae75e7719..3b8b09877d 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -18,6 +18,7 @@ describe 'LockManager - releasing the lock', ()-> createClient : ()-> auth:-> del:deleteStub + "./Metrics": {inc: () ->} LockManager = SandboxedModule.require(modulePath, requires: mocks) diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index 3147f9701a..026bd81538 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -11,6 +11,7 @@ describe 'LockManager - getting the lock', -> "redis-sharelatex": createClient : () => auth:-> + "./Metrics": {inc: () ->} @callback = sinon.stub() @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index c828399c12..0a1ddc00e9 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -12,6 +12,8 @@ describe 'LockManager - trying the lock', -> createClient : () => auth:-> set: @set = sinon.stub() + "./Metrics": {inc: () ->} + @callback = sinon.stub() @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee index 82ee937591..2207ea1e7d 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee @@ -13,6 +13,7 @@ describe "PersistenceManager.getDocFromWeb", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} @project_id = "project-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee index a5015279fe..2ab89f6795 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee @@ -17,6 +17,7 @@ describe "PersistenceManager.getDocVersionInMongo", -> "./mongojs": db: @db = { docOps: {} } ObjectId: ObjectId + "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} @doc_id = ObjectId().toString() @callback = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee index ad218caa10..d7e6727e9a 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee @@ -13,6 +13,7 @@ describe "PersistenceManager.setDocInWeb", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + 
"logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} @project_id = "project-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee index 7f228fc341..d642aba0d8 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee @@ -17,6 +17,7 @@ describe "PersistenceManager.getDocVersionInMongo", -> "./mongojs": db: @db = { docOps: {} } ObjectId: ObjectId + "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} @doc_id = ObjectId().toString() @callback = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee index 86ab837a2f..c89842f7bc 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee @@ -13,6 +13,7 @@ describe "RedisManager.clearDocFromPendingUpdatesSet", -> "redis-sharelatex" : createClient: () => @rclient ?= auth:-> # only assign one rclient "logger-sharelatex": {} + "./ZipManager": {} @rclient.srem = sinon.stub().callsArg(2) @RedisManager.clearDocFromPendingUpdatesSet(@project_id, @doc_id, @callback) diff --git a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee b/services/document-updater/test/unit/coffee/RedisManager/getCountOfDocsInMemoryTests.coffee similarity index 83% rename from services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee rename to services/document-updater/test/unit/coffee/RedisManager/getCountOfDocsInMemoryTests.coffee index bac04361c3..e66fecf86a 100644 --- a/services/document-updater/test/unit/coffee/GettingTotalNumberOfDocs.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getCountOfDocsInMemoryTests.coffee @@ -2,11 +2,11 @@ require('coffee-script') assert = require('assert') should = require('chai').should() path = require('path') -modulePath = path.join __dirname, '../../../app/js/RedisManager.js' -keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') +modulePath = path.join __dirname, '../../../../app/js/RedisManager.js' +keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') SandboxedModule = require('sandboxed-module') -describe 'getting cound of docs from memory', ()-> +describe 'RedisManager.getCountOfDocsInMemory', ()-> project_id = "12345" doc_id1 = "docid1" @@ -18,6 +18,7 @@ describe 'getting cound of docs from memory', ()-> beforeEach (done)-> mocks = + "./ZipManager": {} "logger-sharelatex": log:-> "redis-sharelatex": createClient : ()-> diff --git a/services/document-updater/test/unit/coffee/GettingDoc.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocTests.coffee similarity index 92% rename from services/document-updater/test/unit/coffee/GettingDoc.coffee rename to services/document-updater/test/unit/coffee/RedisManager/getDocTests.coffee index 0b4b466c24..e16ff856dd 100644 --- a/services/document-updater/test/unit/coffee/GettingDoc.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getDocTests.coffee @@ -1,7 +1,7 @@ 
sinon = require('sinon') chai = require('chai') should = chai.should() -modulePath = "../../../app/js/RedisManager.js" +modulePath = "../../../../app/js/RedisManager.js" SandboxedModule = require('sandboxed-module') describe 'RedisManager.getDoc', -> @@ -11,6 +11,7 @@ describe 'RedisManager.getDoc', -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: + "logger-sharelatex": {} "redis-sharelatex": @redis = createClient: () => @rclient diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee index 2f54ba171e..45efa4c984 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee @@ -8,6 +8,7 @@ describe "RedisManager.getDocsWithPendingUpdates", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: + "./ZipManager": {} "redis-sharelatex" : createClient: () => @rclient ?= auth:-> "logger-sharelatex": {} diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee index 9c70033eb4..40efa7cec2 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee @@ -7,6 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.getPendingUpdatesForDoc", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: + "./ZipManager": {} "redis-sharelatex": createClient: () => @rclient = auth: () -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee index 4a6d42c1ab..eb17d7856f 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee @@ -8,6 +8,7 @@ describe "RedisManager.getPreviousDocOpsTests", -> beforeEach -> @callback = sinon.stub() @RedisManager = SandboxedModule.require modulePath, requires: + "./ZipManager": {} "redis-sharelatex" : createClient: () => @rclient ?= auth: -> diff --git a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee b/services/document-updater/test/unit/coffee/RedisManager/getUpdatesLengthTests.coffee similarity index 80% rename from services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee rename to services/document-updater/test/unit/coffee/RedisManager/getUpdatesLengthTests.coffee index de04724fef..57e7cb1e02 100644 --- a/services/document-updater/test/unit/coffee/CheckingUpdatesLength.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getUpdatesLengthTests.coffee @@ -2,17 +2,19 @@ assert = require('chai').assert sinon = require('sinon') chai = require('chai') should = chai.should() -modulePath = "../../../app/js/RedisManager.js" +modulePath = "../../../../app/js/RedisManager.js" SandboxedModule = require('sandboxed-module') doc_id = "1234" -describe 'Document Manager - getUpdatesLength ', -> +describe 'Redis Manager.getUpdatesLength ', -> beforeEach -> @llenStub = 
sinon.stub() @redisManager = SandboxedModule.require modulePath, requires: + "./ZipManager": {} + "logger-sharelatex": {} "redis-sharelatex": createClient:=> auth:-> diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee index a90b20bced..1053ed75c1 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee @@ -7,6 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.pushDocOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: + "./ZipManager": {} "redis-sharelatex": createClient: () => @rclient ?= auth: () -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index 82b28a25d2..c7423fcff0 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -7,6 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "RedisManager.pushUncompressedHistoryOp", -> beforeEach -> @RedisManager = SandboxedModule.require modulePath, requires: + "./ZipManager": {} "redis-sharelatex": createClient: () => @rclient ?= auth: () -> diff --git a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee b/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee similarity index 87% rename from services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee rename to services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee index 328eb13b8c..16043e7e2c 100644 --- a/services/document-updater/test/unit/coffee/AddingDocsToMemory.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee @@ -1,13 +1,13 @@ require('coffee-script') assert = require('assert') path = require('path') -modulePath = path.join __dirname, '../../../app/js/RedisManager.js' -keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') +modulePath = path.join __dirname, '../../../../app/js/RedisManager.js' +keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') project_id = 1234 doc_id = 5678 SandboxedModule = require('sandboxed-module') -describe 'putting a doc into memory', ()-> +describe 'RedisManager.putDocInMemory', ()-> lines = ["this is one line", "and another line"] version = 42 @@ -21,6 +21,7 @@ describe 'putting a doc into memory', ()-> potentialSAdds[keys.docsInProject(project_id:project_id)] = doc_id mocks = + "./ZipManager": {} "logger-sharelatex": log:-> "redis-sharelatex": createClient : ()-> diff --git a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee b/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee similarity index 89% rename from services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee rename to services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee index aad1e94804..2c5076bb1c 100644 --- a/services/document-updater/test/unit/coffee/RemovingSingleDocFromMemory.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee @@ -3,11 +3,11 @@ _ = 
require("underscore") assert = require('assert') sinon = require('sinon') path = require('path') -modulePath = path.join __dirname, '../../../app/js/RedisManager.js' -keys = require(path.join __dirname, '../../../app/js/RedisKeyBuilder.js') +modulePath = path.join __dirname, '../../../../app/js/RedisManager.js' +keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') SandboxedModule = require('sandboxed-module') -describe 'removing single doc from memory', ()-> +describe 'RedisManager.removeDocFromMemory', ()-> project_id = "12345" doc_id1 = "docid1" @@ -21,6 +21,7 @@ describe 'removing single doc from memory', ()-> redisMemory = {} mocks = + "./ZipManager": {} "logger-sharelatex": error:-> log:-> diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee index 4812619574..31830e5afc 100644 --- a/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee @@ -16,6 +16,7 @@ describe "ShareJsDB.getOps", -> "./RedisManager": @RedisManager = {} "./DocOpsManager": @DocOpsManager = {} "./DocumentManager":{} + "logger-sharelatex": {} describe "with start == end", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee index ef433c1f90..1cd1e62c4e 100644 --- a/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee @@ -16,6 +16,7 @@ describe "ShareJsDB.getSnapshot", -> "./DocumentManager": @DocumentManager = {} "./RedisManager": {} "./DocOpsManager": {} + "logger-sharelatex": {} @version = 42 diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee index 6088de77f4..30e92bad3c 100644 --- a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee @@ -17,6 +17,7 @@ describe "ShareJsDB.writeOps", -> "./RedisManager": @RedisManager = {} "./DocOpsManager": @DocOpsManager = {} "./DocumentManager": {} + "logger-sharelatex": @logger = {error: sinon.stub()} describe "writing an op", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee similarity index 98% rename from services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee rename to services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index ecccc91a7e..6d21ca3889 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -1,7 +1,7 @@ sinon = require('sinon') chai = require('chai') should = chai.should() -modulePath = "../../../app/js/ShareJsUpdateManager.js" +modulePath = "../../../../app/js/ShareJsUpdateManager.js" SandboxedModule = require('sandboxed-module') describe "ShareJsUpdateManager", -> diff --git a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee index 
da7cdf4195..e477cfb23a 100644 --- a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee +++ b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee @@ -101,7 +101,7 @@ describe "ZipManager with RedisManager", -> @docLines = [] while @docLines.join('').length <= MIN_SIZE @docLines.push "this is a long line in a long document" - console.log "length of doclines", @docLines.join('').length + # console.log "length of doclines", @docLines.join('').length @callback = sinon.stub() @RedisManager.setDocument @doc_id, @docLines, @version, () => @callback() From 6c79ab4321f2a785572cbd92445185819805a53a Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 12 Apr 2016 17:10:39 +0100 Subject: [PATCH 092/769] Don't let HTTP calls take longer than 5 seconds since we're inside a 30 second lock --- .../app/coffee/PersistenceManager.coffee | 7 +++++ .../coffee/FlushingDocsTests.coffee | 28 +++++++++++++++++++ .../coffee/GettingADocumentTests.coffee | 20 +++++++++++-- .../getDocFromWebTests.coffee | 1 + .../setDocInWebTests.coffee | 1 + 5 files changed, 55 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 089700f23d..605425eb5e 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -5,6 +5,11 @@ Metrics = require "./Metrics" {db, ObjectId} = require("./mongojs") logger = require "logger-sharelatex" +# We have to be quick with HTTP calls because we're holding a lock that +# expires after 30 seconds. We can't let any errors in the rest of the stack +# hold us up, and need to bail out quickly if there is a problem. +MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds + module.exports = PersistenceManager = getDoc: (project_id, doc_id, callback = (error, lines, version) ->) -> PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines) -> @@ -37,6 +42,7 @@ module.exports = PersistenceManager = pass: Settings.apis.web.pass sendImmediately: true jar: false + timeout: MAX_HTTP_REQUEST_LENGTH }, (error, res, body) -> return callback(error) if error? if res.statusCode >= 200 and res.statusCode < 300 @@ -69,6 +75,7 @@ module.exports = PersistenceManager = pass: Settings.apis.web.pass sendImmediately: true jar: false + timeout: MAX_HTTP_REQUEST_LENGTH }, (error, res, body) -> return callback(error) if error? 
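[Editorial aside; the PersistenceManager diff resumes below.] The comment introduced by this patch states the constraint precisely: these HTTP calls run while holding a Redis lock with a 30-second TTL, so a hung web API must make the request fail fast instead of outliving the lock. A minimal sketch of the same idea using the `request` module's standard `timeout` option; the URL and handler here are hypothetical:

```coffee
request = require "request"

MAX_HTTP_REQUEST_LENGTH = 5000 # ms; keep this well below the 30s lock TTL

request.get {url: "http://web.example/doc", timeout: MAX_HTTP_REQUEST_LENGTH}, (error, res, body) ->
  # A hung upstream aborts with ETIMEDOUT or ESOCKETTIMEDOUT after 5s,
  # leaving plenty of time to release the lock before it expires.
  if error?
    console.log "bailing out early:", error.code
```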
if res.statusCode >= 200 and res.statusCode < 300 diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index 4513fd7d5c..8fe89de7be 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -1,6 +1,7 @@ sinon = require "sinon" chai = require("chai") chai.should() +expect = chai.expect async = require "async" {db, ObjectId} = require "../../../app/js/mongojs" @@ -71,3 +72,30 @@ describe "Flushing a doc to Mongo", -> it "should not flush the doc to the web api", -> MockWebApi.setDocumentLines.called.should.equal false + describe "when the web api http request takes a long time", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + @timeout = 10000 + MockWebApi.insertDoc @project_id, @doc_id, { + lines: @lines + } + sinon.stub MockWebApi, "setDocumentLines", (project_id, doc_id, lines, callback = (error) ->) -> + setTimeout callback, 30000 + + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => + throw error if error? + DocUpdaterClient.preloadDoc @project_id, @doc_id, done + + after -> + MockWebApi.setDocumentLines.restore() + + it "should return quickly(ish)", (done) -> + start = Date.now() + DocUpdaterClient.flushDoc @project_id, @doc_id, (error, res, doc) => + res.statusCode.should.equal 500 + delta = Date.now() - start + expect(delta).to.be.below 20000 + done() \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee index 980d73fa93..35c3f2c55a 100644 --- a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee @@ -1,6 +1,7 @@ sinon = require "sinon" chai = require("chai") chai.should() +expect = chai.expect {db, ObjectId} = require "../../../app/js/mongojs" MockWebApi = require "./helpers/MockWebApi" @@ -113,7 +114,22 @@ describe "Getting a document", -> it "should return 500", -> @statusCode.should.equal 500 + describe "when the web api http request takes a long time", -> + before (done) -> + @timeout = 10000 + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + sinon.stub MockWebApi, "getDocument", (project_id, doc_id, callback = (error, doc) ->) -> + setTimeout callback, 30000 + done() - - + after -> + MockWebApi.getDocument.restore() + + it "should return quickly(ish)", (done) -> + start = Date.now() + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + res.statusCode.should.equal 500 + delta = Date.now() - start + expect(delta).to.be.below 20000 + done() diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee index 2207ea1e7d..e782c0065b 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee @@ -41,6 +41,7 @@ describe "PersistenceManager.getDocFromWeb", -> pass: @pass sendImmediately: true jar: false + timeout: 5000 }) .should.equal true diff --git 
a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee index d7e6727e9a..b70c7dbe1b 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee @@ -43,6 +43,7 @@ describe "PersistenceManager.setDocInWeb", -> pass: @pass sendImmediately: true jar: false + timeout: 5000 }) .should.equal true From 945c728db2a2a540b9eeeed6f43cd158d63804f3 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 13 Apr 2016 11:59:56 +0100 Subject: [PATCH 093/769] Use signed locks so only the locking party can remove their lock --- .../app/coffee/LockManager.coffee | 45 +++++++++++++------ .../app/coffee/UpdateManager.coffee | 18 ++++---- .../coffee/LockManager/CheckingTheLock.coffee | 23 ++-------- .../LockManager/ReleasingTheLock.coffee | 9 ++-- .../coffee/LockManager/getLockTests.coffee | 20 +++++---- .../coffee/LockManager/tryLockTests.coffee | 8 ++-- .../UpdateManager/ApplyingUpdates.coffee | 8 ++-- .../lockUpdatesAndDoTests.coffee | 11 ++--- 8 files changed, 77 insertions(+), 65 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 0facb8519b..7901d3860a 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -4,42 +4,60 @@ redis = require("redis-sharelatex") rclient = redis.createClient(Settings.redis.web) keys = require('./RedisKeyBuilder') logger = require "logger-sharelatex" +os = require "os" +crypto = require "crypto" + +HOST = os.hostname() +PID = process.pid module.exports = LockManager = LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock MAX_LOCK_WAIT_TIME: 10000 # 10s maximum time to spend trying to get the lock - REDIS_LOCK_EXPIRY: 30 # seconds. Time until lock auto expires in redis. + LOCK_TTL: 30 # seconds. Time until lock auto expires in redis. + + # Use a signed lock value as described in + # http://redis.io/topics/distlock#correct-implementation-with-a-single-instance + # to prevent accidental unlocking by multiple processes + randomLock : () -> + time = Date.now() + RND = crypto.randomBytes(4).toString('hex') + return "locked:host=#{HOST}:pid=#{PID}:random=#{RND}:time=#{time}" + + unlockScript: 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end'; tryLock : (doc_id, callback = (err, isFree)->)-> - rclient.set keys.blockingKey(doc_id: doc_id), "locked", "EX", LockManager.REDIS_LOCK_EXPIRY, "NX", (err, gotLock)-> + lockValue = LockManager.randomLock() + key = keys.blockingKey(doc_id:doc_id) + rclient.set key, lockValue, "EX", @LOCK_TTL, "NX", (err, gotLock)-> return callback(err) if err? 
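[Editorial aside; the tryLock hunk continues below.] The `randomLock`/`unlockScript` pair added here follows the single-instance locking recipe from redis.io/topics/distlock: acquire with `SET key <unique-value> EX <ttl> NX`, and release through a Lua script that deletes the key only if it still holds your value, so a process whose lock has already expired cannot delete a newer holder's lock. A self-contained sketch of the pattern (client configuration omitted, names illustrative):

```coffee
redis = require "redis-sharelatex"
rclient = redis.createClient()

UNLOCK_SCRIPT = 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end'

acquireLock = (key, value, ttl, callback) ->
  # SET ... EX ttl NX: atomically "set with expiry, only if not already set"
  rclient.set key, value, "EX", ttl, "NX", (err, reply) ->
    callback err, reply == "OK"

releaseLock = (key, value, callback) ->
  # The script deletes the key only when it still holds *our* value
  rclient.eval UNLOCK_SCRIPT, 1, key, value, (err, deleted) ->
    callback err, deleted == 1
```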
if gotLock == "OK" metrics.inc "doc-not-blocking" - callback err, true + callback err, true, lockValue else metrics.inc "doc-blocking" - logger.log doc_id: doc_id, redis_response: gotLock, "doc is locked" + logger.log {doc_id, lockValue}, "doc is locked" callback err, false getLock: (doc_id, callback = (error) ->) -> startTime = Date.now() do attempt = () -> if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME - return callback(new Error("Timeout")) + e = new Error("Timeout") + e.doc_id = doc_id + return callback(e) - LockManager.tryLock doc_id, (error, gotLock) -> + LockManager.tryLock doc_id, (error, gotLock, lockValue) -> return callback(error) if error? if gotLock - callback(null) + callback(null, lockValue) else setTimeout attempt, LockManager.LOCK_TEST_INTERVAL checkLock: (doc_id, callback = (err, isFree)->)-> - multi = rclient.multi() - multi.exists keys.blockingKey(doc_id:doc_id) - multi.exec (err, replys)-> + key = keys.blockingKey(doc_id:doc_id) + rclient.exists key, (err, exists) -> return callback(err) if err? - exists = parseInt replys[0] + exists = parseInt exists if exists == 1 metrics.inc "doc-blocking" callback err, false @@ -47,7 +65,8 @@ module.exports = LockManager = metrics.inc "doc-not-blocking" callback err, true - releaseLock: (doc_id, callback)-> - rclient.del keys.blockingKey(doc_id:doc_id), callback + releaseLock: (doc_id, lockValue, callback)-> + key = keys.blockingKey(doc_id:doc_id) + rclient.eval LockManager.unlockScript, 1, key, lockValue, callback diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 8765c330ff..2ab74feda1 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -29,12 +29,12 @@ module.exports = UpdateManager = callback() processOutstandingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> - LockManager.tryLock doc_id, (error, gotLock) => + LockManager.tryLock doc_id, (error, gotLock, lockValue) => return callback(error) if error? return callback() if !gotLock UpdateManager.processOutstandingUpdates project_id, doc_id, (error) -> - return UpdateManager._handleErrorInsideLock(doc_id, error, callback) if error? - LockManager.releaseLock doc_id, (error) => + return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? + LockManager.releaseLock doc_id, lockValue, (error) => return callback(error) if error? UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback @@ -62,20 +62,20 @@ module.exports = UpdateManager = RedisManager.setDocument doc_id, updatedDocLines, version, callback lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) -> - LockManager.getLock doc_id, (error) -> + LockManager.getLock doc_id, (error, lockValue) -> return callback(error) if error? UpdateManager.processOutstandingUpdates project_id, doc_id, (error) -> - return UpdateManager._handleErrorInsideLock(doc_id, error, callback) if error? + return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? method project_id, doc_id, args..., (error, response_args...) -> - return UpdateManager._handleErrorInsideLock(doc_id, error, callback) if error? - LockManager.releaseLock doc_id, (error) -> + return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? + LockManager.releaseLock doc_id, lockValue, (error) -> return callback(error) if error? callback null, response_args... 
# We held the lock for a while so updates might have queued up UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id - _handleErrorInsideLock: (doc_id, original_error, callback = (error) ->) -> - LockManager.releaseLock doc_id, (lock_error) -> + _handleErrorInsideLock: (doc_id, lockValue, original_error, callback = (error) ->) -> + LockManager.releaseLock doc_id, lockValue, (lock_error) -> callback(original_error) _sanitizeUpdate: (update) -> diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee index 84c34f0725..e4514750c8 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -9,12 +9,9 @@ doc_id = 5678 blockingKey = "Blocking:#{doc_id}" SandboxedModule = require('sandboxed-module') -describe 'Lock Manager - checking the lock', ()-> +describe 'LockManager - checking the lock', ()-> existsStub = sinon.stub() - setStub = sinon.stub() - exireStub = sinon.stub() - execStub = sinon.stub() mocks = "logger-sharelatex": log:-> @@ -22,30 +19,18 @@ describe 'Lock Manager - checking the lock', ()-> "redis-sharelatex": createClient : ()-> auth:-> - multi: -> - exists: existsStub - expire: exireStub - set: setStub - exec: execStub + exists: existsStub "./Metrics": {inc: () ->} LockManager = SandboxedModule.require(modulePath, requires: mocks) - it 'should check if lock exists but not set or expire', (done)-> - execStub.callsArgWith(0, null, ["1"]) - LockManager.checkLock doc_id, (err, docIsLocked)-> - existsStub.calledWith(blockingKey).should.equal true - setStub.called.should.equal false - exireStub.called.should.equal false - done() - it 'should return true if the key does not exists', (done)-> - execStub.callsArgWith(0, null, "0") + existsStub.yields(null, "0") LockManager.checkLock doc_id, (err, free)-> free.should.equal true done() it 'should return false if the key does exists', (done)-> - execStub.callsArgWith(0, null, "1") + existsStub.yields(null, "1") LockManager.checkLock doc_id, (err, free)-> free.should.equal false done() diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 3b8b09877d..bca2f9124a 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -10,20 +10,21 @@ SandboxedModule = require('sandboxed-module') describe 'LockManager - releasing the lock', ()-> - deleteStub = sinon.stub().callsArgWith(1) + evalStub = sinon.stub().yields(1) mocks = "logger-sharelatex": log:-> "redis-sharelatex": createClient : ()-> auth:-> - del:deleteStub + eval: evalStub "./Metrics": {inc: () ->} LockManager = SandboxedModule.require(modulePath, requires: mocks) it 'should put a all data into memory', (done)-> - LockManager.releaseLock doc_id, -> - deleteStub.calledWith("Blocking:#{doc_id}").should.equal true + lockValue = "lock-value-stub" + LockManager.releaseLock doc_id, lockValue, -> + evalStub.calledWith(LockManager.unlockScript, 1, "Blocking:#{doc_id}", lockValue).should.equal true done() diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index 026bd81538..84cc3208a3 100644 --- 
a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -17,7 +17,8 @@ describe 'LockManager - getting the lock', -> describe "when the lock is not set", -> beforeEach (done) -> - @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true) + @lockValue = "mock-lock-value" + @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, @lockValue) @LockManager.getLock @doc_id, (args...) => @callback(args...) done() @@ -30,20 +31,21 @@ describe 'LockManager - getting the lock', -> it "should only need to try once", -> @LockManager.tryLock.callCount.should.equal 1 - it "should return the callback", -> - @callback.calledWith(null).should.equal true + it "should return the callback with the lock value", -> + @callback.calledWith(null, @lockValue).should.equal true describe "when the lock is initially set", -> beforeEach (done) -> + @lockValue = "mock-lock-value" startTime = Date.now() tries = 0 @LockManager.LOCK_TEST_INTERVAL = 5 - @LockManager.tryLock = (doc_id, callback = (error, isFree) ->) -> + @LockManager.tryLock = (doc_id, callback = (error, isFree) ->) => if (Date.now() - startTime < 20) or (tries < 2) tries = tries + 1 callback null, false else - callback null, true + callback null, true, @lockValue sinon.spy @LockManager, "tryLock" @LockManager.getLock @doc_id, (args...) => @@ -53,8 +55,8 @@ describe 'LockManager - getting the lock', -> it "should call tryLock multiple times until free", -> (@LockManager.tryLock.callCount > 1).should.equal true - it "should return the callback", -> - @callback.calledWith(null).should.equal true + it "should return the callback with the lock value", -> + @callback.calledWith(null, @lockValue).should.equal true describe "when the lock times out", -> beforeEach (done) -> @@ -66,7 +68,9 @@ describe 'LockManager - getting the lock', -> done() it "should return the callback with an error", -> - @callback.calledWith(new Error("timeout")).should.equal true + e = new Error("Timeout") + e.doc_id = @doc_id + @callback.calledWith(e).should.equal true diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index 0a1ddc00e9..33c3eb3d51 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -19,15 +19,17 @@ describe 'LockManager - trying the lock', -> describe "when the lock is not set", -> beforeEach -> + @lockValue = "mock-lock-value" + @LockManager.randomLock = sinon.stub().returns @lockValue @set.callsArgWith(5, null, "OK") @LockManager.tryLock @doc_id, @callback it "should set the lock key with an expiry if it is not set", -> - @set.calledWith("Blocking:#{@doc_id}", "locked", "EX", 30, "NX") + @set.calledWith("Blocking:#{@doc_id}", @lockValue, "EX", 30, "NX") .should.equal true - it "should return the callback with true", -> - @callback.calledWith(null, true).should.equal true + it "should return the callback with true and the lock value", -> + @callback.calledWith(null, true, @lockValue).should.equal true describe "when the lock is already set", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee index 70bab70c71..7b41647f7b 100644 --- 
a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee @@ -67,8 +67,8 @@ describe "UpdateManager", -> describe "processOutstandingUpdatesWithLock", -> describe "when the lock is free", -> beforeEach -> - @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true) - @LockManager.releaseLock = sinon.stub().callsArg(1) + @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, @lockValue = "mock-lock-value") + @LockManager.releaseLock = sinon.stub().callsArg(2) @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub().callsArg(2) @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) @@ -80,7 +80,7 @@ describe "UpdateManager", -> @LockManager.tryLock.calledWith(@doc_id).should.equal true it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id).should.equal true + @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true it "should process the outstanding updates", -> @UpdateManager.processOutstandingUpdates.calledWith(@project_id, @doc_id).should.equal true @@ -101,7 +101,7 @@ describe "UpdateManager", -> @UpdateManager.processOutstandingUpdatesWithLock @project_id, @doc_id, @callback it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id).should.equal true + @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true it "should return the error in the callback", -> @callback.calledWith(@error).should.equal true diff --git a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee index 74e5a689fa..adba644b27 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee @@ -17,8 +17,9 @@ describe 'UpdateManager - lockUpdatesAndDo', -> @callback = sinon.stub() @arg1 = "argument 1" @response_arg1 = "response argument 1" - @LockManager.getLock = sinon.stub().callsArgWith(1, null, true) - @LockManager.releaseLock = sinon.stub().callsArg(1) + @lockValue = "mock-lock-value" + @LockManager.getLock = sinon.stub().callsArgWith(1, null, @lockValue) + @LockManager.releaseLock = sinon.stub().callsArg(2) describe "successfully", -> beforeEach -> @@ -48,7 +49,7 @@ describe 'UpdateManager - lockUpdatesAndDo', -> it "should release the lock", -> @LockManager.releaseLock - .calledWith(@doc_id) + .calledWith(@doc_id, @lockValue) .should.equal true it "should continue processing updates", -> @@ -62,7 +63,7 @@ describe 'UpdateManager - lockUpdatesAndDo', -> @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id).should.equal true + @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true it "should return the error in the callback", -> @callback.calledWith(@error).should.equal true @@ -74,7 +75,7 @@ describe 'UpdateManager - lockUpdatesAndDo', -> @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id).should.equal true + @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true it "should return the error in the callback", -> @callback.calledWith(@error).should.equal true From d15a22be269af72e216a33873f6fbb636e7d0b87 Mon Sep 
17 00:00:00 2001 From: James Allen Date: Wed, 13 Apr 2016 12:20:53 +0100 Subject: [PATCH 094/769] Remove misleading log line about lock value --- services/document-updater/app/coffee/LockManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 7901d3860a..1c592c23f4 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -35,7 +35,7 @@ module.exports = LockManager = callback err, true, lockValue else metrics.inc "doc-blocking" - logger.log {doc_id, lockValue}, "doc is locked" + logger.log {doc_id}, "doc is locked" callback err, false getLock: (doc_id, callback = (error) ->) -> From 4ab4e27724b27a9330b0e781bab201b7a77fb473 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 13 Apr 2016 16:27:53 +0100 Subject: [PATCH 095/769] Include COUNT in lock and make RND per-process in case randomBytes blocks --- services/document-updater/app/coffee/LockManager.coffee | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 1c592c23f4..0c4eaad320 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -9,6 +9,8 @@ crypto = require "crypto" HOST = os.hostname() PID = process.pid +RND = crypto.randomBytes(4).toString('hex') +COUNT = 0 module.exports = LockManager = LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock @@ -20,8 +22,7 @@ module.exports = LockManager = # to prevent accidental unlocking by multiple processes randomLock : () -> time = Date.now() - RND = crypto.randomBytes(4).toString('hex') - return "locked:host=#{HOST}:pid=#{PID}:random=#{RND}:time=#{time}" + return "locked:host=#{HOST}:pid=#{PID}:random=#{RND}:time=#{time}:count=#{COUNT++}" unlockScript: 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end'; From 210a61112bca52bd5b6b093cfe3b1218a0c1f64a Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 29 Apr 2016 15:08:21 +0100 Subject: [PATCH 096/769] reject payloads larger than 2MB for setDoc --- .../app/coffee/HttpController.coffee | 12 ++++++++---- .../unit/coffee/HttpController/setDocTests.coffee | 11 ++++++++++- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index ef9fb38e19..65c7ca20d3 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -4,6 +4,8 @@ Errors = require "./Errors" logger = require "logger-sharelatex" Metrics = require "./Metrics" +TWO_MEGABYTES = 2 * 1024 * 1024 + module.exports = HttpController = getDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id @@ -34,6 +36,9 @@ module.exports = HttpController = lines = req.body.lines source = req.body.source user_id = req.body.user_id + if req.headers['content-length'] > TWO_MEGABYTES + logger.log {project_id, doc_id, source, user_id}, "document too large, returning 406 response" + return res.send 406 logger.log project_id: project_id, doc_id: doc_id, lines: lines, source: source, user_id: user_id, "setting doc via http" timer = new Metrics.Timer("http.setDoc") DocumentManager.setDocWithLock project_id, doc_id,
lines, source, user_id, (error) -> @@ -41,7 +46,7 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "set doc via http" res.send 204 # No Content - + flushDocIfLoaded: (req, res, next = (error) ->) -> doc_id = req.params.doc_id @@ -53,7 +58,7 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "flushed doc via http" res.send 204 # No Content - + flushAndDeleteDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id @@ -74,7 +79,7 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, "flushed project via http" res.send 204 # No Content - + deleteProject: (req, res, next = (error) ->) -> project_id = req.params.project_id logger.log project_id: project_id, "deleting project via http" @@ -84,4 +89,3 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, "deleted project via http" res.send 204 # No Content - diff --git a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee index b60549c137..e8c05ccd03 100644 --- a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee @@ -24,6 +24,7 @@ describe "HttpController.setDoc", -> @res = send: sinon.stub() @req = + headers: {} params: project_id: @project_id doc_id: @doc_id @@ -65,7 +66,15 @@ describe "HttpController.setDoc", -> @next .calledWith(new Error("oops")) .should.equal true - + describe "when the payload is too large", -> + beforeEach -> + @req.headers['content-length'] = 40 * 1024 * 1024 + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) + @HttpController.setDoc(@req, @res, @next) + it 'should send back a 406 response', -> + @res.send.calledWith(406).should.equal true + it 'should not call setDocWithLock', -> + @DocumentManager.setDocWithLock.callCount.should.equal 0 From 82d5a7fafdfde698dac57c6e136f31eae266e273 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 11 May 2016 15:55:21 +0100 Subject: [PATCH 097/769] check total size of lines, rather than content-length --- .../document-updater/app/coffee/HttpController.coffee | 11 +++++++++-- .../unit/coffee/HttpController/setDocTests.coffee | 5 ++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 65c7ca20d3..9c5eac94e9 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -30,14 +30,21 @@ module.exports = HttpController = version: version ops: ops + _getTotalSizeOfLines: (lines) -> + size = 0 + for line in lines + size += line.length + return size + setDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id lines = req.body.lines source = req.body.source user_id = req.body.user_id - if req.headers['content-length'] > TWO_MEGABYTES - logger.log {project_id, doc_id, source, user_id}, "document too large, returning 406 response" + lineSize = HttpController._getTotalSizeOfLines(lines) + if lineSize > TWO_MEGABYTES + logger.log {project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response" return res.send 406 
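[Editorial aside; the setDoc hunk continues below.] The move from the Content-Length header to summing the stored lines deserves a worked example: the `lines` array holds the document with its newline characters stripped, so a raw sum of `line.length` undercounts it, which is what patch 098 just below corrects by adding one byte per line. With hypothetical values:

```coffee
lines = ["hello", "world"] # stored form of the document "hello\nworld\n"

joinedLength = lines.reduce ((sum, line) -> sum + line.length), 0     # 10
withNewlines = lines.reduce ((sum, line) -> sum + line.length + 1), 0 # 12

# "hello\nworld\n" really is 12 characters, so counting one extra byte per
# line matches the size of the original document before splitting.
TWO_MEGABYTES = 2 * 1024 * 1024
console.log withNewlines > TWO_MEGABYTES # false, so no 406 for this doc
```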
logger.log project_id: project_id, doc_id: doc_id, lines: lines, source: source, user_id: user_id, "setting doc via http" timer = new Metrics.Timer("http.setDoc") diff --git a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee index e8c05ccd03..9a0176c25d 100644 --- a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee @@ -69,7 +69,10 @@ describe "HttpController.setDoc", -> describe "when the payload is too large", -> beforeEach -> - @req.headers['content-length'] = 40 * 1024 * 1024 + lines = [] + for _ in [0..300000] + lines.push "test test test" + @req.body.lines = lines @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) @HttpController.setDoc(@req, @res, @next) From 048fd19418270c0cff983838f695922dfed14f6b Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 12 May 2016 09:26:50 +0100 Subject: [PATCH 098/769] Add one to size of line. To account for newline characters in the original document --- services/document-updater/app/coffee/HttpController.coffee | 2 +- .../test/unit/coffee/HttpController/setDocTests.coffee | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 9c5eac94e9..ee6359b104 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -33,7 +33,7 @@ module.exports = HttpController = _getTotalSizeOfLines: (lines) -> size = 0 for line in lines - size += line.length + size += (line.length + 1) return size setDoc: (req, res, next = (error) ->) -> diff --git a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee index 9a0176c25d..385b8be044 100644 --- a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee @@ -70,7 +70,7 @@ describe "HttpController.setDoc", -> describe "when the payload is too large", -> beforeEach -> lines = [] - for _ in [0..300000] + for _ in [0..200000] lines.push "test test test" @req.body.lines = lines @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) From 11667e8250f04961ee8dc254babfb6413ac0524f Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 31 May 2016 13:24:19 +0100 Subject: [PATCH 099/769] Return semantic error if the requested ops are not available when getting a document --- services/document-updater/app.coffee | 4 +++- .../document-updater/app/coffee/Errors.coffee | 8 +++++++ .../app/coffee/RedisManager.coffee | 7 +++--- .../coffee/GettingADocumentTests.coffee | 23 ++++++++++++++----- 4 files changed, 32 insertions(+), 10 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index b0813e47f9..41c7a01958 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -74,10 +74,12 @@ app.get "/health_check/redis", (req, res, next)-> app.use (error, req, res, next) -> - logger.error err: error, "request errored" if error instanceof Errors.NotFoundError res.send 404 + else if error instanceof Errors.OpRangeNotAvailableError + res.send 422 # Unprocessable Entity else + logger.error err: error, "request 
errored" res.send(500, "Oops, something went wrong") shutdownCleanly = (signal) -> diff --git a/services/document-updater/app/coffee/Errors.coffee b/services/document-updater/app/coffee/Errors.coffee index 4a29822efc..941bfcc9f1 100644 --- a/services/document-updater/app/coffee/Errors.coffee +++ b/services/document-updater/app/coffee/Errors.coffee @@ -5,6 +5,14 @@ NotFoundError = (message) -> return error NotFoundError.prototype.__proto__ = Error.prototype +OpRangeNotAvailableError = (message) -> + error = new Error(message) + error.name = "OpRangeNotAvailableError" + error.__proto__ = OpRangeNotAvailableError.prototype + return error +OpRangeNotAvailableError.prototype.__proto__ = Error.prototype + module.exports = Errors = NotFoundError: NotFoundError + OpRangeNotAvailableError: OpRangeNotAvailableError diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index d280de1cea..b7ebb2cd4f 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -7,6 +7,7 @@ keys = require('./RedisKeyBuilder') logger = require('logger-sharelatex') metrics = require('./Metrics') ZipManager = require('./ZipManager') +Errors = require "./Errors" redisOptions = _.clone(Settings.redis.web) redisOptions.return_buffers = true @@ -125,8 +126,8 @@ module.exports = RedisManager = first_version_in_redis = version - length if start < first_version_in_redis or end > version - error = new Error("doc ops range is not loaded in redis") - logger.error err: error, length: length, version: version, start: start, end: end, "inconsistent version or length" + error = new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis") + logger.warn {err: error, doc_id, length, version, start, end}, "doc ops range is not loaded in redis" return callback(error) start = start - first_version_in_redis @@ -135,7 +136,7 @@ module.exports = RedisManager = if isNaN(start) or isNaN(end) error = new Error("inconsistent version or lengths") - logger.error err: error, length: length, version: version, start: start, end: end, "inconsistent version or length" + logger.error {err: error, doc_id, length, version, start, end}, "inconsistent version or length" return callback(error) rclient.lrange keys.docOps(doc_id: doc_id), start, end, (error, jsonOps) -> diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee index 35c3f2c55a..210502ae45 100644 --- a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee @@ -70,7 +70,7 @@ describe "Getting a document", -> lines: @lines = ["one", "two", "three"] } - @updates = for v in [0..99] + @updates = for v in [0..199] doc_id: @doc_id, op: [i: v.toString(), p: 0] v: v @@ -78,16 +78,27 @@ describe "Getting a document", -> DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => throw error if error? 
sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.getDocAndRecentOps @project_id, @doc_id, 90, (error, res, @returnedDoc) => done() + done() after -> MockWebApi.getDocument.restore() + + describe "when the ops are loaded", -> + before (done) -> + DocUpdaterClient.getDocAndRecentOps @project_id, @doc_id, 190, (error, res, @returnedDoc) => done() - it "should return the recent ops", -> - @returnedDoc.ops.length.should.equal 10 - for update, i in @updates.slice(90, -1) - @returnedDoc.ops[i].op.should.deep.equal update.op + it "should return the recent ops", -> + @returnedDoc.ops.length.should.equal 10 + for update, i in @updates.slice(190, -1) + @returnedDoc.ops[i].op.should.deep.equal update.op + + describe "when the ops are not all loaded", -> + before (done) -> + # We only track 100 ops + DocUpdaterClient.getDocAndRecentOps @project_id, @doc_id, 10, (error, @res, @returnedDoc) => done() + it "should return UnprocessableEntity", -> + @res.statusCode.should.equal 422 describe "when the document does not exist", -> before (done) -> From 1d7cbd8a784d4b5ec1035b8d23e8ba9af1acb0af Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 31 May 2016 13:32:15 +0100 Subject: [PATCH 100/769] Fix unit tests --- .../unit/coffee/RedisManager/getPreviousDocOpsTests.coffee | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee index eb17d7856f..6b93ae3028 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee @@ -3,6 +3,7 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/RedisManager" SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" describe "RedisManager.getPreviousDocOpsTests", -> beforeEach -> @@ -13,7 +14,7 @@ describe "RedisManager.getPreviousDocOpsTests", -> @rclient ?= auth: -> multi: => @rclient - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub() } + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } @doc_id = "doc-id-123" describe "with a start and an end value", -> @@ -94,7 +95,7 @@ describe "RedisManager.getPreviousDocOpsTests", -> @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) it "should return an error", -> - @callback.calledWith(new Error("range is not loaded in redis")).should.equal true + @callback.calledWith(new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis")).should.equal true it "should log out the problem", -> - @logger.error.called.should.equal true + @logger.warn.called.should.equal true From 32ce44f9c61f680c91b3dea928812923136a7693 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 1 Jun 2016 10:53:50 +0100 Subject: [PATCH 101/769] Remove unused gzipping logic --- .../app/coffee/RedisManager.coffee | 32 ++-- .../app/coffee/ZipManager.coffee | 73 -------- .../unit/coffee/ZipManager/ZipManager.coffee | 169 ------------------ 3 files changed, 13 insertions(+), 261 deletions(-) delete mode 100644 services/document-updater/app/coffee/ZipManager.coffee delete mode 100644 services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 
b7ebb2cd4f..4b66e178e2 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -6,12 +6,10 @@ _ = require('underscore') keys = require('./RedisKeyBuilder') logger = require('logger-sharelatex') metrics = require('./Metrics') -ZipManager = require('./ZipManager') Errors = require "./Errors" redisOptions = _.clone(Settings.redis.web) redisOptions.return_buffers = true -rclientBuffer = redis.createClient(redisOptions) # Make times easy to read minutes = 60 # seconds for Redis expire @@ -48,21 +46,18 @@ module.exports = RedisManager = getDoc : (doc_id, callback = (error, lines, version) ->)-> timer = new metrics.Timer("redis.get-doc") - # use Buffer when retrieving data as it may be gzipped - multi = rclientBuffer.multi() - linesKey = keys.docLines(doc_id:doc_id) - multi.get linesKey + multi = rclient.multi() + multi.get keys.docLines(doc_id:doc_id) multi.get keys.docVersion(doc_id:doc_id) multi.exec (error, result)-> timer.done() return callback(error) if error? - ZipManager.uncompressIfNeeded doc_id, result, (error, result) -> - try - docLines = JSON.parse result[0] - catch e - return callback(e) - version = parseInt(result[1] or 0, 10) - callback null, docLines, version + try + docLines = JSON.parse result[0] + catch e + return callback(e) + version = parseInt(result[1] or 0, 10) + callback null, docLines, version getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -76,12 +71,11 @@ module.exports = RedisManager = callback null, len setDocument : (doc_id, docLines, version, callback = (error) ->)-> - ZipManager.compressIfNeeded doc_id, JSON.stringify(docLines), (err, result) -> - multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), result - multi.set keys.docVersion(doc_id:doc_id), version - multi.incr keys.now("docsets") - multi.exec (error, replys) -> callback(error) + multi = rclient.multi() + multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) + multi.set keys.docVersion(doc_id:doc_id), version + multi.incr keys.now("docsets") + multi.exec (error, replys) -> callback(error) getPendingUpdatesForDoc : (doc_id, callback)-> multi = rclient.multi() diff --git a/services/document-updater/app/coffee/ZipManager.coffee b/services/document-updater/app/coffee/ZipManager.coffee deleted file mode 100644 index 18482a5c69..0000000000 --- a/services/document-updater/app/coffee/ZipManager.coffee +++ /dev/null @@ -1,73 +0,0 @@ -Settings = require('settings-sharelatex') -logger = require('logger-sharelatex') -metrics = require('./Metrics') -zlib = require('zlib') - -# Compress and uncompress data sent to Redis using the node 'zlib' -# module, to reduce load on Redis. -# -# Measurements show that most of the load on Redis comes from a very -# large documents. We can shift some of that CPU load from redis to -# the docupdaters (which are scalable) by compressing the data in the -# docupdater first. -# -# To avoid overloading the docupdater clients we impose a minimum size -# on data we will compress, so we only catch the large ones. 
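A condensed, standalone sketch of the size-gated scheme this header comment describes (an illustration, not part of the patch; it assumes node's zlib, and the cutoff echoes the table that follows):

    zlib = require "zlib"
    MIN_SIZE = 10 * 1024   # illustrative cutoff; see the table below

    maybeCompress = (text, callback) ->
      # below the cutoff, gzip costs CPU without saving Redis much work
      return callback(null, text) if text.length <= MIN_SIZE
      zlib.gzip text, callback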
-# -# The optimimum size for the cutoff is about 10K, below this we do -# more work but don't really gain any extra reduction in Redis CPU -# -# |--------------------+-----------+--------------------------| -# | Compression cutoff | Redis CPU | Extra doc updater CPU(*) | -# |--------------------+-----------+--------------------------| -# | N/A | 100% | 0% | -# | 100k | 80% | 10% | -# | 10k | 50% | 30% | -# |--------------------+-----------+--------------------------| -# -# (*) percentage of a single core, because node zlib runs in multiple -# threads. - -ZIP_WRITES_ENABLED = Settings.redis.zip?.writesEnabled -ZIP_MINSIZE = Settings.redis.zip?.minSize || 64*1024 - -module.exports = ZipManager = - uncompressIfNeeded: (doc_id, result, callback) -> - # result is an array of [text, version]. Each entry is a node - # Buffer object which we need to convert to strings on output - - # first make sure the version (result[1]) is returned as a string - if result?[1]?.toString? - result[1] = result[1].toString() - - # now uncompress the text (result[0]) if needed - buf = result?[0] - - # Check if we have a GZIP file (magic numbers in header) - if buf? and buf[0] == 0x1F and buf[1] == 0x8B - zlib.gunzip buf, (err, newbuf) -> - if err? - logger.err doc_id:doc_id, err:err, "error uncompressing doc" - callback(err, null) - else - logger.log doc_id:doc_id, fromBytes: buf.length, toChars: newbuf.length, factor: buf.length/newbuf.length, "uncompressed successfully" - result[0] = newbuf.toString() - callback(null, result) - else - # if we don't have a GZIP file it's just a buffer of text, convert it back to a string - if buf?.toString? - result[0] = buf.toString() - callback(null, result) - - compressIfNeeded: (doc_id, text, callback) -> - if ZIP_WRITES_ENABLED and ZIP_MINSIZE > 1024 and text.length > ZIP_MINSIZE - # N.B. skip files of 1k or less, because gzip increases the size - zlib.gzip text, (err, buf) -> - if err? 
- logger.err doc_id:doc_id, err:err, "error compressing doc" - callback(err, null) - else - logger.log doc_id:doc_id, fromChars: text.length, toBytes: buf.length, factor: buf.length/text.length , "compressed successfully" - callback(null, buf) - else - callback(null, text) diff --git a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee b/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee deleted file mode 100644 index e477cfb23a..0000000000 --- a/services/document-updater/test/unit/coffee/ZipManager/ZipManager.coffee +++ /dev/null @@ -1,169 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -zipModulePath = "../../../../app/js/ZipManager" -redisModulePath = "../../../../app/js/RedisManager" -SandboxedModule = require('sandboxed-module') -zlib = require('zlib') - -MIN_SIZE = 9999 - -describe "ZipManager with RedisManager", -> - describe "for a small document (uncompressed)", -> - rclient = null - beforeEach (done) -> - @ZipManager = SandboxedModule.require zipModulePath, requires: - "logger-sharelatex": log:-> - 'settings-sharelatex': redis: - web: - host: 'none' - port: 'none' - zip: - writesEnabled: true - minSize: MIN_SIZE - @RedisManager = SandboxedModule.require redisModulePath, requires: - "./ZipManager" : @ZipManager - "redis-sharelatex" : createClient: () => - rclient ?= - auth:-> # only assign one rclient - multi: () => rclient - set: (key, value) => rclient.store[key] = value - get: (key) => rclient.results.push rclient.store[key] - incr: (key) => rclient.store[key]++ - exec: (callback) => - callback.apply(null, [null, rclient.results]) - rclient.results = [] - store: {} - results: [] - "logger-sharelatex": {} - @doc_id = "document-id" - @version = 123 - - @docLines = ["hello", "world"] - @callback = sinon.stub() - - @RedisManager.setDocument @doc_id, @docLines, @version, () => - @callback() - done() - - it "should set the document", -> - rclient.store['doclines:document-id'] - .should.equal JSON.stringify(@docLines) - - it "should return the callback", -> - @callback.called.should.equal true - - it "should get the document back again", (done) -> - @RedisManager.getDoc @doc_id, (err, lines, version) => - @docLines.should.eql lines - done() - - describe "calling node zlib.gzip directly", -> - it "should compress the string 'hello world' within the timeout", (done) -> - zlib.gzip "hello world", done - - it "should compress a 10k string within the timeout", (done) -> - text = "" - while text.length < 10*1024 - text = text + "helloworld" - zlib.gzip text, done - - describe "for a large document (with compression enabled)", -> - rclient = null - beforeEach (done) -> - @ZipManager = SandboxedModule.require zipModulePath, requires: - "logger-sharelatex": log:-> - 'settings-sharelatex': redis: - web: - host: 'none' - port: 'none' - zip: - writesEnabled: true - minSize: MIN_SIZE - @RedisManager = SandboxedModule.require redisModulePath, requires: - "./ZipManager" : @ZipManager - "redis-sharelatex" : createClient: () => - rclient ?= - auth:-> # only assign one rclient - multi: () => rclient - set: (key, value) => rclient.store[key] = value - get: (key) => rclient.results.push rclient.store[key] - incr: (key) => rclient.store[key]++ - exec: (callback) => - callback.apply(null, [null, rclient.results]) - rclient.results = [] - store: {} - results: [] - "logger-sharelatex": {} - @doc_id = "document-id" - @version = 123 - - @docLines = [] - while @docLines.join('').length <= MIN_SIZE - @docLines.push "this is a long line 
in a long document" - # console.log "length of doclines", @docLines.join('').length - @callback = sinon.stub() - @RedisManager.setDocument @doc_id, @docLines, @version, () => - @callback() - done() - - it "should set the document as a gzipped blob", -> - rclient.store['doclines:document-id'] - .should.not.equal JSON.stringify(@docLines) - - it "should return the callback", -> - @callback.called.should.equal true - - it "should get the uncompressed document back again", (done) -> - @RedisManager.getDoc @doc_id, (err, lines, version) => - @docLines.should.eql lines - done() - - describe "for a large document (with compression disabled)", -> - rclient = null - beforeEach (done) -> - @ZipManager = SandboxedModule.require zipModulePath, requires: - "logger-sharelatex": log:-> - 'settings-sharelatex': redis: - web: - host: 'none' - port: 'none' - zip: - writesEnabled: false - minSize: MIN_SIZE - @RedisManager = SandboxedModule.require redisModulePath, requires: - "./ZipManager" : @ZipManager - "redis-sharelatex" : createClient: () => - rclient ?= - auth:-> # only assign one rclient - multi: () => rclient - set: (key, value) => rclient.store[key] = value - get: (key) => rclient.results.push rclient.store[key] - incr: (key) => rclient.store[key]++ - exec: (callback) => - callback.apply(null, [null, rclient.results]) - rclient.results = [] - store: {} - results: [] - "logger-sharelatex": {} - @doc_id = "document-id" - @version = 123 - @docLines = [] - while @docLines.join('').length <= MIN_SIZE - @docLines.push "this is a long line in a long document" - @callback = sinon.stub() - @RedisManager.setDocument @doc_id, @docLines, @version, () => - @callback() - done() - - it "should set the document", -> - rclient.store['doclines:document-id'] - .should.equal JSON.stringify(@docLines) - - it "should return the callback", -> - @callback.called.should.equal true - - it "should get the document back again", (done) -> - @RedisManager.getDoc @doc_id, (err, lines, version) => - @docLines.should.eql lines - done() From b2e58266ae5db512fc0cd9a0b7b9cba6d808c6a0 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 1 Jun 2016 11:03:39 +0100 Subject: [PATCH 102/769] Remove unused total doc counting redis commands --- .../app/coffee/RedisKeyBuilder.coffee | 6 --- .../app/coffee/RedisManager.coffee | 8 ---- .../getCountOfDocsInMemoryTests.coffee | 48 ------------------- .../RedisManager/putDocInMemoryTests.coffee | 1 - 4 files changed, 63 deletions(-) delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/getCountOfDocsInMemoryTests.coffee diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee index 0cfd330721..391fd0ace3 100644 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -1,4 +1,3 @@ -ALLDOCSKEY = "AllDocIds" PROJECTKEY = "ProjectId" BLOCKINGKEY = "Blocking" CHANGEQUE = "ChangeQue" @@ -12,8 +11,6 @@ DOCSWITHHISTORYOPS = "DocsWithHistoryOps" UNCOMPRESSED_HISTORY_OPS = "UncompressedHistoryOps" module.exports = - - allDocs : ALLDOCSKEY docLines : (op)-> DOCLINES+":"+op.doc_id docOps : (op)-> DOCOPS+":"+op.doc_id uncompressedHistoryOp: (op) -> UNCOMPRESSED_HISTORY_OPS + ":" + op.doc_id @@ -27,6 +24,3 @@ module.exports = combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}" splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":") docsWithHistoryOps: (op) -> DOCSWITHHISTORYOPS + ":" + 
op.project_id - now : (key)-> - d = new Date() - d.getDate()+":"+(d.getMonth()+1)+":"+d.getFullYear()+":"+key diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 4b66e178e2..bcb5d28ae0 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -22,7 +22,6 @@ module.exports = RedisManager = multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) multi.set keys.projectKey({doc_id:doc_id}), project_id multi.set keys.docVersion(doc_id:doc_id), version - multi.sadd keys.allDocs, doc_id multi.sadd keys.docsInProject(project_id:project_id), doc_id multi.exec (err, replys)-> timer.done() @@ -35,7 +34,6 @@ module.exports = RedisManager = multi.del keys.projectKey(doc_id:doc_id) multi.del keys.docVersion(doc_id:doc_id) multi.srem keys.docsInProject(project_id:project_id), doc_id - multi.srem keys.allDocs, doc_id multi.exec (err, replys)-> if err? logger.err project_id:project_id, doc_id:doc_id, err:err, "error removing doc from redis" @@ -65,16 +63,10 @@ module.exports = RedisManager = version = parseInt(version, 10) callback null, version - getCountOfDocsInMemory : (callback)-> - rclient.smembers keys.allDocs, (err, members)-> - len = members.length - callback null, len - setDocument : (doc_id, docLines, version, callback = (error) ->)-> multi = rclient.multi() multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) multi.set keys.docVersion(doc_id:doc_id), version - multi.incr keys.now("docsets") multi.exec (error, replys) -> callback(error) getPendingUpdatesForDoc : (doc_id, callback)-> diff --git a/services/document-updater/test/unit/coffee/RedisManager/getCountOfDocsInMemoryTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getCountOfDocsInMemoryTests.coffee deleted file mode 100644 index e66fecf86a..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/getCountOfDocsInMemoryTests.coffee +++ /dev/null @@ -1,48 +0,0 @@ -require('coffee-script') -assert = require('assert') -should = require('chai').should() -path = require('path') -modulePath = path.join __dirname, '../../../../app/js/RedisManager.js' -keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') -SandboxedModule = require('sandboxed-module') - -describe 'RedisManager.getCountOfDocsInMemory', ()-> - - project_id = "12345" - doc_id1 = "docid1" - doc_id2 = "docid2" - doc_id3 = "docid3" - - redisMemory = {} - redisManager = undefined - - beforeEach (done)-> - mocks = - "./ZipManager": {} - "logger-sharelatex": log:-> - "redis-sharelatex": - createClient : ()-> - auth:-> - smembers:(key, callback)-> - callback(null, redisMemory[key]) - multi: ()-> - set:(key, value)-> - redisMemory[key] = value - sadd:(key, value)-> - if !redisMemory[key]? 
- redisMemory[key] = [] - redisMemory[key].push value - del:()-> - exec:(callback)-> - callback() - - redisManager = SandboxedModule.require(modulePath, requires: mocks) - redisManager.putDocInMemory project_id, doc_id1, 0, ["line"], -> - redisManager.putDocInMemory project_id, doc_id2, 0, ["ledf"], -> - redisManager.putDocInMemory project_id, doc_id3, 0, ["ledf"], -> - done() - - it 'should return total', (done)-> - redisManager.getCountOfDocsInMemory (err, count)-> - assert.equal count, 3 - done() diff --git a/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee index 16043e7e2c..5b7c8ef4ce 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee @@ -17,7 +17,6 @@ describe 'RedisManager.putDocInMemory', ()-> potentialSets[keys.docVersion(doc_id:doc_id)] = version potentialSAdds = {} - potentialSAdds[keys.allDocs] = doc_id potentialSAdds[keys.docsInProject(project_id:project_id)] = doc_id mocks = From f707783aba69a8f4d09134883ea7d8858eb4dfb8 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 1 Jun 2016 11:28:23 +0100 Subject: [PATCH 103/769] Remove old methods of triggering doc updates Remove the old pub/sub listener which is no longer used. Also remove the DocsWithPendingUpdates set, which used to track docs waiting to be updated. This was necessary in case messages were missed on the pub/sub channel, so we knew which docs still had pending updates. However, now that we use the BLPOP queue, these updates just sit in the queue until a consumer comes back to continue consuming them. --- services/document-updater/app.coffee | 12 ------- .../app/coffee/RedisKeyBuilder.coffee | 2 -- .../app/coffee/RedisManager.coffee | 15 -------- .../app/coffee/UpdateManager.coffee | 20 ++--------- .../clearDocFromPendingUpdatesSetTests.coffee | 29 --------------- .../getDocsWithPendingUpdatesTests.coffee | 35 ------------------- .../UpdateManager/ApplyingUpdates.coffee | 32 ----------------- 7 files changed, 3 insertions(+), 142 deletions(-) delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 41c7a01958..eafe03e402 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -4,7 +4,6 @@ Settings = require('settings-sharelatex') logger = require('logger-sharelatex') logger.initialize("documentupdater") RedisManager = require('./app/js/RedisManager') -UpdateManager = require('./app/js/UpdateManager') DispatchManager = require('./app/js/DispatchManager') Keys = require('./app/js/RedisKeyBuilder') Errors = require "./app/js/Errors" @@ -26,19 +25,8 @@ app.configure -> app.use express.bodyParser() app.use app.router -rclient.subscribe("pending-updates") -rclient.on "message", (channel, doc_key) -> - [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) - if !Settings.shuttingDown - UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> - logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error?
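The commit message above contrasts the removed pub/sub trigger with the BLPOP work queue. DispatchManager itself does not appear in this patch, so the following is only a rough sketch of the blocking-pop consumer pattern being referred to; the queue key name and control flow are illustrative, not the project's actual implementation:

    consumeLoop = (rclient) ->
      # BLPOP blocks until an item is available, so an update pushed while
      # no consumer is running simply waits in the list, whereas a pub/sub
      # message is lost if nothing is subscribed at that moment.
      rclient.blpop "pending-updates-list", 0, (error, result) ->
        return logger.error err: error, "dispatcher error" if error?
        [_queue, doc_key] = result
        [project_id, doc_id] = doc_key.split(":")
        UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, ->
          consumeLoop(rclient)   # loop round for the next queued doc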
- else - logger.log project_id: project_id, doc_id: doc_id, "ignoring incoming update" - DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) -UpdateManager.resumeProcessing() - app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee index 391fd0ace3..e55ae99bde 100644 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -6,7 +6,6 @@ PENDINGUPDATESKEY = "PendingUpdates" DOCLINES = "doclines" DOCOPS = "DocOps" DOCVERSION = "DocVersion" -DOCIDSWITHPENDINGUPDATES = "DocsWithPendingUpdates" DOCSWITHHISTORYOPS = "DocsWithHistoryOps" UNCOMPRESSED_HISTORY_OPS = "UncompressedHistoryOps" @@ -20,7 +19,6 @@ module.exports = changeQue : (op)-> CHANGEQUE+":"+op.project_id docsInProject : (op)-> DOCSINPROJECT+":"+op.project_id pendingUpdates : (op)-> PENDINGUPDATESKEY+":"+op.doc_id - docsWithPendingUpdates : DOCIDSWITHPENDINGUPDATES combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}" splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":") docsWithHistoryOps: (op) -> DOCSWITHHISTORYOPS + ":" + op.project_id diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index bcb5d28ae0..1215471ce1 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -88,21 +88,6 @@ module.exports = RedisManager = getUpdatesLength: (doc_id, callback)-> rclient.llen keys.pendingUpdates(doc_id:doc_id), callback - getDocsWithPendingUpdates: (callback = (error, docs) ->) -> - rclient.smembers keys.docsWithPendingUpdates, (error, doc_keys) -> - return callback(error) if error? - docs = doc_keys.map (doc_key) -> - [project_id, doc_id] = keys.splitProjectIdAndDocId(doc_key) - return { - doc_id: doc_id - project_id: project_id - } - callback null, docs - - clearDocFromPendingUpdatesSet: (project_id, doc_id, callback = (error) ->) -> - doc_key = keys.combineProjectIdAndDocId(project_id, doc_id) - rclient.srem keys.docsWithPendingUpdates, doc_key, callback - getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) -> rclient.llen keys.docOps(doc_id: doc_id), (error, length) -> return callback(error) if error? diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 2ab74feda1..97c33e8b6f 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -7,26 +7,12 @@ logger = require('logger-sharelatex') Metrics = require "./Metrics" module.exports = UpdateManager = - resumeProcessing: (callback = (error) ->) -> - RedisManager.getDocsWithPendingUpdates (error, docs) => - return callback(error) if error? 
- jobs = for doc in (docs or []) - do (doc) => - (callback) => @processOutstandingUpdatesWithLock doc.project_id, doc.doc_id, callback - - async.parallelLimit jobs, 5, callback - - processOutstandingUpdates: (project_id, doc_id, _callback = (error) ->) -> + processOutstandingUpdates: (project_id, doc_id, callback = (error) ->) -> timer = new Metrics.Timer("updateManager.processOutstandingUpdates") - callback = (args...) -> + UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) -> timer.done() - _callback(args...) - - UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) => return callback(error) if error? - RedisManager.clearDocFromPendingUpdatesSet project_id, doc_id, (error) => - return callback(error) if error? - callback() + callback() processOutstandingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> LockManager.tryLock doc_id, (error, gotLock, lockValue) => diff --git a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee deleted file mode 100644 index c89842f7bc..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/clearDocFromPendingUpdatesSetTests.coffee +++ /dev/null @@ -1,29 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager" -SandboxedModule = require('sandboxed-module') - -describe "RedisManager.clearDocFromPendingUpdatesSet", -> - beforeEach -> - @project_id = "project-id" - @doc_id = "document-id" - @callback = sinon.stub() - @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex" : createClient: () => - @rclient ?= auth:-> # only assign one rclient - "logger-sharelatex": {} - "./ZipManager": {} - - @rclient.srem = sinon.stub().callsArg(2) - @RedisManager.clearDocFromPendingUpdatesSet(@project_id, @doc_id, @callback) - - it "should get the docs with pending updates", -> - @rclient.srem - .calledWith("DocsWithPendingUpdates", "#{@project_id}:#{@doc_id}") - .should.equal true - - it "should return the callback", -> - @callback.called.should.equal true - - diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee deleted file mode 100644 index 45efa4c984..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/getDocsWithPendingUpdatesTests.coffee +++ /dev/null @@ -1,35 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager" -SandboxedModule = require('sandboxed-module') - -describe "RedisManager.getDocsWithPendingUpdates", -> - beforeEach -> - @callback = sinon.stub() - @RedisManager = SandboxedModule.require modulePath, requires: - "./ZipManager": {} - "redis-sharelatex" : createClient: () => - @rclient ?= auth:-> - "logger-sharelatex": {} - - @docs = [{ - doc_id: "doc-id-1" - project_id: "project-id-1" - }, { - doc_id: "doc-id-2" - project_id: "project-id-2" - }] - @doc_keys = @docs.map (doc) -> "#{doc.project_id}:#{doc.doc_id}" - - @rclient.smembers = sinon.stub().callsArgWith(1, null, @doc_keys) - @RedisManager.getDocsWithPendingUpdates(@callback) - - it "should get the docs with pending updates", -> - @rclient.smembers - .calledWith("DocsWithPendingUpdates") - .should.equal true - - it "should return the docs with pending updates", -> - 
@callback.calledWith(null, @docs).should.equal true - diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee index 7b41647f7b..249740973f 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee @@ -18,46 +18,14 @@ describe "UpdateManager", -> Timer: class Timer done: sinon.stub() - describe "resumeProcessing", -> - beforeEach (done) -> - @docs = [{ - doc_id: "doc-1" - project_id: "project-1" - }, { - doc_id: "doc-2" - project_id: "project-2" - }, { - doc_id: "doc-3" - project_id: "project-3" - }] - @RedisManager.getDocsWithPendingUpdates = sinon.stub().callsArgWith(0, null, @docs) - @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) - @UpdateManager.resumeProcessing(done) - - it "should the docs that haven't been processed yet", -> - @RedisManager.getDocsWithPendingUpdates - .called.should.equal true - - it "should call processOutstandingUpdatesWithLock for each doc", -> - for doc in @docs - @UpdateManager.processOutstandingUpdatesWithLock - .calledWith(doc.project_id, doc.doc_id) - .should.equal true - describe "processOutstandingUpdates", -> beforeEach -> @UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2) - @RedisManager.clearDocFromPendingUpdatesSet = sinon.stub().callsArg(2) @UpdateManager.processOutstandingUpdates @project_id, @doc_id, @callback it "should apply the updates", -> @UpdateManager.fetchAndApplyUpdates.calledWith(@project_id, @doc_id).should.equal true - it "should clear the doc from the process pending set", -> - @RedisManager.clearDocFromPendingUpdatesSet - .calledWith(@project_id, @doc_id) - .should.equal true - it "should call the callback", -> @callback.called.should.equal true From 05b09a447e1db8a7dcc29e4fccc69dc76749bbce Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 1 Jun 2016 11:40:12 +0100 Subject: [PATCH 104/769] Ensure that all multi call keys will hash to the same node in cluster --- .../app/coffee/RedisManager.coffee | 40 +++++++++++------- .../pushUncompressedHistoryOpTests.coffee | 14 +++---- .../RedisManager/putDocInMemoryTests.coffee | 42 ++++++++++--------- .../removeDocFromMemoryTests.coffee | 38 +++++++++-------- 4 files changed, 73 insertions(+), 61 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 1215471ce1..9f0e1cd7f5 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -18,23 +18,31 @@ module.exports = RedisManager = putDocInMemory : (project_id, doc_id, docLines, version, callback)-> timer = new metrics.Timer("redis.put-doc") logger.log project_id:project_id, doc_id:doc_id, version: version, "putting doc in redis" - multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) - multi.set keys.projectKey({doc_id:doc_id}), project_id - multi.set keys.docVersion(doc_id:doc_id), version - multi.sadd keys.docsInProject(project_id:project_id), doc_id - multi.exec (err, replys)-> + async.parallel [ + (cb) -> + multi = rclient.multi() + multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) + multi.set keys.projectKey({doc_id:doc_id}), project_id + multi.set keys.docVersion(doc_id:doc_id), version + multi.exec cb + (cb) -> + rclient.sadd 
keys.docsInProject(project_id:project_id), doc_id, cb + ], (err) -> timer.done() callback(err) removeDocFromMemory : (project_id, doc_id, callback)-> logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis" - multi = rclient.multi() - multi.del keys.docLines(doc_id:doc_id) - multi.del keys.projectKey(doc_id:doc_id) - multi.del keys.docVersion(doc_id:doc_id) - multi.srem keys.docsInProject(project_id:project_id), doc_id - multi.exec (err, replys)-> + async.parallel [ + (cb) -> + multi = rclient.multi() + multi.del keys.docLines(doc_id:doc_id) + multi.del keys.projectKey(doc_id:doc_id) + multi.del keys.docVersion(doc_id:doc_id) + multi.exec cb + (cb) -> + rclient.srem keys.docsInProject(project_id:project_id), doc_id, cb + ], (err) -> if err? logger.err project_id:project_id, doc_id:doc_id, err:err, "error removing doc from redis" callback(err, null) @@ -135,10 +143,10 @@ module.exports = RedisManager = pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error, length) ->) -> jsonOp = JSON.stringify op - multi = rclient.multi() - multi.rpush keys.uncompressedHistoryOp(doc_id: doc_id), jsonOp - multi.sadd keys.docsWithHistoryOps(project_id: project_id), doc_id - multi.exec (error, results) -> + async.parallel [ + (cb) -> rclient.rpush keys.uncompressedHistoryOp(doc_id: doc_id), jsonOp, cb + (cb) -> rclient.sadd keys.docsWithHistoryOps(project_id: project_id), doc_id, cb + ], (error, results) -> return callback(error) if error? [length, _] = results callback(error, length) diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index c7423fcff0..97530a193c 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -11,19 +11,19 @@ describe "RedisManager.pushUncompressedHistoryOp", -> "redis-sharelatex": createClient: () => @rclient ?= auth: () -> - multi: () => @rclient "logger-sharelatex": @logger = {log: sinon.stub()} @doc_id = "doc-id-123" @project_id = "project-id-123" @callback = sinon.stub() describe "successfully", -> - beforeEach -> + beforeEach (done) -> @op = { op: [{ i: "foo", p: 4 }] } - @rclient.rpush = sinon.stub() - @rclient.sadd = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@length = 42, "1"]) - @RedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback + @rclient.rpush = sinon.stub().yields(null, @length = 42) + @rclient.sadd = sinon.stub().yields() + @RedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, (args...) => + @callback(args...) 
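For context on the commit title above: in Redis Cluster, every key touched by a single MULTI/EXEC must hash to the same slot, and a doc-scoped key such as doclines:<doc_id> will generally live on a different node from a project-scoped key such as DocsIn:<project_id>. A minimal standalone sketch of the resulting pattern (mirroring the putDocInMemory change above, using the key names the tests use; not part of the patch):

    async = require "async"

    putDoc = (rclient, project_id, doc_id, lines, version, callback) ->
      async.parallel [
        (cb) ->
          # all doc-scoped keys share the doc_id, so they hash to one slot
          # and can stay inside a single atomic multi
          multi = rclient.multi()
          multi.set "doclines:#{doc_id}", JSON.stringify(lines)
          multi.set "DocVersion:#{doc_id}", version
          multi.exec cb
        (cb) ->
          # the project-scoped set lives in another slot, so it is updated
          # with a standalone command rather than inside the transaction
          rclient.sadd "DocsIn:#{project_id}", doc_id, cb
      ], callback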
+ done() it "should push the doc op into the doc ops list", -> @rclient.rpush @@ -36,7 +36,7 @@ describe "RedisManager.pushUncompressedHistoryOp", -> .should.equal true it "should call the callback with the length", -> - @callback.calledWith(null, @length).should.equal true + @callback.calledWith(undefined, @length).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee index 5b7c8ef4ce..eea348b49b 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee @@ -19,30 +19,32 @@ describe 'RedisManager.putDocInMemory', ()-> potentialSAdds = {} potentialSAdds[keys.docsInProject(project_id:project_id)] = doc_id + rclient = + auth:-> + set:(key, value)-> + result = potentialSets[key] + delete potentialSets[key] + if key == keys.docLines(doc_id:doc_id) + value = JSON.parse(value) + assert.deepEqual result, value + incr:()-> + sadd:(key, value, cb)-> + result = potentialSAdds[key] + delete potentialSAdds[key] + assert.equal result, value + cb() + del: (key) -> + result = potentialDels[key] + delete potentialDels[key] + assert.equal result, true + exec:(callback)-> + callback() + rclient.multi = () -> rclient mocks = "./ZipManager": {} "logger-sharelatex": log:-> "redis-sharelatex": - createClient : ()-> - auth:-> - multi: ()-> - set:(key, value)-> - result = potentialSets[key] - delete potentialSets[key] - if key == keys.docLines(doc_id:doc_id) - value = JSON.parse(value) - assert.deepEqual result, value - incr:()-> - sadd:(key, value)-> - result = potentialSAdds[key] - delete potentialSAdds[key] - assert.equal result, value - del: (key) -> - result = potentialDels[key] - delete potentialDels[key] - assert.equal result, true - exec:(callback)-> - callback() + createClient : () -> rclient redisManager = SandboxedModule.require(modulePath, requires: mocks) diff --git a/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee index 2c5076bb1c..d1750f8fdd 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee @@ -19,30 +19,32 @@ describe 'RedisManager.removeDocFromMemory', ()-> self = @ beforeEach (done)-> redisMemory = {} - + rclient = + auth:-> + get:-> + set:(key, value)-> + redisMemory[key] = value + sadd:(key, value, cb)-> + if !redisMemory[key]? + redisMemory[key] = [] + redisMemory[key].push value + cb() + del : (key)-> + delete redisMemory[key] + srem : (key, member, cb)-> + index = redisMemory[key].indexOf(member) + redisMemory[key].splice(index, 1) + cb() + exec:(callback)-> + callback(null, []) + rclient.multi = () -> rclient mocks = "./ZipManager": {} "logger-sharelatex": error:-> log:-> "redis-sharelatex": - createClient : -> - auth:-> - multi: -> - get:-> - set:(key, value)-> - redisMemory[key] = value - sadd:(key, value)-> - if !redisMemory[key]? 
- redisMemory[key] = [] - redisMemory[key].push value - del : (key)-> - delete redisMemory[key] - srem : (key, member)-> - index = redisMemory[key].indexOf(member) - redisMemory[key].splice(index, 1) - exec:(callback)-> - callback(null, []) + createClient : -> rclient redisManager = SandboxedModule.require(modulePath, requires: mocks) redisManager.putDocInMemory project_id, doc_id1, 0, ["line"], -> From 8004e022fe3e0258a76b93c1d99255c4102e9058 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 1 Jun 2016 11:49:24 +0100 Subject: [PATCH 105/769] Fix async version and fix affected unit tests --- services/document-updater/package.json | 32 +++++++++---------- .../pushUncompressedHistoryOpTests.coffee | 2 +- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index ac7e45147d..d27b16271c 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -7,31 +7,31 @@ "url": "https://github.com/sharelatex/document-updater-sharelatex.git" }, "dependencies": { + "async": "^2.0.0-rc.5", + "coffee-script": "1.4.0", "express": "3.3.4", - "underscore": "1.2.2", - "chai": "", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", + "lynx": "0.0.11", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", + "mongojs": "0.9.11", + "redis-sharelatex": "0.0.9", "request": "2.25.0", "sandboxed-module": "~0.2.0", - "chai-spies": "", - "async": "", - "lynx": "0.0.11", - "coffee-script": "1.4.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "sinon": "~1.5.2", - "mongojs": "0.9.11", - "redis-sharelatex": "0.0.9" + "underscore": "1.2.2" }, "devDependencies": { - "grunt-execute": "~0.1.5", - "grunt-contrib-clean": "~0.5.0", - "grunt-mocha-test": "~0.9.0", + "bunyan": "~0.22.1", + "chai": "^3.5.0", + "chai-spies": "^0.7.1", "grunt": "~0.4.2", "grunt-available-tasks": "~0.4.1", - "grunt-contrib-coffee": "~0.10.0", - "bunyan": "~0.22.1", "grunt-bunyan": "~0.5.0", - "grunt-forever": "0.4.1" + "grunt-contrib-clean": "~0.5.0", + "grunt-contrib-coffee": "~0.10.0", + "grunt-execute": "~0.1.5", + "grunt-forever": "0.4.1", + "grunt-mocha-test": "~0.9.0" } } diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee index 97530a193c..b237c5f635 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee @@ -36,7 +36,7 @@ describe "RedisManager.pushUncompressedHistoryOp", -> .should.equal true it "should call the callback with the length", -> - @callback.calledWith(undefined, @length).should.equal true + @callback.calledWith(null, @length).should.equal true From ea8f4a6d7a831d5d237c4ec5f7a634992e13668e Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 1 Jun 2016 11:58:15 +0100 Subject: [PATCH 106/769] Remove unused getDocOpsLength method --- services/document-updater/app/coffee/RedisManager.coffee | 3 --- 1 file changed, 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee 
b/services/document-updater/app/coffee/RedisManager.coffee index 9f0e1cd7f5..a300e92fcc 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -151,8 +151,5 @@ module.exports = RedisManager = [length, _] = results callback(error, length) - getDocOpsLength: (doc_id, callback = (error, length) ->) -> - rclient.llen keys.docOps(doc_id: doc_id), callback - getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback From e38e88308184a3851e4ca193231bc21ed712d315 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 1 Jun 2016 12:22:47 +0100 Subject: [PATCH 107/769] Clean up and consolidate RedisManager tests for easy refactoring later --- .../RedisManager/RedisManagerTests.coffee | 296 ++++++++++++++++++ .../coffee/RedisManager/getDocTests.coffee | 42 --- .../getPendingUpdatesForDocTests.coffee | 57 ---- .../getPreviousDocOpsTests.coffee | 101 ------ .../RedisManager/getUpdatesLengthTests.coffee | 29 -- .../coffee/RedisManager/pushDocOpTests.coffee | 55 ---- .../pushUncompressedHistoryOpTests.coffee | 42 --- .../RedisManager/putDocInMemoryTests.coffee | 56 ---- .../removeDocFromMemoryTests.coffee | 76 ----- 9 files changed, 296 insertions(+), 458 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/getDocTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/getUpdatesLengthTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee new file mode 100644 index 0000000000..99daf8f706 --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -0,0 +1,296 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "RedisManager", -> + beforeEach -> + @rclient = + auth: () -> + exec: sinon.stub() + @rclient.multi = () => @rclient + @RedisManager = SandboxedModule.require modulePath, requires: + "redis-sharelatex": createClient: () => @rclient + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + "./Metrics": @metrics = + inc: sinon.stub() + Timer: class Timer + done: () -> + "./Errors": Errors + @doc_id = "doc-id-123" + @project_id = "project-id-123" + @callback = sinon.stub() + + describe "getDoc", -> + beforeEach -> + @lines = ["one", "two", "three"] + @jsonlines = JSON.stringify @lines + @version = 42 + @rclient.get = sinon.stub() + @rclient.exec = 
sinon.stub().callsArgWith(0, null, [@jsonlines, @version]) + @RedisManager.getDoc @doc_id, @callback + + it "should get the lines from redis", -> + @rclient.get + .calledWith("doclines:#{@doc_id}") + .should.equal true + + it "should get the version from redis", -> + @rclient.get + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it 'should return the document', -> + @callback + .calledWith(null, @lines, @version) + .should.equal true + + describe "getPendingUpdatesForDoc", -> + beforeEach -> + @rclient.lrange = sinon.stub() + @rclient.del = sinon.stub() + + describe "successfully", -> + beforeEach -> + @updates = [ + { op: [{ i: "foo", p: 4 }] } + { op: [{ i: "foo", p: 4 }] } + ] + @jsonUpdates = @updates.map (update) -> JSON.stringify update + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) + @RedisManager.getPendingUpdatesForDoc @doc_id, @callback + + it "should get the pending updates", -> + @rclient.lrange + .calledWith("PendingUpdates:#{@doc_id}", 0, -1) + .should.equal true + + it "should delete the pending updates", -> + @rclient.del + .calledWith("PendingUpdates:#{@doc_id}") + .should.equal true + + it "should call the callback with the updates", -> + @callback.calledWith(null, @updates).should.equal true + + describe "when the JSON doesn't parse", -> + beforeEach -> + @jsonUpdates = [ + JSON.stringify { op: [{ i: "foo", p: 4 }] } + "broken json" + ] + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) + @RedisManager.getPendingUpdatesForDoc @doc_id, @callback + + it "should return an error to the callback", -> + @callback.calledWith(new Error("JSON parse error")).should.equal true + + describe "getPreviousDocOpsTests", -> + describe "with a start and an end value", -> + beforeEach -> + @first_version_in_redis = 30 + @version = 70 + @length = @version - @first_version_in_redis + @start = 50 + @end = 60 + @ops = [ + { "mock": "op-1" }, + { "mock": "op-2" } + ] + @jsonOps = @ops.map (op) -> JSON.stringify op + @rclient.llen = sinon.stub().callsArgWith(1, null, @length) + @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) + @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) + @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + + it "should get the length of the existing doc ops", -> + @rclient.llen + .calledWith("DocOps:#{@doc_id}") + .should.equal true + + it "should get the current version of the doc", -> + @rclient.get + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it "should get the appropriate doc ops", -> + @rclient.lrange + .calledWith("DocOps:#{@doc_id}", @start - @first_version_in_redis, @end - @first_version_in_redis) + .should.equal true + + it "should return the docs with the doc ops deserialized", -> + @callback.calledWith(null, @ops).should.equal true + + describe "with an end value of -1", -> + beforeEach -> + @first_version_in_redis = 30 + @version = 70 + @length = @version - @first_version_in_redis + @start = 50 + @end = -1 + @ops = [ + { "mock": "op-1" }, + { "mock": "op-2" } + ] + @jsonOps = @ops.map (op) -> JSON.stringify op + @rclient.llen = sinon.stub().callsArgWith(1, null, @length) + @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) + @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) + @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + + it "should get the appropriate doc ops to the end of the list", -> + @rclient.lrange + .calledWith("DocOps:#{@doc_id}", @start - @first_version_in_redis, -1)
.should.equal true + + it "should return the docs with the doc ops deserialized", -> + @callback.calledWith(null, @ops).should.equal true + + describe "when the requested range is not in Redis", -> + beforeEach -> + @first_version_in_redis = 30 + @version = 70 + @length = @version - @first_version_in_redis + @start = 20 + @end = -1 + @ops = [ + { "mock": "op-1" }, + { "mock": "op-2" } + ] + @jsonOps = @ops.map (op) -> JSON.stringify op + @rclient.llen = sinon.stub().callsArgWith(1, null, @length) + @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) + @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) + @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + + it "should return an error", -> + @callback.calledWith(new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis")).should.equal true + + it "should log out the problem", -> + @logger.warn.called.should.equal true + + describe "pushUncompressedHistoryOp", -> + beforeEach (done) -> + @op = { op: [{ i: "foo", p: 4 }] } + @rclient.rpush = sinon.stub().yields(null, @length = 42) + @rclient.sadd = sinon.stub().yields() + @RedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, (args...) => + @callback(args...) + done() + + it "should push the doc op into the doc ops list", -> + @rclient.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) + .should.equal true + + it "should add the doc_id to the set which records the project docs", -> + @rclient.sadd + .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) + .should.equal true + + it "should call the callback with the length", -> + @callback.calledWith(null, @length).should.equal true + + describe "getUpdatesLength", -> + beforeEach -> + @rclient.llen = sinon.stub().yields(null, @length = 3) + @RedisManager.getUpdatesLength @doc_id, @callback + + it "should look up the length", -> + @rclient.llen.calledWith("PendingUpdates:#{@doc_id}").should.equal true + + it "should return the length", -> + @callback.calledWith(null, @length).should.equal true + + describe "pushDocOp", -> + beforeEach -> + @rclient.rpush = sinon.stub() + @rclient.expire = sinon.stub() + @rclient.incr = sinon.stub() + @rclient.ltrim = sinon.stub() + @op = { op: [{ i: "foo", p: 4 }] } + @version = 42 + _ = null + @rclient.exec = sinon.stub().callsArgWith(0, null, [_, _, _, @version]) + @RedisManager.pushDocOp @doc_id, @op, @callback + + it "should push the doc op into the doc ops list", -> + @rclient.rpush + .calledWith("DocOps:#{@doc_id}", JSON.stringify(@op)) + .should.equal true + + it "should renew the expiry ttl on the doc ops array", -> + @rclient.expire + .calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) + .should.equal true + + it "should truncate the list to 100 members", -> + @rclient.ltrim + .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) + .should.equal true + + it "should increment the version number", -> + @rclient.incr + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it "should call the callback with the version number", -> + @callback.calledWith(null, parseInt(@version, 10)).should.equal true + + describe "putDocInMemory", -> + beforeEach (done) -> + @rclient.set = sinon.stub() + @rclient.sadd = sinon.stub().yields() + @rclient.exec.yields() + @lines = ["one", "two", "three"] + @version = 42 + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, done + + it "should set the lines", -> + @rclient.set + .calledWith("doclines:#{@doc_id}",
JSON.stringify @lines) + .should.equal true + + it "should set the version", -> + @rclient.set + .calledWith("DocVersion:#{@doc_id}", @version) + .should.equal true + + it "should set the project_id for the doc", -> + @rclient.set + .calledWith("ProjectId:#{@doc_id}", @project_id) + .should.equal true + + it "should add the doc_id to the project set", -> + @rclient.sadd + .calledWith("DocsIn:#{@project_id}", @doc_id) + .should.equal true + + describe "removeDocFromMemory", -> + beforeEach (done) -> + @rclient.del = sinon.stub() + @rclient.srem = sinon.stub().yields() + @rclient.exec.yields() + @RedisManager.removeDocFromMemory @project_id, @doc_id, done + + it "should delete the lines", -> + @rclient.del + .calledWith("doclines:#{@doc_id}") + .should.equal true + + it "should delete the version", -> + @rclient.del + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it "should delete the project_id for the doc", -> + @rclient.del + .calledWith("ProjectId:#{@doc_id}") + .should.equal true + + it "should remove the doc_id from the project set", -> + @rclient.srem + .calledWith("DocsIn:#{@project_id}", @doc_id) + .should.equal true \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/RedisManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getDocTests.coffee deleted file mode 100644 index e16ff856dd..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/getDocTests.coffee +++ /dev/null @@ -1,42 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager.js" -SandboxedModule = require('sandboxed-module') - -describe 'RedisManager.getDoc', -> - beforeEach -> - @rclient = {} - @rclient.auth = () -> - @rclient.multi = () => @rclient - - @RedisManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": {} - "redis-sharelatex": @redis = - createClient: () => @rclient - - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @jsonlines = JSON.stringify @lines - @version = 42 - @callback = sinon.stub() - - @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version]) - - @RedisManager.getDoc @doc_id, @callback - - it "should get the lines from redis", -> - @rclient.get - .calledWith("doclines:#{@doc_id}") - .should.equal true - - it "should get the version from", -> - @rclient.get - .calledWith("DocVersion:#{@doc_id}") - .should.equal true - - it 'should return the document', -> - @callback - .calledWith(null, @lines, @version) - .should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee deleted file mode 100644 index 40efa7cec2..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/getPendingUpdatesForDocTests.coffee +++ /dev/null @@ -1,57 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager.js" -SandboxedModule = require('sandboxed-module') - -describe "RedisManager.getPendingUpdatesForDoc", -> - beforeEach -> - @RedisManager = SandboxedModule.require modulePath, requires: - "./ZipManager": {} - "redis-sharelatex": createClient: () => - @rclient = - auth: () -> - multi: () => @rclient - "logger-sharelatex": @logger = {log: sinon.stub()} - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @callback = sinon.stub() - 
@rclient.lrange = sinon.stub() - @rclient.del = sinon.stub() - - describe "successfully", -> - beforeEach -> - @updates = [ - { op: [{ i: "foo", p: 4 }] } - { op: [{ i: "foo", p: 4 }] } - ] - @jsonUpdates = @updates.map (update) -> JSON.stringify update - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) - @RedisManager.getPendingUpdatesForDoc @doc_id, @callback - - it "should get the pending updates", -> - @rclient.lrange - .calledWith("PendingUpdates:#{@doc_id}", 0, -1) - .should.equal true - - it "should delete the pending updates", -> - @rclient.del - .calledWith("PendingUpdates:#{@doc_id}") - .should.equal true - - it "should call the callback with the updates", -> - @callback.calledWith(null, @updates).should.equal true - - describe "when the JSON doesn't parse", -> - beforeEach -> - @jsonUpdates = [ - JSON.stringify { op: [{ i: "foo", p: 4 }] } - "broken json" - ] - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) - @RedisManager.getPendingUpdatesForDoc @doc_id, @callback - - it "should return an error to the callback", -> - @callback.calledWith(new Error("JSON parse error")).should.equal true - - diff --git a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee deleted file mode 100644 index 6b93ae3028..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/getPreviousDocOpsTests.coffee +++ /dev/null @@ -1,101 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" - -describe "RedisManager.getPreviousDocOpsTests", -> - beforeEach -> - @callback = sinon.stub() - @RedisManager = SandboxedModule.require modulePath, requires: - "./ZipManager": {} - "redis-sharelatex" : createClient: () => - @rclient ?= - auth: -> - multi: => @rclient - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - @doc_id = "doc-id-123" - - describe "with a start and an end value", -> - beforeEach -> - @first_version_in_redis = 30 - @version = 70 - @length = @version - @first_version_in_redis - @start = 50 - @end = 60 - @ops = [ - { "mock": "op-1" }, - { "mock": "op-2" } - ] - @jsonOps = @ops.map (op) -> JSON.stringify op - @rclient.llen = sinon.stub().callsArgWith(1, null, @length) - @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) - @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) - @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) - - it "should get the length of the existing doc ops", -> - @rclient.llen - .calledWith("DocOps:#{@doc_id}") - .should.equal true - - it "should get the current version of the doc", -> - @rclient.get - .calledWith("DocVersion:#{@doc_id}") - .should.equal true - - it "should get the appropriate docs ops", -> - @rclient.lrange - .calledWith("DocOps:#{@doc_id}", @start - @first_version_in_redis, @end - @first_version_in_redis) - .should.equal true - - it "should return the docs with the doc ops deserialized", -> - @callback.calledWith(null, @ops).should.equal true - - describe "with an end value of -1", -> - beforeEach -> - @first_version_in_redis = 30 - @version = 70 - @length = @version - @first_version_in_redis - @start = 50 - @end = -1 - @ops = [ - { "mock": "op-1" }, - { "mock": "op-2" } - ] - @jsonOps = @ops.map (op) -> JSON.stringify op - 
@rclient.llen = sinon.stub().callsArgWith(1, null, @length) - @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) - @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) - @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) - - it "should get the appropriate docs ops to the end of list", -> - @rclient.lrange - .calledWith("DocOps:#{@doc_id}", @start - @first_version_in_redis, -1) - .should.equal true - - it "should return the docs with the doc ops deserialized", -> - @callback.calledWith(null, @ops).should.equal true - - describe "when the requested range is not in Redis", -> - beforeEach -> - @first_version_in_redis = 30 - @version = 70 - @length = @version - @first_version_in_redis - @start = 20 - @end = -1 - @ops = [ - { "mock": "op-1" }, - { "mock": "op-2" } - ] - @jsonOps = @ops.map (op) -> JSON.stringify op - @rclient.llen = sinon.stub().callsArgWith(1, null, @length) - @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) - @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) - @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) - - it "should return an error", -> - @callback.calledWith(new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis")).should.equal true - - it "should log out the problem", -> - @logger.warn.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/getUpdatesLengthTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/getUpdatesLengthTests.coffee deleted file mode 100644 index 57e7cb1e02..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/getUpdatesLengthTests.coffee +++ /dev/null @@ -1,29 +0,0 @@ -assert = require('chai').assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager.js" -SandboxedModule = require('sandboxed-module') - -doc_id = "1234" - -describe 'Redis Manager.getUpdatesLength ', -> - - beforeEach -> - - @llenStub = sinon.stub() - @redisManager = SandboxedModule.require modulePath, requires: - "./ZipManager": {} - "logger-sharelatex": {} - "redis-sharelatex": - createClient:=> - auth:-> - llen:@llenStub - - it "should the number of things to process in the que", (done)-> - - @llenStub.callsArgWith(1, null, 3) - @redisManager.getUpdatesLength doc_id, (err, len)=> - @llenStub.calledWith("PendingUpdates:#{doc_id}").should.equal true - len.should.equal 3 - done() diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee deleted file mode 100644 index 1053ed75c1..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/pushDocOpTests.coffee +++ /dev/null @@ -1,55 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager.js" -SandboxedModule = require('sandboxed-module') - -describe "RedisManager.pushDocOp", -> - beforeEach -> - @RedisManager = SandboxedModule.require modulePath, requires: - "./ZipManager": {} - "redis-sharelatex": createClient: () => - @rclient ?= - auth: () -> - multi: () => @rclient - "logger-sharelatex": @logger = {log: sinon.stub()} - @doc_id = "doc-id-123" - @callback = sinon.stub() - @rclient.rpush = sinon.stub() - @rclient.expire = sinon.stub() - @rclient.incr = sinon.stub() - @rclient.ltrim = sinon.stub() - - describe "successfully", -> - beforeEach -> - @op = { op: [{ i: "foo", p: 4 }] 
} - @version = 42 - _ = null - @rclient.exec = sinon.stub().callsArgWith(0, null, [_, _, _, @version]) - @RedisManager.pushDocOp @doc_id, @op, @callback - - it "should push the doc op into the doc ops list", -> - @rclient.rpush - .calledWith("DocOps:#{@doc_id}", JSON.stringify(@op)) - .should.equal true - - it "should renew the expiry ttl on the doc ops array", -> - @rclient.expire - .calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) - .should.equal true - - it "should truncate the list to 100 members", -> - @rclient.ltrim - .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) - .should.equal true - - it "should increment the version number", -> - @rclient.incr - .calledWith("DocVersion:#{@doc_id}") - .should.equal true - - it "should call the callback with the version number", -> - @callback.calledWith(null, parseInt(@version, 10)).should.equal true - - - diff --git a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee deleted file mode 100644 index b237c5f635..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/pushUncompressedHistoryOpTests.coffee +++ /dev/null @@ -1,42 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager.js" -SandboxedModule = require('sandboxed-module') - -describe "RedisManager.pushUncompressedHistoryOp", -> - beforeEach -> - @RedisManager = SandboxedModule.require modulePath, requires: - "./ZipManager": {} - "redis-sharelatex": createClient: () => - @rclient ?= - auth: () -> - "logger-sharelatex": @logger = {log: sinon.stub()} - @doc_id = "doc-id-123" - @project_id = "project-id-123" - @callback = sinon.stub() - - describe "successfully", -> - beforeEach (done) -> - @op = { op: [{ i: "foo", p: 4 }] } - @rclient.rpush = sinon.stub().yields(null, @length = 42) - @rclient.sadd = sinon.stub().yields() - @RedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, (args...) => - @callback(args...) 
- done() - - it "should push the doc op into the doc ops list", -> - @rclient.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) - .should.equal true - - it "should add the doc_id to the set of which records the project docs", -> - @rclient.sadd - .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) - .should.equal true - - it "should call the callback with the length", -> - @callback.calledWith(null, @length).should.equal true - - - diff --git a/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee deleted file mode 100644 index eea348b49b..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/putDocInMemoryTests.coffee +++ /dev/null @@ -1,56 +0,0 @@ -require('coffee-script') -assert = require('assert') -path = require('path') -modulePath = path.join __dirname, '../../../../app/js/RedisManager.js' -keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') -project_id = 1234 -doc_id = 5678 -SandboxedModule = require('sandboxed-module') - -describe 'RedisManager.putDocInMemory', ()-> - lines = ["this is one line", "and another line"] - version = 42 - - potentialSets = {} - potentialSets[keys.docLines(doc_id:doc_id)] = lines - potentialSets[keys.projectKey(doc_id:doc_id)] = project_id - potentialSets[keys.docVersion(doc_id:doc_id)] = version - - potentialSAdds = {} - potentialSAdds[keys.docsInProject(project_id:project_id)] = doc_id - - rclient = - auth:-> - set:(key, value)-> - result = potentialSets[key] - delete potentialSets[key] - if key == keys.docLines(doc_id:doc_id) - value = JSON.parse(value) - assert.deepEqual result, value - incr:()-> - sadd:(key, value, cb)-> - result = potentialSAdds[key] - delete potentialSAdds[key] - assert.equal result, value - cb() - del: (key) -> - result = potentialDels[key] - delete potentialDels[key] - assert.equal result, true - exec:(callback)-> - callback() - rclient.multi = () -> rclient - mocks = - "./ZipManager": {} - "logger-sharelatex": log:-> - "redis-sharelatex": - createClient : () -> rclient - - redisManager = SandboxedModule.require(modulePath, requires: mocks) - - it 'should put a all data into memory', (done)-> - redisManager.putDocInMemory project_id, doc_id, lines, version, ()-> - assert.deepEqual potentialSets, {} - assert.deepEqual potentialSAdds, {} - done() - diff --git a/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee deleted file mode 100644 index d1750f8fdd..0000000000 --- a/services/document-updater/test/unit/coffee/RedisManager/removeDocFromMemoryTests.coffee +++ /dev/null @@ -1,76 +0,0 @@ -require('coffee-script') -_ = require("underscore") -assert = require('assert') -sinon = require('sinon') -path = require('path') -modulePath = path.join __dirname, '../../../../app/js/RedisManager.js' -keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') -SandboxedModule = require('sandboxed-module') - -describe 'RedisManager.removeDocFromMemory', ()-> - - project_id = "12345" - doc_id1 = "docid1" - doc_id2 = "docid2" - doc_id3 = "docid3" - - redisMemory = undefined - redisManager = undefined - self = @ - beforeEach (done)-> - redisMemory = {} - rclient = - auth:-> - get:-> - set:(key, value)-> - redisMemory[key] = value - sadd:(key, value, cb)-> - if !redisMemory[key]? 
- redisMemory[key] = [] - redisMemory[key].push value - cb() - del : (key)-> - delete redisMemory[key] - srem : (key, member, cb)-> - index = redisMemory[key].indexOf(member) - redisMemory[key].splice(index, 1) - cb() - exec:(callback)-> - callback(null, []) - rclient.multi = () -> rclient - mocks = - "./ZipManager": {} - "logger-sharelatex": - error:-> - log:-> - "redis-sharelatex": - createClient : -> rclient - - redisManager = SandboxedModule.require(modulePath, requires: mocks) - redisManager.putDocInMemory project_id, doc_id1, 0, ["line"], -> - redisManager.putDocInMemory project_id, doc_id2, 0, ["ledf"], -> - redisManager.putDocInMemory project_id, doc_id3, 0, ["ledf"], -> - done() - - it 'should remove doc lines from memory', (done)-> - keyExists = false - redisManager.removeDocFromMemory project_id, doc_id1, ()-> - assert.equal redisMemory[keys.docLines(doc_id:doc_id1)], undefined - keys = _.keys(redisMemory) - containsKey(keys, doc_id1) - keys.forEach (sets)-> - containsKey sets, doc_id1 - _.each redisMemory, (value)-> - if value.indexOf(doc_id1) != -1 - assert.equal false, "#{doc_id1} found in value #{value}" - done() - - -containsKey = (haystack, key)-> - if haystack.forEach? - haystack.forEach (area)-> - if area.indexOf(key) != -1 - assert.equal false, "#{key} found in haystack in #{area}" - - - From 1db6f8f159d458a2994baece63b548b31cba1d93 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 7 Jun 2016 17:58:18 +0100 Subject: [PATCH 108/769] Pull out rclient into RedisBackend that supports sending requests to multiple rclients --- .../app/coffee/DispatchManager.coffee | 2 +- .../app/coffee/LockManager.coffee | 3 +- .../app/coffee/RedisBackend.coffee | 84 ++++++++++ .../app/coffee/RedisKeyBuilder.coffee | 64 +++++--- .../app/coffee/RedisManager.coffee | 16 +- .../app/coffee/ShareJsDB.coffee | 2 +- .../app/coffee/ShareJsUpdateManager.coffee | 2 +- .../app/coffee/TrackChangesManager.coffee | 12 +- .../app/coffee/UpdateKeys.coffee | 3 + .../config/settings.defaults.coffee | 29 +++- .../coffee/ApplyingUpdatesToADocTests.coffee | 153 +----------------- .../coffee/LockManager/CheckingTheLock.coffee | 6 +- .../LockManager/ReleasingTheLock.coffee | 6 +- .../coffee/LockManager/getLockTests.coffee | 4 +- .../coffee/LockManager/tryLockTests.coffee | 4 +- .../RedisManager/RedisManagerTests.coffee | 32 ++-- .../TrackChangesManagerTests.coffee | 22 +-- 17 files changed, 201 insertions(+), 243 deletions(-) create mode 100644 services/document-updater/app/coffee/RedisBackend.coffee create mode 100644 services/document-updater/app/coffee/UpdateKeys.coffee diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 4216a6dbcf..28397185dc 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -1,6 +1,6 @@ Settings = require('settings-sharelatex') logger = require('logger-sharelatex') -Keys = require('./RedisKeyBuilder') +Keys = require('./UpdateKeys') redis = require("redis-sharelatex") UpdateManager = require('./UpdateManager') diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 0c4eaad320..a1ed9292e9 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -1,7 +1,6 @@ metrics = require('./Metrics') Settings = require('settings-sharelatex') -redis = 
require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.web) +rclient = require("./RedisBackend").createClient() keys = require('./RedisKeyBuilder') logger = require "logger-sharelatex" os = require "os" diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee new file mode 100644 index 0000000000..3c795e827c --- /dev/null +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -0,0 +1,84 @@ +Settings = require "settings-sharelatex" +redis = require("redis-sharelatex") +async = require "async" + +class Client + constructor: (@clients) -> + + multi: () -> + return new MultiClient( + @clients.map (client) -> { + rclient: client.rclient.multi() + key_schema: client.key_schema + primary: client.primary + } + ) + +class MultiClient + constructor: (@clients) -> + + exec: (callback) -> + jobs = @clients.map (client) -> + (cb) -> + console.error "EXEC", client.rclient.queue + client.rclient.exec (result...) -> + console.error "EXEC RESULT", result + if client.primary + # Return this result as the actual result + callback(result...) + # Send the rest through for comparison + cb(result...) + async.parallel jobs, (error, results) -> + console.error "EXEC RESULTS", results + +COMMANDS = [ + "get", "smembers", "set", "srem", "sadd", "del", "lrange", + "llen", "rpush", "expire", "ltrim", "incr" +] +for command in COMMANDS + do (command) -> + Client.prototype[command] = (key_builder, args..., callback) -> + async.parallel @clients.map (client) -> + (cb) -> + key = key_builder(client.key_schema) + console.error "COMMAND", command, key, args + client.rclient[command] key, args..., (result...) -> + console.log "RESULT", command, result + if client.primary + # Return this result as the actual result + callback?(result...) + # Send the rest through for comparison + cb(result...) + , (error, results) -> + console.log "#{command} RESULTS", results + + MultiClient.prototype[command] = (key_builder, args...) -> + for client in @clients + key = key_builder(client.key_schema) + console.error "MULTI COMMAND", command, key, args + client.rclient[command] key, args... + +Client::eval = (script, pos, key_builder, args..., callback) -> + async.parallel @clients.map (client) -> + (cb) -> + key = key_builder(client.key_schema) + client.rclient.eval script, pos, key, args..., (result...) -> + if client.primary + # Return this result as the actual result + callback(result...) + # Send the rest through for comparison + cb(result...) 
+  , (error, results) ->
+    console.log "eval RESULTS", results
+
+module.exports =
+  createClient: () ->
+    client_configs = Settings.redis.documentupdater
+    unless client_configs instanceof Array
+      client_configs.primary = true
+      client_configs = [client_configs]
+    clients = client_configs.map (config) ->
+      rclient: redis.createClient(config)
+      key_schema: config.key_schema
+      primary: config.primary
+    return new Client(clients)
\ No newline at end of file
diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee
index e55ae99bde..0e9e59e8f1 100644
--- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee
+++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee
@@ -1,24 +1,40 @@
-PROJECTKEY = "ProjectId"
-BLOCKINGKEY = "Blocking"
-CHANGEQUE = "ChangeQue"
-DOCSINPROJECT = "DocsIn"
-PENDINGUPDATESKEY = "PendingUpdates"
-DOCLINES = "doclines"
-DOCOPS = "DocOps"
-DOCVERSION = "DocVersion"
-DOCSWITHHISTORYOPS = "DocsWithHistoryOps"
-UNCOMPRESSED_HISTORY_OPS = "UncompressedHistoryOps"
-
-module.exports =
-  docLines : (op)-> DOCLINES+":"+op.doc_id
-  docOps : (op)-> DOCOPS+":"+op.doc_id
-  uncompressedHistoryOp: (op) -> UNCOMPRESSED_HISTORY_OPS + ":" + op.doc_id
-  docVersion : (op)-> DOCVERSION+":"+op.doc_id
-  projectKey : (op)-> PROJECTKEY+":"+op.doc_id
-  blockingKey : (op)-> BLOCKINGKEY+":"+op.doc_id
-  changeQue : (op)-> CHANGEQUE+":"+op.project_id
-  docsInProject : (op)-> DOCSINPROJECT+":"+op.project_id
-  pendingUpdates : (op)-> PENDINGUPDATESKEY+":"+op.doc_id
-  combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}"
-  splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":")
-  docsWithHistoryOps: (op) -> DOCSWITHHISTORYOPS + ":" + op.project_id
+# The default key schema looks like:
+# doclines:foo
+# DocVersion:foo
+# but if we use redis cluster, we want all 'foo' keys to map to the same
+# node, so we must use:
+# doclines:{foo}
+# DocVersion:{foo}
+# since redis hashes on the contents of {...}.
+#
+# To transparently support different key schemas for different clients
+# (potentially writing to and reading from both a cluster and a single
+# instance while we migrate), instead of keys, we now pass around functions
+# which will build the key when passed a schema.
+#
+# E.g.
+# key_schema = Settings.redis.keys
+# key_schema == { docLines: ({doc_id}) -> "doclines:#{doc_id}", ... }
+# key_builder = RedisKeyBuilder.docLines({doc_id: "foo"})
+# key_builder == (key_schema) -> key_schema.docLines({doc_id: "foo"})
+# key = key_builder(key_schema)
+# key == "doclines:foo"
+module.exports = RedisKeyBuilder =
+  blockingKey: ({doc_id}) ->
+    return (key_schema) -> key_schema.blockingKey({doc_id})
+  docLines: ({doc_id}) ->
+    return (key_schema) -> key_schema.docLines({doc_id})
+  docOps: ({doc_id}) ->
+    return (key_schema) -> key_schema.docOps({doc_id})
+  docVersion: ({doc_id}) ->
+    return (key_schema) -> key_schema.docVersion({doc_id})
+  projectKey: ({doc_id}) ->
+    return (key_schema) -> key_schema.projectKey({doc_id})
+  uncompressedHistoryOp: ({doc_id}) ->
+    return (key_schema) -> key_schema.uncompressedHistoryOp({doc_id})
+  pendingUpdates: ({doc_id}) ->
+    return (key_schema) -> key_schema.pendingUpdates({doc_id})
+  docsInProject: ({project_id}) ->
+    return (key_schema) -> key_schema.docsInProject({project_id})
+  docsWithHistoryOps: ({project_id}) ->
+    return (key_schema) -> key_schema.docsWithHistoryOps({project_id})
diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee
index a300e92fcc..d5908be12e 100644
--- a/services/document-updater/app/coffee/RedisManager.coffee
+++ b/services/document-updater/app/coffee/RedisManager.coffee
@@ -1,16 +1,12 @@
 Settings = require('settings-sharelatex')
-redis = require("redis-sharelatex")
-rclient = redis.createClient(Settings.redis.web)
 async = require('async')
+rclient = require("./RedisBackend").createClient()
 _ = require('underscore')
 keys = require('./RedisKeyBuilder')
 logger = require('logger-sharelatex')
 metrics = require('./Metrics')
 Errors = require "./Errors"
-redisOptions = _.clone(Settings.redis.web)
-redisOptions.return_buffers = true
-
 # Make times easy to read
 minutes = 60 # seconds for Redis expire
@@ -141,15 +137,5 @@ module.exports = RedisManager =
 		version = parseInt(version, 10)
 		callback null, version
-  pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error, length) ->) ->
-    jsonOp = JSON.stringify op
-    async.parallel [
-      (cb) -> rclient.rpush keys.uncompressedHistoryOp(doc_id: doc_id), jsonOp, cb
-      (cb) -> rclient.sadd keys.docsWithHistoryOps(project_id: project_id), doc_id, cb
-    ], (error, results) ->
-      return callback(error) if error?
- [length, _] = results - callback(error, length) - getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback diff --git a/services/document-updater/app/coffee/ShareJsDB.coffee b/services/document-updater/app/coffee/ShareJsDB.coffee index da6640685b..f9527ccc0b 100644 --- a/services/document-updater/app/coffee/ShareJsDB.coffee +++ b/services/document-updater/app/coffee/ShareJsDB.coffee @@ -1,4 +1,4 @@ -Keys = require('./RedisKeyBuilder') +Keys = require('./UpdateKeys') Settings = require('settings-sharelatex') DocumentManager = require "./DocumentManager" RedisManager = require "./RedisManager" diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 20de2d1dfd..eb7ad92720 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -3,7 +3,7 @@ ShareJsDB = require "./ShareJsDB" async = require "async" logger = require "logger-sharelatex" Settings = require('settings-sharelatex') -Keys = require "./RedisKeyBuilder" +Keys = require "./UpdateKeys" {EventEmitter} = require "events" util = require "util" diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 90cba86b36..43d2314149 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -1,8 +1,9 @@ settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" -RedisManager = require "./RedisManager" -crypto = require("crypto") +redis = require("redis-sharelatex") +rclient = redis.createClient(settings.redis.web) +async = require "async" module.exports = TrackChangesManager = flushDocChanges: (project_id, doc_id, callback = (error) ->) -> @@ -23,8 +24,13 @@ module.exports = TrackChangesManager = FLUSH_EVERY_N_OPS: 50 pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error) ->) -> - RedisManager.pushUncompressedHistoryOp project_id, doc_id, op, (error, length) -> + jsonOp = JSON.stringify op + async.parallel [ + (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOp, cb + (cb) -> rclient.sadd "DocsWithHistoryOps:#{project_id}", doc_id, cb + ], (error, results) -> return callback(error) if error? + [length, _] = results if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 # Do this in the background since it uses HTTP and so may be too # slow to wait for when processing a doc update. 
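
To make the RedisKeyBuilder indirection above concrete: a key builder captures the ids up front and returns a function that builds the final key once it is handed a client's schema. A minimal standalone sketch (illustrative only, not part of the patch; the two schemas are abbreviated copies of those configured in settings.defaults.coffee below):

    # A key builder is a function from a key schema to a concrete key.
    docLines = ({doc_id}) ->
      (key_schema) -> key_schema.docLines({doc_id})

    plain_schema   = docLines: ({doc_id}) -> "doclines:#{doc_id}"
    cluster_schema = docLines: ({doc_id}) -> "doclines:{#{doc_id}}"

    key_builder = docLines(doc_id: "foo")
    console.log key_builder(plain_schema)    # "doclines:foo"
    console.log key_builder(cluster_schema)  # "doclines:{foo}" (all {foo} keys share a hash slot)
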
diff --git a/services/document-updater/app/coffee/UpdateKeys.coffee b/services/document-updater/app/coffee/UpdateKeys.coffee new file mode 100644 index 0000000000..7d1f279495 --- /dev/null +++ b/services/document-updater/app/coffee/UpdateKeys.coffee @@ -0,0 +1,3 @@ +module.exports = + combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}" + splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":") diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 41d25b4ea2..38306d5d79 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -20,9 +20,32 @@ module.exports = port:"6379" host:"localhost" password:"" - zip: - minSize: 10*1024 - writesEnabled: false + documentupdater: [{ + primary: true + port: "6379" + host: "localhost" + password:"" + key_schema: + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + docLines: ({doc_id}) -> "doclines:#{doc_id}" + docOps: ({doc_id}) -> "DocOps:#{doc_id}" + docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + }, { + port: "6380" + host: "localhost" + password:"" + key_schema: + blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" + docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + }] max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index a1c56ee41e..e3966cabfa 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -210,7 +210,7 @@ describe "Applying updates to a doc", -> throw error if error? DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) => throw error if error? - setTimeout done, 200 + setTimeout done, 1000 after -> MockTrackChangesApi.flushDoc.restore() @@ -237,154 +237,3 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @result done() - - - - -describe "Applying updates to a large doc (uses compression)", -> - MIN_SIZE = 500000 - before -> - @lines = ["one", "two", "three"] - while @lines.join('').length < MIN_SIZE - @lines.push "this is a repeated long line which will create a large document which must be compressed #{@lines.length}" - @version = 42 - @update = - doc: @doc_id - op: [{ - i: "one and a half\n" - p: 4 - }] - v: @version - @result = @lines.slice() - @result.splice 1, 0, "one and a half" - - describe "when the document is not loaded", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - sinon.spy MockWebApi, "getDocument" - - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => - throw error if error? 
- DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? - setTimeout done, 200 - - after -> - MockWebApi.getDocument.restore() - - it "should load the document from the web API", -> - MockWebApi.getDocument - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - - it "should push the applied updates to the track changes api", (done) -> - rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => - throw error if error? - JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => - throw error if error? - result.should.equal 1 - done() - - - describe "when the document is loaded", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert doc_id: ObjectId(@doc_id), version: @version, (error) => - throw error if error? - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? - setTimeout done, 200 - - after -> - MockWebApi.getDocument.restore() - - it "should not need to call the web api", -> - MockWebApi.getDocument.called.should.equal false - - it "should update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - - it "should push the applied updates to the track changes api", (done) -> - rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => - JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => - result.should.equal 1 - done() - - describe "with a broken update", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert doc_id: ObjectId(@doc_id), version: @version, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) -> - throw error if error? - setTimeout done, 200 - - it "should not update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @lines - done() - - describe "with enough updates to flush to the track changes api", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - updates = [] - for v in [0..99] # Should flush after 50 ops - updates.push - doc_id: @doc_id, - op: [i: v.toString(), p: 0] - v: v - - sinon.spy MockTrackChangesApi, "flushDoc" - - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) => - throw error if error? - DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) => - throw error if error? 
- setTimeout done, 500 - - after -> - MockTrackChangesApi.flushDoc.restore() - - it "should flush the doc", -> - MockTrackChangesApi.flushDoc.called.should.equal true - - describe "when there is no version in Mongo", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - } - - update = - doc: @doc_id - op: @update.op - v: 0 - DocUpdaterClient.sendUpdate @project_id, @doc_id, update, (error) -> - throw error if error? - setTimeout done, 200 - - it "should update the doc (using version = 0)", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee index e4514750c8..ea69dcff52 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -3,7 +3,6 @@ sinon = require('sinon') assert = require('assert') path = require('path') modulePath = path.join __dirname, '../../../../app/js/LockManager.js' -keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') project_id = 1234 doc_id = 5678 blockingKey = "Blocking:#{doc_id}" @@ -15,8 +14,9 @@ describe 'LockManager - checking the lock', ()-> mocks = "logger-sharelatex": log:-> - - "redis-sharelatex": + "./RedisKeyBuilder": + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + "./RedisBackend": createClient : ()-> auth:-> exists: existsStub diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index bca2f9124a..3260c2fea9 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -3,7 +3,6 @@ sinon = require('sinon') assert = require('assert') path = require('path') modulePath = path.join __dirname, '../../../../app/js/LockManager.js' -keys = require(path.join __dirname, '../../../../app/js/RedisKeyBuilder.js') project_id = 1234 doc_id = 5678 SandboxedModule = require('sandboxed-module') @@ -13,8 +12,9 @@ describe 'LockManager - releasing the lock', ()-> evalStub = sinon.stub().yields(1) mocks = "logger-sharelatex": log:-> - - "redis-sharelatex": + "./RedisKeyBuilder": + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + "./RedisBackend": createClient : ()-> auth:-> eval: evalStub diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index 84cc3208a3..89c08afc2f 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -8,7 +8,9 @@ describe 'LockManager - getting the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - "redis-sharelatex": + "./RedisKeyBuilder": + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + "./RedisBackend": createClient : () => auth:-> "./Metrics": {inc: () ->} diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index 
33c3eb3d51..55af920469 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -8,7 +8,9 @@ describe 'LockManager - trying the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - "redis-sharelatex": + "./RedisKeyBuilder": + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + "./RedisBackend": createClient : () => auth:-> set: @set = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 99daf8f706..338e7cd668 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -12,7 +12,15 @@ describe "RedisManager", -> exec: sinon.stub() @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex": createClient: () => @rclient + "./RedisBackend": createClient: () => @rclient + "./RedisKeyBuilder": + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + docLines: ({doc_id}) -> "doclines:#{doc_id}" + docOps: ({doc_id}) -> "DocOps:#{doc_id}" + docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + docsInProject: ({project_id}) -> "DocsIn:#{project_id}" "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } "./Metrics": @metrics = inc: sinon.stub() @@ -171,28 +179,6 @@ describe "RedisManager", -> it "should log out the problem", -> @logger.warn.called.should.equal true - describe "pushUncompressedHistoryOp", -> - beforeEach (done) -> - @op = { op: [{ i: "foo", p: 4 }] } - @rclient.rpush = sinon.stub().yields(null, @length = 42) - @rclient.sadd = sinon.stub().yields() - @RedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, (args...) => - @callback(args...) 
- done() - - it "should push the doc op into the doc ops list", -> - @rclient.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) - .should.equal true - - it "should add the doc_id to the set of which records the project docs", -> - @rclient.sadd - .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) - .should.equal true - - it "should call the callback with the length", -> - @callback.calledWith(null, @length).should.equal true - describe "getUpdatesLength", -> beforeEach -> @rclient.llen = sinon.stub().yields(null, @length = 3) diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index 8fad5322e2..f43a3a0c43 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -7,9 +7,9 @@ describe "TrackChangesManager", -> beforeEach -> @TrackChangesManager = SandboxedModule.require modulePath, requires: "request": @request = {} - "settings-sharelatex": @Settings = {} + "settings-sharelatex": @Settings = { redis: web: {} } "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./RedisManager": @RedisManager = {} + "redis-sharelatex": createClient: () => @rclient = {} @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -42,17 +42,21 @@ describe "TrackChangesManager", -> describe "pushUncompressedHistoryOp", -> beforeEach -> - @op = "mock-op" + @op = { op: [{ i: "foo", p: 4 }] } + @rclient.rpush = sinon.stub().yields(null, @length = 42) + @rclient.sadd = sinon.stub().yields() @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) describe "pushing the op", -> beforeEach -> - @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(3, null, 1) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should push the op into redis", -> - @RedisManager.pushUncompressedHistoryOp - .calledWith(@project_id, @doc_id, @op) + @rclient.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify @op) + .should.equal true + @rclient.sadd + .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) .should.equal true it "should call the callback", -> @@ -63,8 +67,7 @@ describe "TrackChangesManager", -> describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> - @RedisManager.pushUncompressedHistoryOp = - sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @rclient.rpush = sinon.stub().yields(null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should tell the track changes api to flush", -> @@ -74,8 +77,7 @@ describe "TrackChangesManager", -> describe "when TrackChangesManager errors", -> beforeEach -> - @RedisManager.pushUncompressedHistoryOp = - sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @rclient.rpush = sinon.stub().yields(null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback From ef43e2b325ed7885d9332a33db3d285a492eb67f Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 7 Jun 2016 18:38:32 +0100 Subject: [PATCH 109/769] 
Configure backend database as redis cluster --- services/document-updater/.gitignore | 5 + .../app/coffee/RedisBackend.coffee | 8 +- .../config/settings.defaults.coffee | 7 +- services/document-updater/package.json | 1 + .../redis_cluster/7000/redis.conf | 5 + .../redis_cluster/7001/redis.conf | 5 + .../redis_cluster/7002/redis.conf | 5 + .../redis_cluster/7003/redis.conf | 5 + .../redis_cluster/7004/redis.conf | 5 + .../redis_cluster/7005/redis.conf | 5 + .../redis_cluster/redis-cluster.sh | 23 + .../redis_cluster/redis-trib.rb | 1696 +++++++++++++++++ 12 files changed, 1765 insertions(+), 5 deletions(-) create mode 100644 services/document-updater/redis_cluster/7000/redis.conf create mode 100644 services/document-updater/redis_cluster/7001/redis.conf create mode 100644 services/document-updater/redis_cluster/7002/redis.conf create mode 100644 services/document-updater/redis_cluster/7003/redis.conf create mode 100644 services/document-updater/redis_cluster/7004/redis.conf create mode 100644 services/document-updater/redis_cluster/7005/redis.conf create mode 100755 services/document-updater/redis_cluster/redis-cluster.sh create mode 100755 services/document-updater/redis_cluster/redis-trib.rb diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore index a477cfd66c..ad21f261b4 100644 --- a/services/document-updater/.gitignore +++ b/services/document-updater/.gitignore @@ -46,3 +46,8 @@ test/acceptance/js/* forever/ **.swp + +# Redis cluster +**/appendonly.aof +**/dump.rdb +**/nodes.conf diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 3c795e827c..e242beaec2 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -1,5 +1,4 @@ Settings = require "settings-sharelatex" -redis = require("redis-sharelatex") async = require "async" class Client @@ -78,7 +77,12 @@ module.exports = client_configs.primary = true client_configs = [client_configs] clients = client_configs.map (config) -> - rclient: redis.createClient(config) + if config.cluster? 
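+        # Cluster configs are driven by ioredis, which supports redis cluster;
+        # redis-sharelatex talks to a single instance only.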
+ Redis = require("ioredis") + rclient = new Redis.Cluster(config.cluster) + else + rclient = require("redis-sharelatex").createClient(config) + rclient: rclient key_schema: config.key_schema primary: config.primary return new Client(clients) \ No newline at end of file diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 38306d5d79..9a5c6d91f2 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -34,9 +34,10 @@ module.exports = pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" }, { - port: "6380" - host: "localhost" - password:"" + cluster: [{ + port: "7000" + host: "localhost" + }] key_schema: blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" docLines: ({doc_id}) -> "doclines:{#{doc_id}}" diff --git a/services/document-updater/package.json b/services/document-updater/package.json index d27b16271c..e91f3ff7c8 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -10,6 +10,7 @@ "async": "^2.0.0-rc.5", "coffee-script": "1.4.0", "express": "3.3.4", + "ioredis": "^2.0.1", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", diff --git a/services/document-updater/redis_cluster/7000/redis.conf b/services/document-updater/redis_cluster/7000/redis.conf new file mode 100644 index 0000000000..10dc9cb85c --- /dev/null +++ b/services/document-updater/redis_cluster/7000/redis.conf @@ -0,0 +1,5 @@ +port 7000 +cluster-enabled yes +cluster-config-file nodes.conf +cluster-node-timeout 5000 +appendonly yes \ No newline at end of file diff --git a/services/document-updater/redis_cluster/7001/redis.conf b/services/document-updater/redis_cluster/7001/redis.conf new file mode 100644 index 0000000000..10df3f5a5c --- /dev/null +++ b/services/document-updater/redis_cluster/7001/redis.conf @@ -0,0 +1,5 @@ +port 7001 +cluster-enabled yes +cluster-config-file nodes.conf +cluster-node-timeout 5000 +appendonly yes \ No newline at end of file diff --git a/services/document-updater/redis_cluster/7002/redis.conf b/services/document-updater/redis_cluster/7002/redis.conf new file mode 100644 index 0000000000..d16f4e2a7f --- /dev/null +++ b/services/document-updater/redis_cluster/7002/redis.conf @@ -0,0 +1,5 @@ +port 7002 +cluster-enabled yes +cluster-config-file nodes.conf +cluster-node-timeout 5000 +appendonly yes \ No newline at end of file diff --git a/services/document-updater/redis_cluster/7003/redis.conf b/services/document-updater/redis_cluster/7003/redis.conf new file mode 100644 index 0000000000..f54103d83f --- /dev/null +++ b/services/document-updater/redis_cluster/7003/redis.conf @@ -0,0 +1,5 @@ +port 7003 +cluster-enabled yes +cluster-config-file nodes.conf +cluster-node-timeout 5000 +appendonly yes \ No newline at end of file diff --git a/services/document-updater/redis_cluster/7004/redis.conf b/services/document-updater/redis_cluster/7004/redis.conf new file mode 100644 index 0000000000..8b3af0834b --- /dev/null +++ b/services/document-updater/redis_cluster/7004/redis.conf @@ -0,0 +1,5 @@ +port 7004 +cluster-enabled yes +cluster-config-file nodes.conf +cluster-node-timeout 5000 +appendonly yes \ No newline at end of file diff --git a/services/document-updater/redis_cluster/7005/redis.conf 
b/services/document-updater/redis_cluster/7005/redis.conf new file mode 100644 index 0000000000..ce0b227510 --- /dev/null +++ b/services/document-updater/redis_cluster/7005/redis.conf @@ -0,0 +1,5 @@ +port 7005 +cluster-enabled yes +cluster-config-file nodes.conf +cluster-node-timeout 5000 +appendonly yes \ No newline at end of file diff --git a/services/document-updater/redis_cluster/redis-cluster.sh b/services/document-updater/redis_cluster/redis-cluster.sh new file mode 100755 index 0000000000..e25359bcf7 --- /dev/null +++ b/services/document-updater/redis_cluster/redis-cluster.sh @@ -0,0 +1,23 @@ +#!/bin/sh + +(cd 7000 && redis-server redis.conf) & +PID1="$!" + +(cd 7001 && redis-server redis.conf) & +PID2="$!" + +(cd 7002 && redis-server redis.conf) & +PID3="$!" + +(cd 7003 && redis-server redis.conf) & +PID4="$!" + +(cd 7004 && redis-server redis.conf) & +PID5="$!" + +(cd 7005 && redis-server redis.conf) & +PID6="$!" + +trap "kill $PID1 $PID2 $PID3 $PID4 $PID5 $PID6" exit INT TERM + +wait \ No newline at end of file diff --git a/services/document-updater/redis_cluster/redis-trib.rb b/services/document-updater/redis_cluster/redis-trib.rb new file mode 100755 index 0000000000..ccb0551586 --- /dev/null +++ b/services/document-updater/redis_cluster/redis-trib.rb @@ -0,0 +1,1696 @@ +#!/usr/bin/env ruby + +# TODO (temporary here, we'll move this into the Github issues once +# redis-trib initial implementation is completed). +# +# - Make sure that if the rehashing fails in the middle redis-trib will try +# to recover. +# - When redis-trib performs a cluster check, if it detects a slot move in +# progress it should prompt the user to continue the move from where it +# stopped. +# - Gracefully handle Ctrl+C in move_slot to prompt the user if really stop +# while rehashing, and performing the best cleanup possible if the user +# forces the quit. +# - When doing "fix" set a global Fix to true, and prompt the user to +# fix the problem if automatically fixable every time there is something +# to fix. For instance: +# 1) If there is a node that pretend to receive a slot, or to migrate a +# slot, but has no entries in that slot, fix it. +# 2) If there is a node having keys in slots that are not owned by it +# fix this condition moving the entries in the same node. +# 3) Perform more possibly slow tests about the state of the cluster. +# 4) When aborted slot migration is detected, fix it. + +require 'rubygems' +require 'redis' + +ClusterHashSlots = 16384 +MigrateDefaultTimeout = 60000 +MigrateDefaultPipeline = 10 +RebalanceDefaultThreshold = 2 + +$verbose = false + +def xputs(s) + case s[0..2] + when ">>>" + color="29;1" + when "[ER" + color="31;1" + when "[WA" + color="31;1" + when "[OK" + color="32" + when "[FA","***" + color="33" + else + color=nil + end + + color = nil if ENV['TERM'] != "xterm" + print "\033[#{color}m" if color + print s + print "\033[0m" if color + print "\n" +end + +class ClusterNode + def initialize(addr) + s = addr.split("@")[0].split(":") + if s.length < 2 + puts "Invalid IP or Port (given as #{addr}) - use IP:Port format" + exit 1 + end + port = s.pop # removes port from split array + ip = s.join(":") # if s.length > 1 here, it's IPv6, so restore address + @r = nil + @info = {} + @info[:host] = ip + @info[:port] = port + @info[:slots] = {} + @info[:migrating] = {} + @info[:importing] = {} + @info[:replicate] = false + @dirty = false # True if we need to flush slots info into node. 
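+    # Other cluster members discovered via CLUSTER NODES; populated by load_info when called with :getfriends.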
+ @friends = [] + end + + def friends + @friends + end + + def slots + @info[:slots] + end + + def has_flag?(flag) + @info[:flags].index(flag) + end + + def to_s + "#{@info[:host]}:#{@info[:port]}" + end + + def connect(o={}) + return if @r + print "Connecting to node #{self}: " if $verbose + STDOUT.flush + begin + @r = Redis.new(:host => @info[:host], :port => @info[:port], :timeout => 60) + @r.ping + rescue + xputs "[ERR] Sorry, can't connect to node #{self}" + exit 1 if o[:abort] + @r = nil + end + xputs "OK" if $verbose + end + + def assert_cluster + info = @r.info + if !info["cluster_enabled"] || info["cluster_enabled"].to_i == 0 + xputs "[ERR] Node #{self} is not configured as a cluster node." + exit 1 + end + end + + def assert_empty + if !(@r.cluster("info").split("\r\n").index("cluster_known_nodes:1")) || + (@r.info['db0']) + xputs "[ERR] Node #{self} is not empty. Either the node already knows other nodes (check with CLUSTER NODES) or contains some key in database 0." + exit 1 + end + end + + def load_info(o={}) + self.connect + nodes = @r.cluster("nodes").split("\n") + nodes.each{|n| + # name addr flags role ping_sent ping_recv link_status slots + split = n.split + name,addr,flags,master_id,ping_sent,ping_recv,config_epoch,link_status = split[0..6] + slots = split[8..-1] + info = { + :name => name, + :addr => addr, + :flags => flags.split(","), + :replicate => master_id, + :ping_sent => ping_sent.to_i, + :ping_recv => ping_recv.to_i, + :link_status => link_status + } + info[:replicate] = false if master_id == "-" + + if info[:flags].index("myself") + @info = @info.merge(info) + @info[:slots] = {} + slots.each{|s| + if s[0..0] == '[' + if s.index("->-") # Migrating + slot,dst = s[1..-1].split("->-") + @info[:migrating][slot.to_i] = dst + elsif s.index("-<-") # Importing + slot,src = s[1..-1].split("-<-") + @info[:importing][slot.to_i] = src + end + elsif s.index("-") + start,stop = s.split("-") + self.add_slots((start.to_i)..(stop.to_i)) + else + self.add_slots((s.to_i)..(s.to_i)) + end + } if slots + @dirty = false + @r.cluster("info").split("\n").each{|e| + k,v=e.split(":") + k = k.to_sym + v.chop! + if k != :cluster_state + @info[k] = v.to_i + else + @info[k] = v + end + } + elsif o[:getfriends] + @friends << info + end + } + end + + def add_slots(slots) + slots.each{|s| + @info[:slots][s] = :new + } + @dirty = true + end + + def set_as_replica(node_id) + @info[:replicate] = node_id + @dirty = true + end + + def flush_node_config + return if !@dirty + if @info[:replicate] + begin + @r.cluster("replicate",@info[:replicate]) + rescue + # If the cluster did not already joined it is possible that + # the slave does not know the master node yet. So on errors + # we return ASAP leaving the dirty flag set, to flush the + # config later. + return + end + else + new = [] + @info[:slots].each{|s,val| + if val == :new + new << s + @info[:slots][s] = true + end + } + @r.cluster("addslots",*new) + end + @dirty = false + end + + def info_string + # We want to display the hash slots assigned to this node + # as ranges, like in: "1-5,8-9,20-25,30" + # + # Note: this could be easily written without side effects, + # we use 'slots' just to split the computation into steps. + + # First step: we want an increasing array of integers + # for instance: [1,2,3,4,5,8,9,20,21,22,23,24,25,30] + slots = @info[:slots].keys.sort + + # As we want to aggregate adjacent slots we convert all the + # slot integers into ranges (with just one element) + # So we have something like [1..1,2..2, ... and so forth. 
+ slots.map!{|x| x..x} + + # Finally we group ranges with adjacent elements. + slots = slots.reduce([]) {|a,b| + if !a.empty? && b.first == (a[-1].last)+1 + a[0..-2] + [(a[-1].first)..(b.last)] + else + a + [b] + end + } + + # Now our task is easy, we just convert ranges with just one + # element into a number, and a real range into a start-end format. + # Finally we join the array using the comma as separator. + slots = slots.map{|x| + x.count == 1 ? x.first.to_s : "#{x.first}-#{x.last}" + }.join(",") + + role = self.has_flag?("master") ? "M" : "S" + + if self.info[:replicate] and @dirty + is = "S: #{self.info[:name]} #{self.to_s}" + else + is = "#{role}: #{self.info[:name]} #{self.to_s}\n"+ + " slots:#{slots} (#{self.slots.length} slots) "+ + "#{(self.info[:flags]-["myself"]).join(",")}" + end + if self.info[:replicate] + is += "\n replicates #{info[:replicate]}" + elsif self.has_flag?("master") && self.info[:replicas] + is += "\n #{info[:replicas].length} additional replica(s)" + end + is + end + + # Return a single string representing nodes and associated slots. + # TODO: remove slaves from config when slaves will be handled + # by Redis Cluster. + def get_config_signature + config = [] + @r.cluster("nodes").each_line{|l| + s = l.split + slots = s[8..-1].select {|x| x[0..0] != "["} + next if slots.length == 0 + config << s[0]+":"+(slots.sort.join(",")) + } + config.sort.join("|") + end + + def info + @info + end + + def is_dirty? + @dirty + end + + def r + @r + end +end + +class RedisTrib + def initialize + @nodes = [] + @fix = false + @errors = [] + @timeout = MigrateDefaultTimeout + end + + def check_arity(req_args, num_args) + if ((req_args > 0 and num_args != req_args) || + (req_args < 0 and num_args < req_args.abs)) + xputs "[ERR] Wrong number of arguments for specified sub command" + exit 1 + end + end + + def add_node(node) + @nodes << node + end + + def reset_nodes + @nodes = [] + end + + def cluster_error(msg) + @errors << msg + xputs msg + end + + # Return the node with the specified ID or Nil. + def get_node_by_name(name) + @nodes.each{|n| + return n if n.info[:name] == name.downcase + } + return nil + end + + # Like get_node_by_name but the specified name can be just the first + # part of the node ID as long as the prefix in unique across the + # cluster. + def get_node_by_abbreviated_name(name) + l = name.length + candidates = [] + @nodes.each{|n| + if n.info[:name][0...l] == name.downcase + candidates << n + end + } + return nil if candidates.length != 1 + candidates[0] + end + + # This function returns the master that has the least number of replicas + # in the cluster. If there are multiple masters with the same smaller + # number of replicas, one at random is returned. + def get_master_with_least_replicas + masters = @nodes.select{|n| n.has_flag? "master"} + sorted = masters.sort{|a,b| + a.info[:replicas].length <=> b.info[:replicas].length + } + sorted[0] + end + + def check_cluster(opt={}) + xputs ">>> Performing Cluster Check (using node #{@nodes[0]})" + show_nodes if !opt[:quiet] + check_config_consistency + check_open_slots + check_slots_coverage + end + + def show_cluster_info + masters = 0 + keys = 0 + @nodes.each{|n| + if n.has_flag?("master") + puts "#{n} (#{n.info[:name][0...8]}...) -> #{n.r.dbsize} keys | #{n.slots.length} slots | "+ + "#{n.info[:replicas].length} slaves." + masters += 1 + keys += n.r.dbsize + end + } + xputs "[OK] #{keys} keys in #{masters} masters." 
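+        # Note: the average divides by all 16384 hash slots, covered or not.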
+ keys_per_slot = sprintf("%.2f",keys/16384.0) + puts "#{keys_per_slot} keys per slot on average." + end + + # Merge slots of every known node. If the resulting slots are equal + # to ClusterHashSlots, then all slots are served. + def covered_slots + slots = {} + @nodes.each{|n| + slots = slots.merge(n.slots) + } + slots + end + + def check_slots_coverage + xputs ">>> Check slots coverage..." + slots = covered_slots + if slots.length == ClusterHashSlots + xputs "[OK] All #{ClusterHashSlots} slots covered." + else + cluster_error \ + "[ERR] Not all #{ClusterHashSlots} slots are covered by nodes." + fix_slots_coverage if @fix + end + end + + def check_open_slots + xputs ">>> Check for open slots..." + open_slots = [] + @nodes.each{|n| + if n.info[:migrating].size > 0 + cluster_error \ + "[WARNING] Node #{n} has slots in migrating state (#{n.info[:migrating].keys.join(",")})." + open_slots += n.info[:migrating].keys + end + if n.info[:importing].size > 0 + cluster_error \ + "[WARNING] Node #{n} has slots in importing state (#{n.info[:importing].keys.join(",")})." + open_slots += n.info[:importing].keys + end + } + open_slots.uniq! + if open_slots.length > 0 + xputs "[WARNING] The following slots are open: #{open_slots.join(",")}" + end + if @fix + open_slots.each{|slot| fix_open_slot slot} + end + end + + def nodes_with_keys_in_slot(slot) + nodes = [] + @nodes.each{|n| + next if n.has_flag?("slave") + nodes << n if n.r.cluster("getkeysinslot",slot,1).length > 0 + } + nodes + end + + def fix_slots_coverage + not_covered = (0...ClusterHashSlots).to_a - covered_slots.keys + xputs ">>> Fixing slots coverage..." + xputs "List of not covered slots: " + not_covered.join(",") + + # For every slot, take action depending on the actual condition: + # 1) No node has keys for this slot. + # 2) A single node has keys for this slot. + # 3) Multiple nodes have keys for this slot. + slots = {} + not_covered.each{|slot| + nodes = nodes_with_keys_in_slot(slot) + slots[slot] = nodes + xputs "Slot #{slot} has keys in #{nodes.length} nodes: #{nodes.join(", ")}" + } + + none = slots.select {|k,v| v.length == 0} + single = slots.select {|k,v| v.length == 1} + multi = slots.select {|k,v| v.length > 1} + + # Handle case "1": keys in no node. + if none.length > 0 + xputs "The folowing uncovered slots have no keys across the cluster:" + xputs none.keys.join(",") + yes_or_die "Fix these slots by covering with a random node?" + none.each{|slot,nodes| + node = @nodes.sample + xputs ">>> Covering slot #{slot} with #{node}" + node.r.cluster("addslots",slot) + } + end + + # Handle case "2": keys only in one node. + if single.length > 0 + xputs "The folowing uncovered slots have keys in just one node:" + puts single.keys.join(",") + yes_or_die "Fix these slots by covering with those nodes?" + single.each{|slot,nodes| + xputs ">>> Covering slot #{slot} with #{nodes[0]}" + nodes[0].r.cluster("addslots",slot) + } + end + + # Handle case "3": keys in multiple nodes. + if multi.length > 0 + xputs "The folowing uncovered slots have keys in multiple nodes:" + xputs multi.keys.join(",") + yes_or_die "Fix these slots by moving keys into a single node?" 
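+            # Keys are consolidated onto whichever node already holds the most of them for the slot.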
+ multi.each{|slot,nodes| + target = get_node_with_most_keys_in_slot(nodes,slot) + xputs ">>> Covering slot #{slot} moving keys to #{target}" + + target.r.cluster('addslots',slot) + target.r.cluster('setslot',slot,'stable') + nodes.each{|src| + next if src == target + # Set the source node in 'importing' state (even if we will + # actually migrate keys away) in order to avoid receiving + # redirections for MIGRATE. + src.r.cluster('setslot',slot,'importing',target.info[:name]) + move_slot(src,target,slot,:dots=>true,:fix=>true,:cold=>true) + src.r.cluster('setslot',slot,'stable') + } + } + end + end + + # Return the owner of the specified slot + def get_slot_owners(slot) + owners = [] + @nodes.each{|n| + next if n.has_flag?("slave") + n.slots.each{|s,_| + owners << n if s == slot + } + } + owners + end + + # Return the node, among 'nodes' with the greatest number of keys + # in the specified slot. + def get_node_with_most_keys_in_slot(nodes,slot) + best = nil + best_numkeys = 0 + @nodes.each{|n| + next if n.has_flag?("slave") + numkeys = n.r.cluster("countkeysinslot",slot) + if numkeys > best_numkeys || best == nil + best = n + best_numkeys = numkeys + end + } + return best + end + + # Slot 'slot' was found to be in importing or migrating state in one or + # more nodes. This function fixes this condition by migrating keys where + # it seems more sensible. + def fix_open_slot(slot) + puts ">>> Fixing open slot #{slot}" + + # Try to obtain the current slot owner, according to the current + # nodes configuration. + owners = get_slot_owners(slot) + owner = owners[0] if owners.length == 1 + + migrating = [] + importing = [] + @nodes.each{|n| + next if n.has_flag? "slave" + if n.info[:migrating][slot] + migrating << n + elsif n.info[:importing][slot] + importing << n + elsif n.r.cluster("countkeysinslot",slot) > 0 && n != owner + xputs "*** Found keys about slot #{slot} in node #{n}!" + importing << n + end + } + puts "Set as migrating in: #{migrating.join(",")}" + puts "Set as importing in: #{importing.join(",")}" + + # If there is no slot owner, set as owner the slot with the biggest + # number of keys, among the set of migrating / importing nodes. + if !owner + xputs ">>> Nobody claims ownership, selecting an owner..." + owner = get_node_with_most_keys_in_slot(@nodes,slot) + + # If we still don't have an owner, we can't fix it. + if !owner + xputs "[ERR] Can't select a slot owner. Impossible to fix." + exit 1 + end + + # Use ADDSLOTS to assign the slot. + puts "*** Configuring #{owner} as the slot owner" + owner.r.cluster("setslot",slot,"stable") + owner.r.cluster("addslots",slot) + # Make sure this information will propagate. Not strictly needed + # since there is no past owner, so all the other nodes will accept + # whatever epoch this node will claim the slot with. + owner.r.cluster("bumpepoch") + + # Remove the owner from the list of migrating/importing + # nodes. + migrating.delete(owner) + importing.delete(owner) + end + + # If there are multiple owners of the slot, we need to fix it + # so that a single node is the owner and all the other nodes + # are in importing state. Later the fix can be handled by one + # of the base cases above. + # + # Note that this case also covers multiple nodes having the slot + # in migrating state, since migrating is a valid state only for + # slot owners. 
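+        # (After this fix-up exactly one owner remains; every other claimant
+        # has been demoted to importing state and is handled by the cases below.)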
+        if owners.length > 1
+            owner = get_node_with_most_keys_in_slot(owners,slot)
+            owners.each{|n|
+                next if n == owner
+                n.r.cluster('delslots',slot)
+                n.r.cluster('setslot',slot,'importing',owner.info[:name])
+                importing.delete(n) # Avoid duplicates
+                importing << n
+            }
+            owner.r.cluster('bumpepoch')
+        end
+
+        # Case 1: The slot is in migrating state in one node, and in
+        # importing state in another node. That's trivial to address.
+        if migrating.length == 1 && importing.length == 1
+            move_slot(migrating[0],importing[0],slot,:dots=>true,:fix=>true)
+        # Case 2: There are multiple nodes that claim the slot as importing,
+        # they probably got keys about the slot after a restart so opened
+        # the slot. In this case we just move all the keys to the owner
+        # according to the configuration.
+        elsif migrating.length == 0 && importing.length > 0
+            xputs ">>> Moving all the #{slot} slot keys to its owner #{owner}"
+            importing.each {|node|
+                next if node == owner
+                move_slot(node,owner,slot,:dots=>true,:fix=>true,:cold=>true)
+                xputs ">>> Setting #{slot} as STABLE in #{node}"
+                node.r.cluster("setslot",slot,"stable")
+            }
+        # Case 3: There are no nodes claiming the slot in importing state, but
+        # there is one migrating node that doesn't actually hold any keys. We
+        # can just close the slot, probably a reshard interrupted in the middle.
+        elsif importing.length == 0 && migrating.length == 1 &&
+              migrating[0].r.cluster("getkeysinslot",slot,10).length == 0
+            migrating[0].r.cluster("setslot",slot,"stable")
+        else
+            xputs "[ERR] Sorry, Redis-trib can't fix this slot yet (work in progress). Slot is set as migrating in #{migrating.join(",")}, as importing in #{importing.join(",")}, owner is #{owner}"
+        end
+    end
+
+    # Check if all the nodes agree about the cluster configuration.
+    def check_config_consistency
+        if !is_config_consistent?
+            cluster_error "[ERR] Nodes don't agree about configuration!"
+        else
+            xputs "[OK] All nodes agree about slots configuration."
+        end
+    end
+
+    def is_config_consistent?
+        signatures = []
+        @nodes.each{|n|
+            signatures << n.get_config_signature
+        }
+        return signatures.uniq.length == 1
+    end
+
+    def wait_cluster_join
+        print "Waiting for the cluster to join"
+        while !is_config_consistent?
+            print "."
+            STDOUT.flush
+            sleep 1
+        end
+        print "\n"
+    end
+
+    def alloc_slots
+        nodes_count = @nodes.length
+        masters_count = @nodes.length / (@replicas+1)
+        masters = []
+
+        # The first step is to split instances by IP. This is useful as
+        # we'll try to allocate master nodes in different physical machines
+        # (as much as possible) and to allocate slaves of a given master in
+        # different physical machines as well.
+        #
+        # This code simply assumes that if the IP is different, then it is
+        # more likely that the instance is running in a different physical
+        # host, or at least in a different virtual machine.
+        ips = {}
+        @nodes.each{|n|
+            ips[n.info[:host]] = [] if !ips[n.info[:host]]
+            ips[n.info[:host]] << n
+        }
+
+        # Select master instances.
+        puts "Using #{masters_count} masters:"
+        interleaved = []
+        stop = false
+        while not stop do
+            # Take one node from each IP until we run out of nodes
+            # across every IP.
+            ips.each do |ip,nodes|
+                if nodes.empty?
+                    # If this IP has no remaining nodes, check for termination.
+                    if interleaved.length == nodes_count
+                        # Stop when 'interleaved' has accumulated all nodes.
+                        stop = true
+                        next
+                    end
+                else
+                    # Else, move one node from this IP to 'interleaved'.
+                    interleaved.push nodes.shift
+                end
+            end
+        end
+
+        masters = interleaved.slice!(0, masters_count)
+        nodes_count -= masters.length
+
+        masters.each{|m| puts m}
+
+        # Alloc slots on masters.
+        slots_per_node = ClusterHashSlots.to_f / masters_count
+        first = 0
+        cursor = 0.0
+        masters.each_with_index{|n,masternum|
+            last = (cursor+slots_per_node-1).round
+            if last > ClusterHashSlots || masternum == masters.length-1
+                last = ClusterHashSlots-1
+            end
+            last = first if last < first # Min step is 1.
+            n.add_slots first..last
+            first = last+1
+            cursor += slots_per_node
+        }
+
+        # Select N replicas for every master.
+        # We try to split the replicas among all the IPs with spare nodes,
+        # trying to avoid the host where the master is running, if possible.
+        #
+        # Note that we loop twice. The first loop assigns the requested
+        # number of replicas to each master. The second loop assigns any
+        # remaining instances as extra replicas to masters. Some masters
+        # may end up with more than their requested number of replicas, but
+        # all nodes will be used.
+        assignment_verbose = false
+
+        [:requested,:unused].each do |assign|
+            masters.each do |m|
+                assigned_replicas = 0
+                while assigned_replicas < @replicas
+                    break if nodes_count == 0
+                    if assignment_verbose
+                        if assign == :requested
+                            puts "Requesting total of #{@replicas} replicas " \
+                                 "(#{assigned_replicas} replicas assigned " \
+                                 "so far with #{nodes_count} total remaining)."
+                        elsif assign == :unused
+                            puts "Assigning extra instance to replication " \
+                                 "role too (#{nodes_count} remaining)."
+                        end
+                    end
+
+                    # Return the first node not matching our current master.
+                    node = interleaved.find{|n| n.info[:host] != m.info[:host]}
+
+                    # If we found a node, use it as a best-first match.
+                    # Otherwise, we didn't find a node on a different IP, so we
+                    # go ahead and use a same-IP replica.
+                    if node
+                        slave = node
+                        interleaved.delete node
+                    else
+                        slave = interleaved.shift
+                    end
+                    slave.set_as_replica(m.info[:name])
+                    nodes_count -= 1
+                    assigned_replicas += 1
+                    puts "Adding replica #{slave} to #{m}"
+
+                    # If we are in the "assign extra nodes" loop,
+                    # we want to assign one extra replica to each
+                    # master before repeating masters.
+                    # This break lets us assign extra replicas to masters
+                    # in a round-robin way.
+                    break if assign == :unused
+                end
+            end
+        end
+    end
+
+    def flush_nodes_config
+        @nodes.each{|n|
+            n.flush_node_config
+        }
+    end
+
+    def show_nodes
+        @nodes.each{|n|
+            xputs n.info_string
+        }
+    end
+
+    # Redis Cluster's config epoch collision resolution code is able to
+    # eventually set a different epoch to each node after a new cluster is
+    # created, but it is slow compared to assigning a progressive config
+    # epoch to each node before joining the cluster. However we make just a
+    # best-effort attempt here, since it is not a problem if we fail.
+    def assign_config_epoch
+        config_epoch = 1
+        @nodes.each{|n|
+            begin
+                n.r.cluster("set-config-epoch",config_epoch)
+            rescue
+            end
+            config_epoch += 1
+        }
+    end
+
+    def join_cluster
+        # We use a brute force approach to make sure the nodes will meet
+        # each other, that is, sending CLUSTER MEET messages to all the nodes
+        # about the very same node.
+        # Thanks to gossip this information should propagate across all the
+        # cluster in a matter of seconds.
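+        # E.g. (hypothetical three-node cluster): with nodes A, B and C, both
+        # B and C are sent CLUSTER MEET A, and gossip then introduces B and C
+        # to each other.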
+        first = false
+        @nodes.each{|n|
+            if !first then first = n.info; next; end # Skip the first node
+            n.r.cluster("meet",first[:host],first[:port])
+        }
+    end
+
+    def yes_or_die(msg)
+        print "#{msg} (type 'yes' to accept): "
+        STDOUT.flush
+        if !(STDIN.gets.chomp.downcase == "yes")
+            xputs "*** Aborting..."
+            exit 1
+        end
+    end
+
+    def load_cluster_info_from_node(nodeaddr)
+        node = ClusterNode.new(nodeaddr)
+        node.connect(:abort => true)
+        node.assert_cluster
+        node.load_info(:getfriends => true)
+        add_node(node)
+        node.friends.each{|f|
+            next if f[:flags].index("noaddr") ||
+                    f[:flags].index("disconnected") ||
+                    f[:flags].index("fail")
+            fnode = ClusterNode.new(f[:addr])
+            fnode.connect()
+            next if !fnode.r
+            begin
+                fnode.load_info()
+                add_node(fnode)
+            rescue => e
+                xputs "[ERR] Unable to load info for node #{fnode}"
+            end
+        }
+        populate_nodes_replicas_info
+    end
+
+    # This function is called by load_cluster_info_from_node in order to
+    # attach a list of replicas to every node's info.
+    def populate_nodes_replicas_info
+        # Start by adding the new field to every node.
+        @nodes.each{|n|
+            n.info[:replicas] = []
+        }
+
+        # Populate the replicas field using the replicate field of slave
+        # nodes.
+        @nodes.each{|n|
+            if n.info[:replicate]
+                master = get_node_by_name(n.info[:replicate])
+                if !master
+                    xputs "*** WARNING: #{n} claims to be slave of unknown node ID #{n.info[:replicate]}."
+                else
+                    master.info[:replicas] << n
+                end
+            end
+        }
+    end
+
+    # Given a list of source nodes, return a "resharding plan" describing
+    # which slots to move in order to move "numslots" slots to another
+    # instance.
+    def compute_reshard_table(sources,numslots)
+        moved = []
+        # Sort from bigger to smaller instance, for two reasons:
+        # 1) If we take fewer slots than there are instances, it is better to
+        #    start taking them from the biggest instances.
+        # 2) We take one slot more from the first instance in the case of not
+        #    perfect divisibility. E.g. if we have 3 nodes and need to move 10
+        #    slots, we take 4 from the first, and 3 from the rest. So the
+        #    biggest is always the first.
+        sources = sources.sort{|a,b| b.slots.length <=> a.slots.length}
+        source_tot_slots = sources.inject(0) {|sum,source|
+            sum+source.slots.length
+        }
+        sources.each_with_index{|s,i|
+            # Every node will provide a number of slots proportional to the
+            # slots it has assigned.
+            n = (numslots.to_f/source_tot_slots*s.slots.length)
+            if i == 0
+                n = n.ceil
+            else
+                n = n.floor
+            end
+            s.slots.keys.sort[(0...n)].each{|slot|
+                if moved.length < numslots
+                    moved << {:source => s, :slot => slot}
+                end
+            }
+        }
+        return moved
+    end
+
+    def show_reshard_table(table)
+        table.each{|e|
+            puts "    Moving slot #{e[:slot]} from #{e[:source].info[:name]}"
+        }
+    end
+
+    # Move slots between source and target nodes using MIGRATE.
+    #
+    # Options:
+    # :dots   -- Print a dot for every moved key.
+    # :fix    -- We are moving in the context of a fix. Use REPLACE.
+    # :cold   -- Move keys without opening slots / reconfiguring the nodes.
+    # :update -- Update nodes.info[:slots] for source/target nodes.
+    # :quiet  -- Don't print info messages.
+    def move_slot(source,target,slot,o={})
+        o = {:pipeline => MigrateDefaultPipeline}.merge(o)
+
+        # We start marking the slot as importing in the destination node,
+        # and the slot as migrating in the source node. Note that the order of
+        # the operations is important, as otherwise a client may be redirected
+        # to the target node that does not yet know it is importing this slot.
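+        # E.g. (hypothetical): if the source were marked as migrating before
+        # the target was marked as importing, a client could be redirected to
+        # the target with -ASK and be refused there, since the target would
+        # not yet accept queries for the slot.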
+        if !o[:quiet]
+            print "Moving slot #{slot} from #{source} to #{target}: "
+            STDOUT.flush
+        end
+
+        if !o[:cold]
+            target.r.cluster("setslot",slot,"importing",source.info[:name])
+            source.r.cluster("setslot",slot,"migrating",target.info[:name])
+        end
+        # Migrate all the keys from source to target using the MIGRATE command.
+        while true
+            keys = source.r.cluster("getkeysinslot",slot,o[:pipeline])
+            break if keys.length == 0
+            begin
+                source.r.client.call(["migrate",target.info[:host],target.info[:port],"",0,@timeout,:keys,*keys])
+            rescue => e
+                if o[:fix] && e.to_s =~ /BUSYKEY/
+                    xputs "*** Target key exists. Replacing it for FIX."
+                    source.r.client.call(["migrate",target.info[:host],target.info[:port],"",0,@timeout,:replace,:keys,*keys])
+                else
+                    puts ""
+                    xputs "[ERR] Calling MIGRATE: #{e}"
+                    exit 1
+                end
+            end
+            print "."*keys.length if o[:dots]
+            STDOUT.flush
+        end
+
+        puts if !o[:quiet]
+        # Set the new node as the owner of the slot in all the known nodes.
+        if !o[:cold]
+            @nodes.each{|n|
+                next if n.has_flag?("slave")
+                n.r.cluster("setslot",slot,"node",target.info[:name])
+            }
+        end
+
+        # Update the node's logical config.
+        if o[:update] then
+            source.info[:slots].delete(slot)
+            target.info[:slots][slot] = true
+        end
+    end
+
+    # redis-trib subcommand implementations.
+
+    def check_cluster_cmd(argv,opt)
+        load_cluster_info_from_node(argv[0])
+        check_cluster
+    end
+
+    def info_cluster_cmd(argv,opt)
+        load_cluster_info_from_node(argv[0])
+        show_cluster_info
+    end
+
+    def rebalance_cluster_cmd(argv,opt)
+        opt = {
+            'pipeline' => MigrateDefaultPipeline,
+            'threshold' => RebalanceDefaultThreshold
+        }.merge(opt)
+
+        # Load nodes info before parsing options, otherwise we can't
+        # handle --weight.
+        load_cluster_info_from_node(argv[0])
+
+        # Options parsing
+        threshold = opt['threshold'].to_i
+        autoweights = opt['auto-weights']
+        weights = {}
+        opt['weight'].each{|w|
+            fields = w.split("=")
+            node = get_node_by_abbreviated_name(fields[0])
+            if !node || !node.has_flag?("master")
+                puts "*** No such master node #{fields[0]}"
+                exit 1
+            end
+            weights[node.info[:name]] = fields[1].to_f
+        } if opt['weight']
+        useempty = opt['use-empty-masters']
+
+        # Assign a weight to each node, and compute the total cluster weight.
+        total_weight = 0
+        nodes_involved = 0
+        @nodes.each{|n|
+            if n.has_flag?("master")
+                next if !useempty && n.slots.length == 0
+                n.info[:w] = weights[n.info[:name]] ? weights[n.info[:name]] : 1
+                total_weight += n.info[:w]
+                nodes_involved += 1
+            end
+        }
+
+        # Check the cluster: only proceed if it looks sane.
+        check_cluster(:quiet => true)
+        if @errors.length != 0
+            puts "*** Please fix your cluster problems before rebalancing"
+            exit 1
+        end
+
+        # Calculate the slots balance for each node. It's the number of
+        # slots the node should lose (if positive) or gain (if negative)
+        # in order to be balanced.
+        threshold = opt['threshold'].to_f
+        threshold_reached = false
+        @nodes.each{|n|
+            if n.has_flag?("master")
+                next if !n.info[:w]
+                expected = ((ClusterHashSlots.to_f / total_weight) *
+                            n.info[:w]).to_i
+                n.info[:balance] = n.slots.length - expected
+                # Compute the percentage of difference between the
+                # expected number of slots and the real one, to see
+                # if it's over the threshold specified by the user.
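+                # Worked example (hypothetical numbers): with expected = 100
+                # slots and 120 slots actually assigned, err_perc below is
+                # |100 - 100.0*100/120| ~= 16.67, so a 10% threshold would
+                # flag this node while a 20% threshold would not.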
+                over_threshold = false
+                if threshold > 0
+                    if n.slots.length > 0
+                        err_perc = (100-(100.0*expected/n.slots.length)).abs
+                        over_threshold = true if err_perc > threshold
+                    elsif expected > 0
+                        over_threshold = true
+                    end
+                end
+                threshold_reached = true if over_threshold
+            end
+        }
+        if !threshold_reached
+            xputs "*** No rebalancing needed! All nodes are within the #{threshold}% threshold."
+            return
+        end
+
+        # Only consider nodes we want to change.
+        sn = @nodes.select{|n|
+            n.has_flag?("master") && n.info[:w]
+        }
+
+        # Because of rounding, it is possible that the balance of all nodes
+        # does not sum to 0. Make sure that nodes that have to provide
+        # slots are always matched by nodes receiving slots.
+        total_balance = sn.map{|x| x.info[:balance]}.reduce{|a,b| a+b}
+        while total_balance > 0
+            sn.each{|n|
+                if n.info[:balance] < 0 && total_balance > 0
+                    n.info[:balance] -= 1
+                    total_balance -= 1
+                end
+            }
+        end
+
+        # Sort nodes by their slots balance.
+        sn = sn.sort{|a,b|
+            a.info[:balance] <=> b.info[:balance]
+        }
+
+        xputs ">>> Rebalancing across #{nodes_involved} nodes. Total weight = #{total_weight}"
+
+        if $verbose
+            sn.each{|n|
+                puts "#{n} balance is #{n.info[:balance]} slots"
+            }
+        end
+
+        # Now the 'sn' array starts with the nodes that should get slots
+        # and ends with the nodes that must give slots.
+        # We take two indexes, one at the start, and one at the end,
+        # incrementing or decrementing the indexes accordingly until we
+        # find nodes that need to get/provide slots.
+        dst_idx = 0
+        src_idx = sn.length - 1
+
+        while dst_idx < src_idx
+            dst = sn[dst_idx]
+            src = sn[src_idx]
+            numslots = [dst.info[:balance],src.info[:balance]].map{|n|
+                n.abs
+            }.min
+
+            if numslots > 0
+                puts "Moving #{numslots} slots from #{src} to #{dst}"
+
+                # Actually move the slots.
+                reshard_table = compute_reshard_table([src],numslots)
+                if reshard_table.length != numslots
+                    xputs "*** Assertion failed: Reshard table != number of slots"
+                    exit 1
+                end
+                if opt['simulate']
+                    print "#"*reshard_table.length
+                else
+                    reshard_table.each{|e|
+                        move_slot(e[:source],dst,e[:slot],
+                            :quiet=>true,
+                            :dots=>false,
+                            :update=>true,
+                            :pipeline=>opt['pipeline'])
+                        print "#"
+                        STDOUT.flush
+                    }
+                end
+                puts
+            end
+
+            # Update nodes balance.
+            dst.info[:balance] += numslots
+            src.info[:balance] -= numslots
+            dst_idx += 1 if dst.info[:balance] == 0
+            src_idx -= 1 if src.info[:balance] == 0
+        end
+    end
+
+    def fix_cluster_cmd(argv,opt)
+        @fix = true
+        @timeout = opt['timeout'].to_i if opt['timeout']
+
+        load_cluster_info_from_node(argv[0])
+        check_cluster
+    end
+
+    def reshard_cluster_cmd(argv,opt)
+        opt = {'pipeline' => MigrateDefaultPipeline}.merge(opt)
+
+        load_cluster_info_from_node(argv[0])
+        check_cluster
+        if @errors.length != 0
+            puts "*** Please fix your cluster problems before resharding"
+            exit 1
+        end
+
+        @timeout = opt['timeout'].to_i if opt['timeout']
+
+        # Get the number of slots.
+        if opt['slots']
+            numslots = opt['slots'].to_i
+        else
+            numslots = 0
+            while numslots <= 0 or numslots > ClusterHashSlots
+                print "How many slots do you want to move (from 1 to #{ClusterHashSlots})? "
+                numslots = STDIN.gets.to_i
+            end
+        end
+
+        # Get the target instance.
+        if opt['to']
+            target = get_node_by_name(opt['to'])
+            if !target || target.has_flag?("slave")
+                xputs "*** The specified node is not known or not a master, please retry."
+                exit 1
+            end
+        else
+            target = nil
+            while not target
+                print "What is the receiving node ID? "
" + target = get_node_by_name(STDIN.gets.chop) + if !target || target.has_flag?("slave") + xputs "*** The specified node is not known or not a master, please retry." + target = nil + end + end + end + + # Get the source instances + sources = [] + if opt['from'] + opt['from'].split(',').each{|node_id| + if node_id == "all" + sources = "all" + break + end + src = get_node_by_name(node_id) + if !src || src.has_flag?("slave") + xputs "*** The specified node is not known or is not a master, please retry." + exit 1 + end + sources << src + } + else + xputs "Please enter all the source node IDs." + xputs " Type 'all' to use all the nodes as source nodes for the hash slots." + xputs " Type 'done' once you entered all the source nodes IDs." + while true + print "Source node ##{sources.length+1}:" + line = STDIN.gets.chop + src = get_node_by_name(line) + if line == "done" + break + elsif line == "all" + sources = "all" + break + elsif !src || src.has_flag?("slave") + xputs "*** The specified node is not known or is not a master, please retry." + elsif src.info[:name] == target.info[:name] + xputs "*** It is not possible to use the target node as source node." + else + sources << src + end + end + end + + if sources.length == 0 + puts "*** No source nodes given, operation aborted" + exit 1 + end + + # Handle soures == all. + if sources == "all" + sources = [] + @nodes.each{|n| + next if n.info[:name] == target.info[:name] + next if n.has_flag?("slave") + sources << n + } + end + + # Check if the destination node is the same of any source nodes. + if sources.index(target) + xputs "*** Target node is also listed among the source nodes!" + exit 1 + end + + puts "\nReady to move #{numslots} slots." + puts " Source nodes:" + sources.each{|s| puts " "+s.info_string} + puts " Destination node:" + puts " #{target.info_string}" + reshard_table = compute_reshard_table(sources,numslots) + puts " Resharding plan:" + show_reshard_table(reshard_table) + if !opt['yes'] + print "Do you want to proceed with the proposed reshard plan (yes/no)? " + yesno = STDIN.gets.chop + exit(1) if (yesno != "yes") + end + reshard_table.each{|e| + move_slot(e[:source],target,e[:slot], + :dots=>true, + :pipeline=>opt['pipeline']) + } + end + + # This is an helper function for create_cluster_cmd that verifies if + # the number of nodes and the specified replicas have a valid configuration + # where there are at least three master nodes and enough replicas per node. + def check_create_parameters + masters = @nodes.length/(@replicas+1) + if masters < 3 + puts "*** ERROR: Invalid configuration for cluster creation." + puts "*** Redis Cluster requires at least 3 master nodes." + puts "*** This is not possible with #{@nodes.length} nodes and #{@replicas} replicas per node." + puts "*** At least #{3*(@replicas+1)} nodes are required." + exit 1 + end + end + + def create_cluster_cmd(argv,opt) + opt = {'replicas' => 0}.merge(opt) + @replicas = opt['replicas'].to_i + + xputs ">>> Creating cluster" + argv[0..-1].each{|n| + node = ClusterNode.new(n) + node.connect(:abort => true) + node.assert_cluster + node.load_info + node.assert_empty + add_node(node) + } + check_create_parameters + xputs ">>> Performing hash slots allocation on #{@nodes.length} nodes..." + alloc_slots + show_nodes + yes_or_die "Can I set the above configuration?" 
+        flush_nodes_config
+        xputs ">>> Nodes configuration updated"
+        xputs ">>> Assign a different config epoch to each node"
+        assign_config_epoch
+        xputs ">>> Sending CLUSTER MEET messages to join the cluster"
+        join_cluster
+        # Give one second for the join to start, in order to avoid that
+        # wait_cluster_join will find all the nodes agree about the config as
+        # they are still empty with unassigned slots.
+        sleep 1
+        wait_cluster_join
+        flush_nodes_config # Useful for the replicas.
+        check_cluster
+    end
+
+    def addnode_cluster_cmd(argv,opt)
+        xputs ">>> Adding node #{argv[0]} to cluster #{argv[1]}"
+
+        # Check the existing cluster.
+        load_cluster_info_from_node(argv[1])
+        check_cluster
+
+        # If --master-id was specified, try to resolve it now so that we
+        # abort before starting the node configuration.
+        if opt['slave']
+            if opt['master-id']
+                master = get_node_by_name(opt['master-id'])
+                if !master
+                    xputs "[ERR] No such master ID #{opt['master-id']}"
+                    exit 1
+                end
+            else
+                master = get_master_with_least_replicas
+                xputs "Automatically selected master #{master}"
+            end
+        end
+
+        # Add the new node.
+        new = ClusterNode.new(argv[0])
+        new.connect(:abort => true)
+        new.assert_cluster
+        new.load_info
+        new.assert_empty
+        first = @nodes.first.info
+        add_node(new)
+
+        # Send a CLUSTER MEET command to the new node.
+        xputs ">>> Send CLUSTER MEET to node #{new} to make it join the cluster."
+        new.r.cluster("meet",first[:host],first[:port])
+
+        # Additional configuration is needed if the node is added as
+        # a slave.
+        if opt['slave']
+            wait_cluster_join
+            xputs ">>> Configure node as replica of #{master}."
+            new.r.cluster("replicate",master.info[:name])
+        end
+        xputs "[OK] New node added correctly."
+    end
+
+    def delnode_cluster_cmd(argv,opt)
+        id = argv[1].downcase
+        xputs ">>> Removing node #{id} from cluster #{argv[0]}"
+
+        # Load cluster information.
+        load_cluster_info_from_node(argv[0])
+
+        # Check that the node exists and is not empty.
+        node = get_node_by_name(id)
+
+        if !node
+            xputs "[ERR] No such node ID #{id}"
+            exit 1
+        end
+
+        if node.slots.length != 0
+            xputs "[ERR] Node #{node} is not empty! Reshard data away and try again."
+            exit 1
+        end
+
+        # Send CLUSTER FORGET to all the nodes but the node to remove.
+        xputs ">>> Sending CLUSTER FORGET messages to the cluster..."
+        @nodes.each{|n|
+            next if n == node
+            if n.info[:replicate] && n.info[:replicate].downcase == id
+                # Reconfigure the slave to replicate from some other node.
+                master = get_master_with_least_replicas
+                xputs ">>> #{n} as replica of #{master}"
+                n.r.cluster("replicate",master.info[:name])
+            end
+            n.r.cluster("forget",argv[1])
+        }
+
+        # Finally shut down the node.
+        xputs ">>> SHUTDOWN the node."
+        node.r.shutdown
+    end
+
+    def set_timeout_cluster_cmd(argv,opt)
+        timeout = argv[1].to_i
+        if timeout < 100
+            puts "Setting a node timeout of less than 100 milliseconds is a bad idea."
+            exit 1
+        end
+
+        # Load cluster information.
+        load_cluster_info_from_node(argv[0])
+        ok_count = 0
+        err_count = 0
+
+        # Send CONFIG SET cluster-node-timeout to every cluster node.
+        xputs ">>> Reconfiguring node timeout in every cluster node..."
+        @nodes.each{|n|
+            begin
+                n.r.config("set","cluster-node-timeout",timeout)
+                n.r.config("rewrite")
+                ok_count += 1
+                xputs "*** New timeout set for #{n}"
+            rescue => e
+                puts "ERR setting node-timeout for #{n}: #{e}"
+                err_count += 1
+            end
+        }
+        xputs ">>> New node timeout set. #{ok_count} OK, #{err_count} ERR."
+    end
+
+    def call_cluster_cmd(argv,opt)
+        cmd = argv[1..-1]
+        cmd[0] = cmd[0].upcase
+
+        # Load cluster information.
+        load_cluster_info_from_node(argv[0])
+        xputs ">>> Calling #{cmd.join(" ")}"
+        @nodes.each{|n|
+            begin
+                res = n.r.send(*cmd)
+                puts "#{n}: #{res}"
+            rescue => e
+                puts "#{n}: #{e}"
+            end
+        }
+    end
+
+    def import_cluster_cmd(argv,opt)
+        source_addr = opt['from']
+        xputs ">>> Importing data from #{source_addr} to cluster #{argv[0]}"
+        use_copy = opt['copy']
+        use_replace = opt['replace']
+
+        # Check the existing cluster.
+        load_cluster_info_from_node(argv[0])
+        check_cluster
+
+        # Connect to the source node.
+        xputs ">>> Connecting to the source Redis instance"
+        src_host,src_port = source_addr.split(":")
+        source = Redis.new(:host => src_host, :port => src_port)
+        if source.info['cluster_enabled'].to_i == 1
+            xputs "[ERR] The source node should not be a cluster node."
+            exit 1
+        end
+        xputs "*** Importing #{source.dbsize} keys from DB 0"
+
+        # Build a slot -> node map.
+        slots = {}
+        @nodes.each{|n|
+            n.slots.each{|s,_|
+                slots[s] = n
+            }
+        }
+
+        # Use SCAN to iterate over the keys, migrating each one to the
+        # right node as needed.
+        cursor = nil
+        while cursor != 0
+            cursor,keys = source.scan(cursor, :count => 1000)
+            cursor = cursor.to_i
+            keys.each{|k|
+                # Migrate keys using the MIGRATE command.
+                slot = key_to_slot(k)
+                target = slots[slot]
+                print "Migrating #{k} to #{target}: "
+                STDOUT.flush
+                begin
+                    cmd = ["migrate",target.info[:host],target.info[:port],k,0,@timeout]
+                    cmd << :copy if use_copy
+                    cmd << :replace if use_replace
+                    source.client.call(cmd)
+                rescue => e
+                    puts e
+                else
+                    puts "OK"
+                end
+            }
+        end
+    end
+
+    def help_cluster_cmd(argv,opt)
+        show_help
+        exit 0
+    end
+
+    # Parse the options for the specific command "cmd".
+    # Returns a hash populated with option => value pairs, and the index of
+    # the first non-option argument in ARGV.
+    def parse_options(cmd)
+        idx = 1 # Current index into ARGV
+        options = {}
+        while idx < ARGV.length
+            if ARGV[idx][0..1] == "--"
+                option = ARGV[idx][2..-1]
+                idx += 1
+
+                # --verbose is a global option.
+                if option == "verbose"
+                    $verbose = true
+                    next
+                end
+
+                if ALLOWED_OPTIONS[cmd] == nil || ALLOWED_OPTIONS[cmd][option] == nil
+                    puts "Unknown option '#{option}' for command '#{cmd}'"
+                    exit 1
+                end
+                if ALLOWED_OPTIONS[cmd][option] != false
+                    value = ARGV[idx]
+                    idx += 1
+                else
+                    value = true
+                end
+
+                # If the option is set to [], it's a multiple arguments
+                # option. We just queue every new value into an array.
+                if ALLOWED_OPTIONS[cmd][option] == []
+                    options[option] = [] if !options[option]
+                    options[option] << value
+                else
+                    options[option] = value
+                end
+            else
+                # Remaining arguments are not options.
+                break
+            end
+        end
+
+        # Enforce mandatory options.
+        if ALLOWED_OPTIONS[cmd]
+            ALLOWED_OPTIONS[cmd].each {|option,val|
+                if !options[option] && val == :required
+                    puts "Option '--#{option}' is required "+ \
+                         "for subcommand '#{cmd}'"
+                    exit 1
+                end
+            }
+        end
+        return options,idx
+    end
+end
+
+#################################################################################
+# Libraries
+#
+# We try not to depend on external libs, since this is a critical part
+# of Redis Cluster.
+#################################################################################
+
+# This is the CRC16 algorithm used by Redis Cluster to hash keys.
+# Implementation according to CCITT standards.
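+#
+# As a quick sanity check (a sketch, assuming the module defined below has
+# been loaded, e.g. in irb):
+#
+#   RedisClusterCRC16.crc16("123456789") == 0x31C3 # => true, matching the
+#                                                  #    reference output below.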
+#
+# This is actually the XMODEM CRC 16 algorithm, using the
+# following parameters:
+#
+# Name                       : "XMODEM", also known as "ZMODEM", "CRC-16/ACORN"
+# Width                      : 16 bit
+# Poly                       : 1021 (That is actually x^16 + x^12 + x^5 + 1)
+# Initialization             : 0000
+# Reflect Input byte         : False
+# Reflect Output CRC         : False
+# Xor constant to output CRC : 0000
+# Output for "123456789"     : 31C3
+
+module RedisClusterCRC16
+    def RedisClusterCRC16.crc16(bytes)
+        crc = 0
+        bytes.each_byte{|b|
+            crc = ((crc<<8) & 0xffff) ^ XMODEMCRC16Lookup[((crc>>8)^b) & 0xff]
+        }
+        crc
+    end
+
+private
+    XMODEMCRC16Lookup = [
+        0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,
+        0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,
+        0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,
+        0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,
+        0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,
+        0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,
+        0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,
+        0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,
+        0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,
+        0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,
+        0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,
+        0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,
+        0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,
+        0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,
+        0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,
+        0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,
+        0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,
+        0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,
+        0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,
+        0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,
+        0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,
+        0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,
+        0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,
+        0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,
+        0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,
+        0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,
+        0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,
+        0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,
+        0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,
+        0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,
+        0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,
+        0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0
+    ]
+end
+
+# Turn a key name into the corresponding Redis Cluster slot.
+def key_to_slot(key)
+    # Only hash what is inside {...} if there is such a pattern in the key.
+    # Note that the specification requires hashing the content between the
+    # first { and the first } that follows it. If we find {} with nothing in
+    # the middle, the whole key is hashed as usual.
+    s = key.index "{"
+    if s
+        e = key.index "}",s+1
+        if e && e != s+1
+            key = key[s+1..e-1]
+        end
+    end
+    RedisClusterCRC16.crc16(key) % 16384
+end
+
+#################################################################################
+# Definition of commands
+#################################################################################
+
+COMMANDS={
+ "create" => ["create_cluster_cmd", -2, "host1:port1 ... 
hostN:portN"], + "check" => ["check_cluster_cmd", 2, "host:port"], + "info" => ["info_cluster_cmd", 2, "host:port"], + "fix" => ["fix_cluster_cmd", 2, "host:port"], + "reshard" => ["reshard_cluster_cmd", 2, "host:port"], + "rebalance" => ["rebalance_cluster_cmd", -2, "host:port"], + "add-node" => ["addnode_cluster_cmd", 3, "new_host:new_port existing_host:existing_port"], + "del-node" => ["delnode_cluster_cmd", 3, "host:port node_id"], + "set-timeout" => ["set_timeout_cluster_cmd", 3, "host:port milliseconds"], + "call" => ["call_cluster_cmd", -3, "host:port command arg arg .. arg"], + "import" => ["import_cluster_cmd", 2, "host:port"], + "help" => ["help_cluster_cmd", 1, "(show this help)"] +} + +ALLOWED_OPTIONS={ + "create" => {"replicas" => true}, + "add-node" => {"slave" => false, "master-id" => true}, + "import" => {"from" => :required, "copy" => false, "replace" => false}, + "reshard" => {"from" => true, "to" => true, "slots" => true, "yes" => false, "timeout" => true, "pipeline" => true}, + "rebalance" => {"weight" => [], "auto-weights" => false, "use-empty-masters" => false, "timeout" => true, "simulate" => false, "pipeline" => true, "threshold" => true}, + "fix" => {"timeout" => MigrateDefaultTimeout}, +} + +def show_help + puts "Usage: redis-trib \n\n" + COMMANDS.each{|k,v| + o = "" + puts " #{k.ljust(15)} #{v[2]}" + if ALLOWED_OPTIONS[k] + ALLOWED_OPTIONS[k].each{|optname,has_arg| + puts " --#{optname}" + (has_arg ? " " : "") + } + end + } + puts "\nFor check, fix, reshard, del-node, set-timeout you can specify the host and port of any working node in the cluster.\n" +end + +# Sanity check +if ARGV.length == 0 + show_help + exit 1 +end + +rt = RedisTrib.new +cmd_spec = COMMANDS[ARGV[0].downcase] +if !cmd_spec + puts "Unknown redis-trib subcommand '#{ARGV[0]}'" + exit 1 +end + +# Parse options +cmd_options,first_non_option = rt.parse_options(ARGV[0].downcase) +rt.check_arity(cmd_spec[1],ARGV.length-(first_non_option-1)) + +# Dispatch +rt.send(cmd_spec[0],ARGV[first_non_option..-1],cmd_options) \ No newline at end of file From b4936f62afd404a89b277cd8982feb007f3b6c44 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 8 Jun 2016 12:18:37 +0100 Subject: [PATCH 110/769] Check that return values from different redis backends match --- .../app/coffee/RedisBackend.coffee | 115 ++++--- .../RedisBackend/RedisBackendTests.coffee | 307 ++++++++++++++++++ 2 files changed, 383 insertions(+), 39 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index e242beaec2..125aa29d03 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -1,5 +1,7 @@ Settings = require "settings-sharelatex" async = require "async" +_ = require "underscore" +logger = require "logger-sharelatex" class Client constructor: (@clients) -> @@ -10,6 +12,7 @@ class Client rclient: client.rclient.multi() key_schema: client.key_schema primary: client.primary + driver: client.driver } ) @@ -19,56 +22,84 @@ class MultiClient exec: (callback) -> jobs = @clients.map (client) -> (cb) -> - console.error "EXEC", client.rclient.queue - client.rclient.exec (result...) 
-> - console.error "EXEC RESULT", result + client.rclient.exec (error, result) -> + if client.driver == "ioredis" + # ioredis returns an results like: + # [ [null, 42], [null, "foo"] ] + # where the first entries in each 2-tuple are + # presumably errors for each individual command, + # and the second entry is the result. We need to transform + # this into the same result as the old redis driver: + # [ 42, "foo" ] + filtered_result = [] + for entry in result or [] + if entry[0]? + return cb(entry[0]) + else + filtered_result.push entry[1] + result = filtered_result + if client.primary # Return this result as the actual result - callback(result...) + callback(error, result) # Send the rest through for comparison - cb(result...) + cb(error, result) async.parallel jobs, (error, results) -> - console.error "EXEC RESULTS", results + if error? + logger.error {err: error}, "error in redis backend" + else + compareResults(results) -COMMANDS = [ - "get", "smembers", "set", "srem", "sadd", "del", "lrange", - "llen", "rpush", "expire", "ltrim", "incr" -] -for command in COMMANDS - do (command) -> - Client.prototype[command] = (key_builder, args..., callback) -> - async.parallel @clients.map (client) -> +COMMANDS = { + "get": 0, + "smembers": 0, + "set": 0, + "srem": 0, + "sadd": 0, + "del": 0, + "lrange": 0, + "llen": 0, + "rpush": 0, + "expire": 0, + "ltrim": 0, + "incr": 0, + "eval": 2 +} +for command, key_pos of COMMANDS + do (command, key_pos) -> + Client.prototype[command] = (args..., callback) -> + jobs = @clients.map (client) -> (cb) -> + key_builder = args[key_pos] key = key_builder(client.key_schema) - console.error "COMMAND", command, key, args - client.rclient[command] key, args..., (result...) -> - console.log "RESULT", command, result + args_with_key = args.slice(0) + args_with_key[key_pos] = key + client.rclient[command] args_with_key..., (error, result...) -> if client.primary # Return this result as the actual result - callback?(result...) + callback(error, result...) # Send the rest through for comparison - cb(result...) - , (error, results) -> - console.log "#{command} RESULTS", results + cb(error, result...) + async.parallel jobs, (error, results) -> + if error? + logger.error {err: error}, "error in redis backend" + else + compareResults(results) - MultiClient.prototype[command] = (key_builder, args...) -> + MultiClient.prototype[command] = (args...) -> for client in @clients + key_builder = args[key_pos] key = key_builder(client.key_schema) - console.error "MULTI COMMAND", command, key, args + args_with_key = args.slice(0) + args_with_key[key_pos] = key client.rclient[command] key, args... -Client::eval = (script, pos, key_builder, args..., callback) -> - async.parallel @clients.map (client) -> - (cb) -> - key = key_builder(client.key_schema) - client.rclient.eval script, pos, key, args..., (result...) -> - if client.primary - # Return this result as the actual result - callback(result...) - # Send the rest through for comparison - cb(result...) - , (error, results) -> - console.log "#{command} RESULTS", results +compareResults = (results) -> + return if results.length < 2 + first = results[0] + for result in results.slice(1) + if not _.isEqual(first, result) + logger.warn { results }, "redis return values do not match" module.exports = createClient: () -> @@ -80,9 +111,15 @@ module.exports = if config.cluster? 
Redis = require("ioredis") rclient = new Redis.Cluster(config.cluster) + driver = "ioredis" else - rclient = require("redis-sharelatex").createClient(config) - rclient: rclient - key_schema: config.key_schema - primary: config.primary + {host, port, password} = config + rclient = require("redis-sharelatex").createClient({host, port, password}) + driver = "redis" + return { + rclient: rclient + key_schema: config.key_schema + primary: config.primary + driver: driver + } return new Client(clients) \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee new file mode 100644 index 0000000000..ca48aff7ff --- /dev/null +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -0,0 +1,307 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/RedisBackend.js" +SandboxedModule = require('sandboxed-module') +RedisKeyBuilder = require "../../../../app/js/RedisKeyBuilder" + +describe "RedisBackend", -> + beforeEach -> + @Settings = + redis: + documentupdater: [{ + primary: true + port: "6379" + host: "localhost" + password: "single-password" + key_schema: + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + docLines: ({doc_id}) -> "doclines:#{doc_id}" + docOps: ({doc_id}) -> "DocOps:#{doc_id}" + docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + }, { + cluster: [{ + port: "7000" + host: "localhost" + }] + password: "cluster-password" + key_schema: + blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" + docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + }] + + test_context = @ + class Cluster + constructor: (@config) -> + test_context.rclient_ioredis = @ + + @RedisBackend = SandboxedModule.require modulePath, requires: + "settings-sharelatex": @Settings + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + "redis-sharelatex": @redis = + createClient: sinon.stub().returns @rclient_redis = {} + "ioredis": @ioredis = + Cluster: Cluster + @client = @RedisBackend.createClient() + + @doc_id = "mock-doc-id" + + it "should create a redis client", -> + @redis.createClient + .calledWith({ + port: "6379" + host: "localhost" + password: "single-password" + }) + .should.equal true + + it "should create an ioredis cluster client", -> + @rclient_ioredis.config.should.deep.equal [{ + port: "7000" + host: "localhost" + }] + + describe "individual commands", -> + describe "with the same results", -> + beforeEach (done) -> + @content = "bar" + @rclient_redis.get = sinon.stub() + @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, @content) + @rclient_ioredis.get = sinon.stub() + @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, @content) + @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => + setTimeout () -> # Let all background requests complete + done(error) + + it "should return the result", -> + @result.should.equal @content + + it "should have called the redis 
client with the appropriate key", -> + @rclient_redis.get + .calledWith("doclines:#{@doc_id}") + .should.equal true + + it "should have called the ioredis cluster client with the appropriate key", -> + @rclient_ioredis.get + .calledWith("doclines:{#{@doc_id}}") + .should.equal true + + describe "with different results", -> + beforeEach (done) -> + @rclient_redis.get = sinon.stub() + @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, "primary-result") + @rclient_ioredis.get = sinon.stub() + @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, "secondary-result") + @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => + setTimeout () -> # Let all background requests complete + done(error) + + it "should return the primary result", -> + @result.should.equal "primary-result" + + it "should log out the difference", -> + @logger.warn + .calledWith({ + results: [ + "primary-result", + "secondary-result" + ] + }, "redis return values do not match") + .should.equal true + + describe "when the secondary errors", -> + beforeEach (done) -> + @rclient_redis.get = sinon.stub() + @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, "primary-result") + @rclient_ioredis.get = sinon.stub() + @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(@error = new Error("oops")) + @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => + setTimeout () -> # Let all background requests complete + done(error) + + it "should return the primary result", -> + @result.should.equal "primary-result" + + it "should log out the secondary error", -> + @logger.error + .calledWith({ + err: @error + }, "error in redis backend") + .should.equal true + + describe "when the primary errors", -> + beforeEach (done) -> + @rclient_redis.get = sinon.stub() + @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(@error = new Error("oops")) + @rclient_ioredis.get = sinon.stub() + @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, "secondary-result") + @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (@returned_error, @result) => + setTimeout () -> # Let all background requests complete + done() + + it "should return the error", -> + @returned_error.should.equal @error + + it "should log out the error", -> + @logger.error + .calledWith({ + err: @error + }, "error in redis backend") + .should.equal true + + describe "when the command has the key in a non-zero argument index", -> + beforeEach (done) -> + @script = "mock-script" + @key_count = 1 + @value = "mock-value" + @rclient_redis.eval = sinon.stub() + @rclient_redis.eval.withArgs(@script, @key_count, "Blocking:#{@doc_id}", @value).yields(null) + @rclient_ioredis.eval = sinon.stub() + @rclient_ioredis.eval.withArgs(@script, @key_count, "Blocking:{#{@doc_id}}", @value).yields(null, @content) + @client.eval @script, @key_count, RedisKeyBuilder.blockingKey({doc_id: @doc_id}), @value, (error) => + setTimeout () -> # Let all background requests complete + done(error) + + it "should have called the redis client with the appropriate key", -> + @rclient_redis.eval + .calledWith(@script, @key_count, "Blocking:#{@doc_id}", @value) + .should.equal true + + it "should have called the ioredis cluster client with the appropriate key", -> + @rclient_ioredis.eval + .calledWith(@script, @key_count, "Blocking:{#{@doc_id}}", @value) + .should.equal true + + describe "multi commands", -> + beforeEach -> + # We will test with: + # rclient.multi() + # .get("doclines:foo") + # 
.get("DocVersion:foo")
+			#     .exec (...) ->
+			@doclines = "mock-doclines"
+			@version = "42"
+			@rclient_redis.multi = sinon.stub().returns @rclient_redis
+			@rclient_ioredis.multi = sinon.stub().returns @rclient_ioredis
+
+		describe "with the same results", ->
+			beforeEach (done) ->
+				@rclient_redis.get = sinon.stub()
+				@rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version])
+				@rclient_ioredis.get = sinon.stub()
+				@rclient_ioredis.exec = sinon.stub().yields(null, [ [null, @doclines], [null, @version] ])
+
+				multi = @client.multi()
+				multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
+				multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
+				multi.exec (error, @result) =>
+					setTimeout () ->
+						done(error)
+
+			it "should return the result", ->
+				@result.should.deep.equal [@doclines, @version]
+
+			it "should have called the redis client with the appropriate keys", ->
+				@rclient_redis.get
+					.calledWith("doclines:#{@doc_id}")
+					.should.equal true
+				@rclient_redis.get
+					.calledWith("DocVersion:#{@doc_id}")
+					.should.equal true
+				@rclient_redis.exec
+					.called
+					.should.equal true
+
+			it "should have called the ioredis cluster client with the appropriate keys", ->
+				@rclient_ioredis.get
+					.calledWith("doclines:{#{@doc_id}}")
+					.should.equal true
+				@rclient_ioredis.get
+					.calledWith("DocVersion:{#{@doc_id}}")
+					.should.equal true
+				@rclient_ioredis.exec
+					.called
+					.should.equal true
+
+		describe "with different results", ->
+			beforeEach (done) ->
+				@rclient_redis.get = sinon.stub()
+				@rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version])
+				@rclient_ioredis.get = sinon.stub()
+				@rclient_ioredis.exec = sinon.stub().yields(null, [ [null, "different-doc-lines"], [null, @version] ])
+
+				multi = @client.multi()
+				multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
+				multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
+				multi.exec (error, @result) =>
+					setTimeout () ->
+						done(error)
+
+			it "should return the primary result", ->
+				@result.should.deep.equal [@doclines, @version]
+
+			it "should log out the difference", ->
+				@logger.warn
+					.calledWith({
+						results: [
+							[@doclines, @version],
+							["different-doc-lines", @version]
+						]
+					}, "redis return values do not match")
+					.should.equal true
+
+		describe "when the secondary errors", ->
+			beforeEach (done) ->
+				@rclient_redis.get = sinon.stub()
+				@rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version])
+				@rclient_ioredis.get = sinon.stub()
+				@rclient_ioredis.exec = sinon.stub().yields(@error = new Error("oops"))
+
+				multi = @client.multi()
+				multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
+				multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
+				multi.exec (error, @result) =>
+					setTimeout () ->
+						done(error)
+
+			it "should return the primary result", ->
+				@result.should.deep.equal [@doclines, @version]
+
+			it "should log out the secondary error", ->
+				@logger.error
+					.calledWith({
+						err: @error
+					}, "error in redis backend")
+					.should.equal true
+
+		describe "when the primary errors", ->
+			beforeEach (done) ->
+				@rclient_redis.get = sinon.stub()
+				@rclient_redis.exec = sinon.stub().yields(@error = new Error("oops"))
+				@rclient_ioredis.get = sinon.stub()
+				@rclient_ioredis.exec = sinon.stub().yields(null, [ [null, @doclines], [null, @version] ])
+
+				multi = @client.multi()
+				multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
+				multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
+				multi.exec (@returned_error) =>
+					setTimeout () -> done()
+
+			it "should return the error", ->
@returned_error.should.equal @error + + it "should log out the error", -> + @logger.error + .calledWith({ + err: @error + }, "error in redis backend") + .should.equal true + From 437e88581218bc0c850d0fd9ac3f2f9a099d7469 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 8 Jun 2016 16:21:56 +0100 Subject: [PATCH 111/769] Lock down to specific async version --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index d27b16271c..01c280c843 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -7,7 +7,7 @@ "url": "https://github.com/sharelatex/document-updater-sharelatex.git" }, "dependencies": { - "async": "^2.0.0-rc.5", + "async": "2.0.0-rc.5", "coffee-script": "1.4.0", "express": "3.3.4", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", From c6605ed5f0b80ebf2a9ac6b8c66cec35a6cfcec8 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 8 Jun 2016 16:41:58 +0100 Subject: [PATCH 112/769] Fix misapplied arguments to multi commands --- services/document-updater/app/coffee/RedisBackend.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 125aa29d03..630a780eb7 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -92,7 +92,7 @@ for command, key_pos of COMMANDS key = key_builder(client.key_schema) args_with_key = args.slice(0) args_with_key[key_pos] = key - client.rclient[command] key, args... + client.rclient[command] args_with_key... 
compareResults = (results) -> return if results.length < 2 From c823e06912a1f8ffda7d5762da2925ec1c1cd2b8 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 8 Jun 2016 16:42:09 +0100 Subject: [PATCH 113/769] Don't run redis commands in parallel for easier consistency reasoning --- .../app/coffee/RedisManager.coffee | 49 +++++++++---------- .../app/coffee/TrackChangesManager.coffee | 8 +-- .../TrackChangesManagerTests.coffee | 14 +++--- 3 files changed, 35 insertions(+), 36 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index d5908be12e..75312298bb 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -11,40 +11,37 @@ Errors = require "./Errors" minutes = 60 # seconds for Redis expire module.exports = RedisManager = - putDocInMemory : (project_id, doc_id, docLines, version, callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, _callback)-> timer = new metrics.Timer("redis.put-doc") - logger.log project_id:project_id, doc_id:doc_id, version: version, "putting doc in redis" - async.parallel [ - (cb) -> - multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) - multi.set keys.projectKey({doc_id:doc_id}), project_id - multi.set keys.docVersion(doc_id:doc_id), version - multi.exec cb - (cb) -> - rclient.sadd keys.docsInProject(project_id:project_id), doc_id, cb - ], (err) -> + callback = (error) -> timer.done() - callback(err) + _callback(error) + logger.log project_id:project_id, doc_id:doc_id, version: version, "putting doc in redis" + multi = rclient.multi() + multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) + multi.set keys.projectKey({doc_id:doc_id}), project_id + multi.set keys.docVersion(doc_id:doc_id), version + multi.exec (error) -> + return callback(error) if error? + rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback - removeDocFromMemory : (project_id, doc_id, callback)-> + removeDocFromMemory : (project_id, doc_id, _callback)-> logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis" - async.parallel [ - (cb) -> - multi = rclient.multi() - multi.del keys.docLines(doc_id:doc_id) - multi.del keys.projectKey(doc_id:doc_id) - multi.del keys.docVersion(doc_id:doc_id) - multi.exec cb - (cb) -> - rclient.srem keys.docsInProject(project_id:project_id), doc_id, cb - ], (err) -> + callback = (err) -> if err? logger.err project_id:project_id, doc_id:doc_id, err:err, "error removing doc from redis" - callback(err, null) + _callback(err) else logger.log project_id:project_id, doc_id:doc_id, "removed doc from redis" - callback() + _callback() + + multi = rclient.multi() + multi.del keys.docLines(doc_id:doc_id) + multi.del keys.projectKey(doc_id:doc_id) + multi.del keys.docVersion(doc_id:doc_id) + multi.exec (error) -> + return callback(error) if error? 
+ rclient.srem keys.docsInProject(project_id:project_id), doc_id, callback getDoc : (doc_id, callback = (error, lines, version) ->)-> timer = new metrics.Timer("redis.get-doc") diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 43d2314149..7661a52320 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -25,10 +25,10 @@ module.exports = TrackChangesManager = FLUSH_EVERY_N_OPS: 50 pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error) ->) -> jsonOp = JSON.stringify op - async.parallel [ - (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOp, cb - (cb) -> rclient.sadd "DocsWithHistoryOps:#{project_id}", doc_id, cb - ], (error, results) -> + multi = rclient.multi() + multi.rpush "UncompressedHistoryOps:#{doc_id}", jsonOp + multi.sadd "DocsWithHistoryOps:#{project_id}", doc_id + multi.exec (error, results) -> return callback(error) if error? [length, _] = results if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index f43a3a0c43..bd72db3669 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -43,8 +43,10 @@ describe "TrackChangesManager", -> describe "pushUncompressedHistoryOp", -> beforeEach -> @op = { op: [{ i: "foo", p: 4 }] } - @rclient.rpush = sinon.stub().yields(null, @length = 42) - @rclient.sadd = sinon.stub().yields() + @rclient.multi = sinon.stub().returns(@multi = {}) + @multi.rpush = sinon.stub() + @multi.sadd = sinon.stub() + @multi.exec = sinon.stub().yields(null, [@length = 42, "foo"]) @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) describe "pushing the op", -> @@ -52,10 +54,10 @@ describe "TrackChangesManager", -> @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should push the op into redis", -> - @rclient.rpush + @multi.rpush .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify @op) .should.equal true - @rclient.sadd + @multi.sadd .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) .should.equal true @@ -67,7 +69,7 @@ describe "TrackChangesManager", -> describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> - @rclient.rpush = sinon.stub().yields(null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @multi.exec = sinon.stub().yields(null, [2 * @TrackChangesManager.FLUSH_EVERY_N_OPS, "foo"]) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should tell the track changes api to flush", -> @@ -77,7 +79,7 @@ describe "TrackChangesManager", -> describe "when TrackChangesManager errors", -> beforeEach -> - @rclient.rpush = sinon.stub().yields(null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @multi.exec = sinon.stub().yields(null, [2 * @TrackChangesManager.FLUSH_EVERY_N_OPS, "foo"]) @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback From 48a92b28e5d04c11e2d7d3f6be1ee721a96e007b Mon Sep 17 00:00:00 2001 From: James Allen Date: 
Wed, 8 Jun 2016 16:42:09 +0100 Subject: [PATCH 114/769] Don't run redis commands in parallel for easier consistency reasoning --- .../app/coffee/RedisManager.coffee | 59 ++++++++----------- .../app/coffee/TrackChangesManager.coffee | 13 ++-- .../RedisManager/RedisManagerTests.coffee | 22 ------- .../TrackChangesManagerTests.coffee | 25 ++++---- 4 files changed, 46 insertions(+), 73 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index a300e92fcc..396979e52f 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -15,40 +15,37 @@ redisOptions.return_buffers = true minutes = 60 # seconds for Redis expire module.exports = RedisManager = - putDocInMemory : (project_id, doc_id, docLines, version, callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, _callback)-> timer = new metrics.Timer("redis.put-doc") - logger.log project_id:project_id, doc_id:doc_id, version: version, "putting doc in redis" - async.parallel [ - (cb) -> - multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) - multi.set keys.projectKey({doc_id:doc_id}), project_id - multi.set keys.docVersion(doc_id:doc_id), version - multi.exec cb - (cb) -> - rclient.sadd keys.docsInProject(project_id:project_id), doc_id, cb - ], (err) -> + callback = (error) -> timer.done() - callback(err) + _callback(error) + logger.log project_id:project_id, doc_id:doc_id, version: version, "putting doc in redis" + multi = rclient.multi() + multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) + multi.set keys.projectKey({doc_id:doc_id}), project_id + multi.set keys.docVersion(doc_id:doc_id), version + multi.exec (error) -> + return callback(error) if error? + rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback - removeDocFromMemory : (project_id, doc_id, callback)-> + removeDocFromMemory : (project_id, doc_id, _callback)-> logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis" - async.parallel [ - (cb) -> - multi = rclient.multi() - multi.del keys.docLines(doc_id:doc_id) - multi.del keys.projectKey(doc_id:doc_id) - multi.del keys.docVersion(doc_id:doc_id) - multi.exec cb - (cb) -> - rclient.srem keys.docsInProject(project_id:project_id), doc_id, cb - ], (err) -> + callback = (err) -> if err? logger.err project_id:project_id, doc_id:doc_id, err:err, "error removing doc from redis" - callback(err, null) + _callback(err) else logger.log project_id:project_id, doc_id:doc_id, "removed doc from redis" - callback() + _callback() + + multi = rclient.multi() + multi.del keys.docLines(doc_id:doc_id) + multi.del keys.projectKey(doc_id:doc_id) + multi.del keys.docVersion(doc_id:doc_id) + multi.exec (error) -> + return callback(error) if error? + rclient.srem keys.docsInProject(project_id:project_id), doc_id, callback getDoc : (doc_id, callback = (error, lines, version) ->)-> timer = new metrics.Timer("redis.get-doc") @@ -141,15 +138,5 @@ module.exports = RedisManager = version = parseInt(version, 10) callback null, version - pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error, length) ->) -> - jsonOp = JSON.stringify op - async.parallel [ - (cb) -> rclient.rpush keys.uncompressedHistoryOp(doc_id: doc_id), jsonOp, cb - (cb) -> rclient.sadd keys.docsWithHistoryOps(project_id: project_id), doc_id, cb - ], (error, results) -> - return callback(error) if error? 
- [length, _] = results - callback(error, length) - getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 90cba86b36..fcfbcdcc58 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -1,8 +1,9 @@ settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" -RedisManager = require "./RedisManager" -crypto = require("crypto") +redis = require("redis-sharelatex") +rclient = redis.createClient(settings.redis.web) +async = require "async" module.exports = TrackChangesManager = flushDocChanges: (project_id, doc_id, callback = (error) ->) -> @@ -23,8 +24,13 @@ module.exports = TrackChangesManager = FLUSH_EVERY_N_OPS: 50 pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error) ->) -> - RedisManager.pushUncompressedHistoryOp project_id, doc_id, op, (error, length) -> + jsonOp = JSON.stringify op + multi = rclient.multi() + multi.rpush "UncompressedHistoryOps:#{doc_id}", jsonOp + multi.sadd "DocsWithHistoryOps:#{project_id}", doc_id + multi.exec (error, results) -> return callback(error) if error? + [length, _] = results if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 # Do this in the background since it uses HTTP and so may be too # slow to wait for when processing a doc update. @@ -33,4 +39,3 @@ module.exports = TrackChangesManager = if error? logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api" callback() - diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 99daf8f706..09af6781d3 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -171,28 +171,6 @@ describe "RedisManager", -> it "should log out the problem", -> @logger.warn.called.should.equal true - describe "pushUncompressedHistoryOp", -> - beforeEach (done) -> - @op = { op: [{ i: "foo", p: 4 }] } - @rclient.rpush = sinon.stub().yields(null, @length = 42) - @rclient.sadd = sinon.stub().yields() - @RedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, (args...) => - @callback(args...) 
- done() - - it "should push the doc op into the doc ops list", -> - @rclient.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) - .should.equal true - - it "should add the doc_id to the set of which records the project docs", -> - @rclient.sadd - .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) - .should.equal true - - it "should call the callback with the length", -> - @callback.calledWith(null, @length).should.equal true - describe "getUpdatesLength", -> beforeEach -> @rclient.llen = sinon.stub().yields(null, @length = 3) diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index 8fad5322e2..c2f58abbf5 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -7,9 +7,9 @@ describe "TrackChangesManager", -> beforeEach -> @TrackChangesManager = SandboxedModule.require modulePath, requires: "request": @request = {} - "settings-sharelatex": @Settings = {} + "settings-sharelatex": @Settings = { redis: web: {} } "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./RedisManager": @RedisManager = {} + "redis-sharelatex": createClient: () => @rclient = {} @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -42,17 +42,23 @@ describe "TrackChangesManager", -> describe "pushUncompressedHistoryOp", -> beforeEach -> - @op = "mock-op" + @op = { op: [{ i: "foo", p: 4 }] } + @rclient.multi = sinon.stub().returns(@multi = {}) + @multi.rpush = sinon.stub() + @multi.sadd = sinon.stub() + @multi.exec = sinon.stub().yields(null, [@length = 42, "foo"]) @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) describe "pushing the op", -> beforeEach -> - @RedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(3, null, 1) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should push the op into redis", -> - @RedisManager.pushUncompressedHistoryOp - .calledWith(@project_id, @doc_id, @op) + @multi.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify @op) + .should.equal true + @multi.sadd + .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) .should.equal true it "should call the callback", -> @@ -63,8 +69,7 @@ describe "TrackChangesManager", -> describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> - @RedisManager.pushUncompressedHistoryOp = - sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @multi.exec = sinon.stub().yields(null, [2 * @TrackChangesManager.FLUSH_EVERY_N_OPS, "foo"]) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should tell the track changes api to flush", -> @@ -74,8 +79,7 @@ describe "TrackChangesManager", -> describe "when TrackChangesManager errors", -> beforeEach -> - @RedisManager.pushUncompressedHistoryOp = - sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) + @multi.exec = sinon.stub().yields(null, [2 * @TrackChangesManager.FLUSH_EVERY_N_OPS, "foo"]) @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback @@ -89,4 +93,3 @@ describe "TrackChangesManager", -> ) 
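The `[length, _] = results` destructuring these tests exercise works because EXEC returns replies in command order, so the RPUSH reply (the new list length) always comes first; the modulo check then schedules a flush every FLUSH_EVERY_N_OPS ops without blocking the update path. A condensed sketch of that trigger, where `flush` stands in for flushDocChanges:

# Sketch: push an op and kick off a background flush every N ops.
pushHistoryOp = (rclient, project_id, doc_id, op, flushEveryN, flush, callback) ->
  multi = rclient.multi()
  multi.rpush "UncompressedHistoryOps:#{doc_id}", JSON.stringify(op)
  multi.sadd "DocsWithHistoryOps:#{project_id}", doc_id
  multi.exec (error, results) ->
    return callback(error) if error?
    [length, _] = results   # EXEC replies arrive in command order: RPUSH first
    if length > 0 and length % flushEveryN == 0
      # Fire and forget: flushing goes over HTTP and must not block updates.
      flush project_id, doc_id, (flushError) ->
        console.error flushError if flushError?
    callback()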
.should.equal true - From bc00aab7b17a6a2ddeaefa822f87e1787bab7e74 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 17 Jun 2016 12:17:22 +0100 Subject: [PATCH 115/769] Explicitly separate calls to web and docupdater redis instances --- .../app/coffee/RedisManager.coffee | 19 ---- .../app/coffee/TrackChangesManager.coffee | 11 +-- .../app/coffee/UpdateManager.coffee | 5 +- .../app/coffee/WebRedisManager.coffee | 33 +++++++ .../config/settings.defaults.coffee | 37 ++++---- services/document-updater/package.json | 1 + .../coffee/helpers/DocUpdaterClient.coffee | 8 +- .../RedisManager/RedisManagerTests.coffee | 51 ---------- .../TrackChangesManagerTests.coffee | 24 ++--- .../UpdateManager/ApplyingUpdates.coffee | 11 ++- .../lockUpdatesAndDoTests.coffee | 1 + .../WebRedisManagerTests.coffee | 93 +++++++++++++++++++ 12 files changed, 175 insertions(+), 119 deletions(-) create mode 100644 services/document-updater/app/coffee/WebRedisManager.coffee create mode 100644 services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 75312298bb..226214599e 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -70,25 +70,6 @@ module.exports = RedisManager = multi.set keys.docVersion(doc_id:doc_id), version multi.exec (error, replys) -> callback(error) - getPendingUpdatesForDoc : (doc_id, callback)-> - multi = rclient.multi() - multi.lrange keys.pendingUpdates(doc_id:doc_id), 0 , -1 - multi.del keys.pendingUpdates(doc_id:doc_id) - multi.exec (error, replys) -> - return callback(error) if error? - jsonUpdates = replys[0] - updates = [] - for jsonUpdate in jsonUpdates - try - update = JSON.parse jsonUpdate - catch e - return callback e - updates.push update - callback error, updates - - getUpdatesLength: (doc_id, callback)-> - rclient.llen keys.pendingUpdates(doc_id:doc_id), callback - getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) -> rclient.llen keys.docOps(doc_id: doc_id), (error, length) -> return callback(error) if error? diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index fcfbcdcc58..cc61bdb0ae 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -1,9 +1,7 @@ settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" -redis = require("redis-sharelatex") -rclient = redis.createClient(settings.redis.web) -async = require "async" +WebRedisManager = require "./WebRedisManager" module.exports = TrackChangesManager = flushDocChanges: (project_id, doc_id, callback = (error) ->) -> @@ -24,13 +22,8 @@ module.exports = TrackChangesManager = FLUSH_EVERY_N_OPS: 50 pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error) ->) -> - jsonOp = JSON.stringify op - multi = rclient.multi() - multi.rpush "UncompressedHistoryOps:#{doc_id}", jsonOp - multi.sadd "DocsWithHistoryOps:#{project_id}", doc_id - multi.exec (error, results) -> + WebRedisManager.pushUncompressedHistoryOp project_id, doc_id, op, (error, length) -> return callback(error) if error? 
- [length, _] = results if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0 # Do this in the background since it uses HTTP and so may be too # slow to wait for when processing a doc update. diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 97c33e8b6f..219c52848b 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -1,5 +1,6 @@ LockManager = require "./LockManager" RedisManager = require "./RedisManager" +WebRedisManager = require "./WebRedisManager" ShareJsUpdateManager = require "./ShareJsUpdateManager" Settings = require('settings-sharelatex') async = require("async") @@ -25,7 +26,7 @@ module.exports = UpdateManager = UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback continueProcessingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> - RedisManager.getUpdatesLength doc_id, (error, length) => + WebRedisManager.getUpdatesLength doc_id, (error, length) => return callback(error) if error? if length > 0 UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, callback @@ -33,7 +34,7 @@ module.exports = UpdateManager = callback() fetchAndApplyUpdates: (project_id, doc_id, callback = (error) ->) -> - RedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => + WebRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => return callback(error) if error? if updates.length == 0 return callback() diff --git a/services/document-updater/app/coffee/WebRedisManager.coffee b/services/document-updater/app/coffee/WebRedisManager.coffee new file mode 100644 index 0000000000..a14c2d6c86 --- /dev/null +++ b/services/document-updater/app/coffee/WebRedisManager.coffee @@ -0,0 +1,33 @@ +Settings = require('settings-sharelatex') +rclient = require("redis-sharelatex").createClient(Settings.redis.web) +async = require "async" + +module.exports = WebRedisManager = + getPendingUpdatesForDoc : (doc_id, callback)-> + multi = rclient.multi() + multi.lrange "PendingUpdates:#{doc_id}", 0 , -1 + multi.del "PendingUpdates:#{doc_id}" + multi.exec (error, replys) -> + return callback(error) if error? + jsonUpdates = replys[0] + updates = [] + for jsonUpdate in jsonUpdates + try + update = JSON.parse jsonUpdate + catch e + return callback e + updates.push update + callback error, updates + + getUpdatesLength: (doc_id, callback)-> + rclient.llen "PendingUpdates:#{doc_id}", callback + + pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error, length) ->) -> + jsonOp = JSON.stringify op + async.parallel [ + (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOp, cb + (cb) -> rclient.sadd "DocsWithHistoryOps:#{project_id}", doc_id, cb + ], (error, results) -> + return callback(error) if error? 
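Because getPendingUpdatesForDoc runs LRANGE and DEL inside one MULTI, a concurrent RPUSH lands either before the transaction (and is returned) or after it (and survives for the next read), so no update can be lost between the read and the delete. A sketch of how a consumer could drain the queue safely under that guarantee, assuming the WebRedisManager interface above; `applyUpdate(update, cb)` is a hypothetical stand-in for real processing:

async = require "async"
WebRedisManager = require "./WebRedisManager"

# Sketch: repeatedly drain and apply pending updates until the list is empty.
drainPendingUpdates = (doc_id, applyUpdate, callback) ->
  WebRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) ->
    return callback(error) if error?
    return callback() if updates.length == 0
    async.eachSeries updates, applyUpdate, (error) ->
      return callback(error) if error?
      # New updates may have arrived while we were applying this batch.
      drainPendingUpdates doc_id, applyUpdate, callback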
+ [length, _] = results + callback(error, length) \ No newline at end of file diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 9a5c6d91f2..df2c9758c6 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -20,10 +20,9 @@ module.exports = port:"6379" host:"localhost" password:"" - documentupdater: [{ - primary: true - port: "6379" - host: "localhost" + documentupdater: + port:"6379" + host:"localhost" password:"" key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" @@ -33,20 +32,22 @@ module.exports = projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - }, { - cluster: [{ - port: "7000" - host: "localhost" - }] - key_schema: - blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" - projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" - docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" - }] + # To use Redis cluster, configure the backend as follows: + # [{ + # primary: true + # cluster: [{ + # port: "7000" + # host: "localhost" + # }] + # key_schema: + # blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + # docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + # docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + # docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + # pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" + # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + # }] max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 59def540a8..6872d5f332 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,6 +26,7 @@ "bunyan": "~0.22.1", "chai": "^3.5.0", "chai-spies": "^0.7.1", + "cluster-key-slot": "^1.0.5", "grunt": "~0.4.2", "grunt-available-tasks": "~0.4.1", "grunt-bunyan": "~0.5.0", diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index e9fd0b0c34..4f76f4cd6e 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -1,4 +1,5 @@ -rclient = require("redis").createClient() +Settings = require('settings-sharelatex') +rclient = require("redis").createClient(Settings.redis.web) request = require("request").defaults(jar: false) async = require "async" @@ -7,6 +8,11 @@ module.exports = DocUpdaterClient = chars = for i in [1..24] Math.random().toString(16)[2] return chars.join("") + + subscribeToAppliedOps: (callback = (message) ->) -> + rclient_sub = require("redis").createClient() + rclient_sub.subscribe "applied-ops" + rclient_sub.on "message", callback sendUpdate: (project_id, doc_id, update, callback = (error) ->) -> rclient.rpush "PendingUpdates:#{doc_id}", JSON.stringify(update), (error)-> diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee 
b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 338e7cd668..7ee63de648 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -54,46 +54,6 @@ describe "RedisManager", -> @callback .calledWith(null, @lines, @version) .should.equal true - - describe "getPendingUpdatesForDoc", -> - beforeEach -> - @rclient.lrange = sinon.stub() - @rclient.del = sinon.stub() - - describe "successfully", -> - beforeEach -> - @updates = [ - { op: [{ i: "foo", p: 4 }] } - { op: [{ i: "foo", p: 4 }] } - ] - @jsonUpdates = @updates.map (update) -> JSON.stringify update - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) - @RedisManager.getPendingUpdatesForDoc @doc_id, @callback - - it "should get the pending updates", -> - @rclient.lrange - .calledWith("PendingUpdates:#{@doc_id}", 0, -1) - .should.equal true - - it "should delete the pending updates", -> - @rclient.del - .calledWith("PendingUpdates:#{@doc_id}") - .should.equal true - - it "should call the callback with the updates", -> - @callback.calledWith(null, @updates).should.equal true - - describe "when the JSON doesn't parse", -> - beforeEach -> - @jsonUpdates = [ - JSON.stringify { op: [{ i: "foo", p: 4 }] } - "broken json" - ] - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) - @RedisManager.getPendingUpdatesForDoc @doc_id, @callback - - it "should return an error to the callback", -> - @callback.calledWith(new Error("JSON parse error")).should.equal true describe "getPreviousDocOpsTests", -> describe "with a start and an end value", -> @@ -179,17 +139,6 @@ describe "RedisManager", -> it "should log out the problem", -> @logger.warn.called.should.equal true - describe "getUpdatesLength", -> - beforeEach -> - @rclient.llen = sinon.stub().yields(null, @length = 3) - @RedisManager.getUpdatesLength @doc_id, @callback - - it "should look up the length", -> - @rclient.llen.calledWith("PendingUpdates:#{@doc_id}").should.equal true - - it "should return the length", -> - @callback.calledWith(null, @length).should.equal true - describe "pushDocOp", -> beforeEach -> @rclient.rpush = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index c2f58abbf5..574795f3bb 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -7,9 +7,9 @@ describe "TrackChangesManager", -> beforeEach -> @TrackChangesManager = SandboxedModule.require modulePath, requires: "request": @request = {} - "settings-sharelatex": @Settings = { redis: web: {} } + "settings-sharelatex": @Settings = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "redis-sharelatex": createClient: () => @rclient = {} + "./WebRedisManager": @WebRedisManager = {} @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -42,23 +42,17 @@ describe "TrackChangesManager", -> describe "pushUncompressedHistoryOp", -> beforeEach -> - @op = { op: [{ i: "foo", p: 4 }] } - @rclient.multi = sinon.stub().returns(@multi = {}) - @multi.rpush = sinon.stub() - @multi.sadd = sinon.stub() - @multi.exec = sinon.stub().yields(null, [@length = 42, "foo"]) + @op = 
"mock-op" @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) describe "pushing the op", -> beforeEach -> + @WebRedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(3, null, 1) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should push the op into redis", -> - @multi.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify @op) - .should.equal true - @multi.sadd - .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) + @WebRedisManager.pushUncompressedHistoryOp + .calledWith(@project_id, @doc_id, @op) .should.equal true it "should call the callback", -> @@ -69,7 +63,8 @@ describe "TrackChangesManager", -> describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> - @multi.exec = sinon.stub().yields(null, [2 * @TrackChangesManager.FLUSH_EVERY_N_OPS, "foo"]) + @WebRedisManager.pushUncompressedHistoryOp = + sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback it "should tell the track changes api to flush", -> @@ -79,7 +74,8 @@ describe "TrackChangesManager", -> describe "when TrackChangesManager errors", -> beforeEach -> - @multi.exec = sinon.stub().yields(null, [2 * @TrackChangesManager.FLUSH_EVERY_N_OPS, "foo"]) + @WebRedisManager.pushUncompressedHistoryOp = + sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee index 249740973f..19094794bb 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee @@ -12,6 +12,7 @@ describe "UpdateManager", -> @UpdateManager = SandboxedModule.require modulePath, requires: "./LockManager" : @LockManager = {} "./RedisManager" : @RedisManager = {} + "./WebRedisManager" : @WebRedisManager = {} "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} "logger-sharelatex": @logger = { log: sinon.stub() } "./Metrics": @Metrics = @@ -89,7 +90,7 @@ describe "UpdateManager", -> describe "continueProcessingUpdatesWithLock", -> describe "when there are outstanding updates", -> beforeEach -> - @RedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 3) + @WebRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 3) @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) @UpdateManager.continueProcessingUpdatesWithLock @project_id, @doc_id, @callback @@ -101,7 +102,7 @@ describe "UpdateManager", -> describe "when there are no outstanding updates", -> beforeEach -> - @RedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 0) + @WebRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 0) @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) @UpdateManager.continueProcessingUpdatesWithLock @project_id, @doc_id, @callback @@ -117,12 +118,12 @@ describe "UpdateManager", -> @updates = [{p: 1, t: "foo"}] @updatedDocLines = ["updated", "lines"] @version = 34 - @RedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) + @WebRedisManager.getPendingUpdatesForDoc = 
sinon.stub().callsArgWith(1, null, @updates) @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback it "should get the pending updates", -> - @RedisManager.getPendingUpdatesForDoc.calledWith(@doc_id).should.equal true + @WebRedisManager.getPendingUpdatesForDoc.calledWith(@doc_id).should.equal true it "should apply the updates", -> @UpdateManager.applyUpdates @@ -135,7 +136,7 @@ describe "UpdateManager", -> describe "when there are no updates", -> beforeEach -> @updates = [] - @RedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) + @WebRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) @UpdateManager.applyUpdates = sinon.stub() @RedisManager.setDocument = sinon.stub() @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback diff --git a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee index adba644b27..fa9ca76356 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee @@ -9,6 +9,7 @@ describe 'UpdateManager - lockUpdatesAndDo', -> @UpdateManager = SandboxedModule.require modulePath, requires: "./LockManager" : @LockManager = {} "./RedisManager" : @RedisManager = {} + "./WebRedisManager" : @WebRedisManager = {} "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} "logger-sharelatex": @logger = { log: sinon.stub() } @project_id = "project-id-123" diff --git a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee new file mode 100644 index 0000000000..932cb92e26 --- /dev/null +++ b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee @@ -0,0 +1,93 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/WebRedisManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "WebRedisManager", -> + beforeEach -> + @rclient = + auth: () -> + exec: sinon.stub() + @rclient.multi = () => @rclient + @WebRedisManager = SandboxedModule.require modulePath, requires: + "redis-sharelatex": createClient: () => @rclient + "settings-sharelatex": redis: web: @settings = {"mock": "settings"} + @doc_id = "doc-id-123" + @project_id = "project-id-123" + @callback = sinon.stub() + + describe "getPendingUpdatesForDoc", -> + beforeEach -> + @rclient.lrange = sinon.stub() + @rclient.del = sinon.stub() + + describe "successfully", -> + beforeEach -> + @updates = [ + { op: [{ i: "foo", p: 4 }] } + { op: [{ i: "foo", p: 4 }] } + ] + @jsonUpdates = @updates.map (update) -> JSON.stringify update + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) + @WebRedisManager.getPendingUpdatesForDoc @doc_id, @callback + + it "should get the pending updates", -> + @rclient.lrange + .calledWith("PendingUpdates:#{@doc_id}", 0, -1) + .should.equal true + + it "should delete the pending updates", -> + @rclient.del + .calledWith("PendingUpdates:#{@doc_id}") + .should.equal true + + it "should call the callback with the updates", -> + @callback.calledWith(null, @updates).should.equal true + + describe "when the JSON doesn't 
parse", -> + beforeEach -> + @jsonUpdates = [ + JSON.stringify { op: [{ i: "foo", p: 4 }] } + "broken json" + ] + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) + @WebRedisManager.getPendingUpdatesForDoc @doc_id, @callback + + it "should return an error to the callback", -> + @callback.calledWith(new Error("JSON parse error")).should.equal true + + + describe "getUpdatesLength", -> + beforeEach -> + @rclient.llen = sinon.stub().yields(null, @length = 3) + @WebRedisManager.getUpdatesLength @doc_id, @callback + + it "should look up the length", -> + @rclient.llen.calledWith("PendingUpdates:#{@doc_id}").should.equal true + + it "should return the length", -> + @callback.calledWith(null, @length).should.equal true + + describe "pushUncompressedHistoryOp", -> + beforeEach (done) -> + @op = { op: [{ i: "foo", p: 4 }] } + @rclient.rpush = sinon.stub().yields(null, @length = 42) + @rclient.sadd = sinon.stub().yields() + @WebRedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, (args...) => + @callback(args...) + done() + + it "should push the doc op into the doc ops list", -> + @rclient.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) + .should.equal true + + it "should add the doc_id to the set of which records the project docs", -> + @rclient.sadd + .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) + .should.equal true + + it "should call the callback with the length", -> + @callback.calledWith(null, @length).should.equal true From e5cf856ddf604fdba9483f4716d1390a96a7d4ae Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 17 Jun 2016 12:17:43 +0100 Subject: [PATCH 116/769] Add in stress test script --- .../test/stress/coffee/run.coffee | 152 ++++++++++++++++++ 1 file changed, 152 insertions(+) create mode 100644 services/document-updater/test/stress/coffee/run.coffee diff --git a/services/document-updater/test/stress/coffee/run.coffee b/services/document-updater/test/stress/coffee/run.coffee new file mode 100644 index 0000000000..92f4ef64dc --- /dev/null +++ b/services/document-updater/test/stress/coffee/run.coffee @@ -0,0 +1,152 @@ +DocUpdaterClient = require "../../acceptance/js/helpers/DocUpdaterClient" +# MockTrackChangesApi = require "../../acceptance/js/helpers/MockTrackChangesApi" +# MockWebApi = require "../../acceptance/js/helpers/MockWebApi" +assert = require "assert" +async = require "async" + +insert = (string, pos, content) -> + string.slice(0, pos) + content + string.slice(pos) + +transform = (op1, op2) -> + if op2.p < op1.p + op1.p += op2.i.length + return op1 + +class StressTestClient + constructor: (@options = {}) -> + @options.updateDelay ?= 200 + @project_id = @options.project_id or DocUpdaterClient.randomId() + @doc_id = @options.doc_id or DocUpdaterClient.randomId() + @pos = @options.pos or 0 + @content = @options.content or "" + + @client_id = DocUpdaterClient.randomId() + @version = @options.version or 0 + @inflight_op = null + @charCode = 0 + + @counts = { + conflicts: 0 + local_updates: 0 + remote_updates: 0 + max_delay: 0 + } + + DocUpdaterClient.subscribeToAppliedOps (channel, update) => + update = JSON.parse(update) + if update.doc_id == @doc_id + @processReply(update) + + sendUpdate: () -> + data = String.fromCharCode(65 + @charCode++ % 26) + @content = insert(@content, @pos, data) + DocUpdaterClient.sendUpdate( + @project_id, @doc_id + { + doc: @doc_id + op: [@inflight_op = { + i: data + p: @pos++ + }] + v: @version + meta: + source: @client_id + } + ) + @inflight_op_sent = Date.now() + + 
processReply: (update) -> + if update.error? + throw new Error("Error from server: '#{update.error}'") + assert(update.op.v == @version, "Op version from server is not increasing by 1 each time") + @version++ + if update.op.meta.source == @client_id + @counts.local_updates++ + @inflight_op = null + delay = Date.now() - @inflight_op_sent + @counts.max_delay = Math.max(@counts.max_delay, delay) + @continue() + else + assert(update.op.op.length == 1) + @counts.remote_updates++ + external_op = update.op.op[0] + if @inflight_op? + @counts.conflicts++ + external_op = transform(external_op, @inflight_op) + if external_op.p < @pos + @pos += external_op.i.length + @content = insert(@content, external_op.p, external_op.i) + + continue: () -> + if @updateCount > 0 + @updateCount-- + setTimeout () => + @sendUpdate() + , @options.updateDelay * ( 0.5 + Math.random() ) + else + @updateCallback() + + runForNUpdates: (n, callback = (error) ->) -> + @updateCallback = callback + @updateCount = n + @continue() + + check: (callback = (error) ->) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, body) => + throw error if error? + if !body.lines? + return console.error "[#{new Date()}] ERROR: Invalid response from get doc (#{doc_id})", body + content = body.lines.join("\n") + if content != @content + console.error "[#{new Date()}] Error: Client content does not match server (Server: '#{content}', Client: '#{@content}')" + # TODO: Check content is of the correct form + callback() + + +checkDocument = (project_id, doc_id, clients, callback = (error) ->) -> + jobs = clients.map (client) -> + (cb) -> client.check cb + async.parallel jobs, callback + +printSummary = (doc_id, clients) -> + slot = require('cluster-key-slot') + now = new Date() + console.log "[#{now}] [#{doc_id.slice(0,4)} (slot: #{slot(doc_id)})] #{clients.length} clients..." + for client in clients + console.log "[#{now}] \t[#{client.client_id.slice(0,4)}] { local: #{client.counts.local_updates }, remote: #{client.counts.remote_updates}, conflicts: #{client.counts.conflicts}, max_delay: #{client.counts.max_delay} }" + client.counts = { + local_updates: 0 + remote_updates: 0 + conflicts: 0 + max_delay: 0 + } + +UPDATE_DELAY = parseInt(process.argv[2], 10) +SAMPLE_INTERVAL = parseInt(process.argv[3], 10) + +for doc_and_project_id in process.argv.slice(4) + do (doc_and_project_id) -> + [project_id, doc_id] = doc_and_project_id.split(":") + console.log {project_id, doc_id} + DocUpdaterClient.getDoc project_id, doc_id, (error, res, body) => + throw error if error? + if !body.lines? + return console.error "[#{new Date()}] ERROR: Invalid response from get doc (#{doc_id})", body + content = body.lines.join("\n") + version = body.version + + clients = [] + for pos in [1, 2, 3, 4, 5] + do (pos) -> + client = new StressTestClient({doc_id, project_id, content, pos: pos, version: version, updateDelay: UPDATE_DELAY}) + clients.push client + + do runBatch = () -> + jobs = clients.map (client) -> + (cb) -> client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb) + async.parallel jobs, (error) -> + throw error if error? + printSummary(doc_id, clients) + checkDocument project_id, doc_id, clients, (error) -> + throw error if error? 
+ runBatch() From 40b7da8edbc6e5e27b1e9916dac17fa955461669 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 17 Jun 2016 14:05:54 +0100 Subject: [PATCH 117/769] Support endpoints and masterName in redis config --- services/document-updater/app/coffee/RedisBackend.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 630a780eb7..6a8e817d8c 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -113,8 +113,8 @@ module.exports = rclient = new Redis.Cluster(config.cluster) driver = "ioredis" else - {host, port, password} = config - rclient = require("redis-sharelatex").createClient({host, port, password}) + {host, port, password, endpoints, masterName} = config + rclient = require("redis-sharelatex").createClient({host, port, password, endpoints, masterName}) driver = "redis" return { rclient: rclient From 39d920682aa292e24a69a2a9928c7dafd9eaa029 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 17 Jun 2016 14:11:22 +0100 Subject: [PATCH 118/769] Only pass through keys to redis that are explicitly set in the config --- services/document-updater/app/coffee/RedisBackend.coffee | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 6a8e817d8c..8df988faba 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -113,8 +113,11 @@ module.exports = rclient = new Redis.Cluster(config.cluster) driver = "ioredis" else - {host, port, password, endpoints, masterName} = config - rclient = require("redis-sharelatex").createClient({host, port, password, endpoints, masterName}) + redis_config = {} + for key in ["host", "port", "password", "endpoints", "masterName"] + if config[key]? 
+ redis_config[key] = config[key] + rclient = require("redis-sharelatex").createClient(redis_config) driver = "redis" return { rclient: rclient From 7915c2ab6ca914217fde2915b26e7ea7718a4230 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 20 Jun 2016 13:56:46 +0100 Subject: [PATCH 119/769] Tweak stress test to work on server --- .../test/acceptance/coffee/helpers/DocUpdaterClient.coffee | 4 ++-- services/document-updater/test/stress/coffee/run.coffee | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 4f76f4cd6e..49c7f96fb9 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -1,5 +1,5 @@ Settings = require('settings-sharelatex') -rclient = require("redis").createClient(Settings.redis.web) +rclient = require("redis-sharelatex").createClient(Settings.redis.web) request = require("request").defaults(jar: false) async = require "async" @@ -10,7 +10,7 @@ module.exports = DocUpdaterClient = return chars.join("") subscribeToAppliedOps: (callback = (message) ->) -> - rclient_sub = require("redis").createClient() + rclient_sub = require("redis-sharelatex").createClient(Settings.redis.web) rclient_sub.subscribe "applied-ops" rclient_sub.on "message", callback diff --git a/services/document-updater/test/stress/coffee/run.coffee b/services/document-updater/test/stress/coffee/run.coffee index 92f4ef64dc..f2ccfe670e 100644 --- a/services/document-updater/test/stress/coffee/run.coffee +++ b/services/document-updater/test/stress/coffee/run.coffee @@ -1,4 +1,4 @@ -DocUpdaterClient = require "../../acceptance/js/helpers/DocUpdaterClient" +DocUpdaterClient = require "../../acceptance/coffee/helpers/DocUpdaterClient" # MockTrackChangesApi = require "../../acceptance/js/helpers/MockTrackChangesApi" # MockWebApi = require "../../acceptance/js/helpers/MockWebApi" assert = require "assert" From 392beac57bf6fe80880e21cf0f64b8aefce2eeaa Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 20 Jun 2016 15:01:39 +0100 Subject: [PATCH 120/769] Improve error reporting and retry ops in stress test script --- .../test/stress/coffee/run.coffee | 47 +++++++++++++------ 1 file changed, 33 insertions(+), 14 deletions(-) diff --git a/services/document-updater/test/stress/coffee/run.coffee b/services/document-updater/test/stress/coffee/run.coffee index f2ccfe670e..80f2923831 100644 --- a/services/document-updater/test/stress/coffee/run.coffee +++ b/services/document-updater/test/stress/coffee/run.coffee @@ -34,38 +34,57 @@ class StressTestClient DocUpdaterClient.subscribeToAppliedOps (channel, update) => update = JSON.parse(update) + if update.error? + console.error new Error("Error from server: '#{update.error}'") if update.doc_id == @doc_id @processReply(update) sendUpdate: () -> data = String.fromCharCode(65 + @charCode++ % 26) @content = insert(@content, @pos, data) + @inflight_op = { + i: data + p: @pos++ + } + @resendUpdate() + @inflight_op_sent = Date.now() + + resendUpdate: () -> + assert(@inflight_op?) 
DocUpdaterClient.sendUpdate( @project_id, @doc_id { doc: @doc_id - op: [@inflight_op = { - i: data - p: @pos++ - }] + op: [@inflight_op] v: @version meta: source: @client_id + dupIfSource: [@client_id] } ) - @inflight_op_sent = Date.now() + @update_timer = setTimeout () => + console.log "[#{new Date()}] \t[#{@client_id.slice(0,4)}] WARN: Resending update after 5 seconds" + @resendUpdate() + , 5000 processReply: (update) -> - if update.error? - throw new Error("Error from server: '#{update.error}'") - assert(update.op.v == @version, "Op version from server is not increasing by 1 each time") + if update.op.v != @version + if update.op.v < @version + console.log "[#{new Date()}] \t[#{@client_id.slice(0,4)}] WARN: Duplicate ack (already seen version)" + return + else + throw new Error("version jumped ahead") @version++ if update.op.meta.source == @client_id - @counts.local_updates++ - @inflight_op = null - delay = Date.now() - @inflight_op_sent - @counts.max_delay = Math.max(@counts.max_delay, delay) - @continue() + if @inflight_op? + @counts.local_updates++ + @inflight_op = null + clearTimeout @update_timer + delay = Date.now() - @inflight_op_sent + @counts.max_delay = Math.max(@counts.max_delay, delay) + @continue() + else + console.log "[#{new Date()}] \t[#{@client_id.slice(0,4)}] WARN: Duplicate ack" else assert(update.op.op.length == 1) @counts.remote_updates++ @@ -136,7 +155,7 @@ for doc_and_project_id in process.argv.slice(4) version = body.version clients = [] - for pos in [1, 2, 3, 4, 5] + for pos in [1, 2] do (pos) -> client = new StressTestClient({doc_id, project_id, content, pos: pos, version: version, updateDelay: UPDATE_DELAY}) clients.push client From 27a74d6b715401b22ac32ab04566f623422106ad Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 21 Jun 2016 14:31:20 +0100 Subject: [PATCH 121/769] Improve consistency of stress test script --- .../coffee/helpers/DocUpdaterClient.coffee | 6 +- .../test/stress/coffee/run.coffee | 96 +++++++++++++------ 2 files changed, 71 insertions(+), 31 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 49c7f96fb9..a14f6f9364 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -3,6 +3,10 @@ rclient = require("redis-sharelatex").createClient(Settings.redis.web) request = require("request").defaults(jar: false) async = require "async" +rclient_sub = require("redis-sharelatex").createClient(Settings.redis.web) +rclient_sub.subscribe "applied-ops" +rclient_sub.setMaxListeners(0) + module.exports = DocUpdaterClient = randomId: () -> chars = for i in [1..24] @@ -10,8 +14,6 @@ module.exports = DocUpdaterClient = return chars.join("") subscribeToAppliedOps: (callback = (message) ->) -> - rclient_sub = require("redis-sharelatex").createClient(Settings.redis.web) - rclient_sub.subscribe "applied-ops" rclient_sub.on "message", callback sendUpdate: (project_id, doc_id, update, callback = (error) ->) -> diff --git a/services/document-updater/test/stress/coffee/run.coffee b/services/document-updater/test/stress/coffee/run.coffee index 80f2923831..2c48583014 100644 --- a/services/document-updater/test/stress/coffee/run.coffee +++ b/services/document-updater/test/stress/coffee/run.coffee @@ -5,12 +5,17 @@ assert = require "assert" async = require "async" insert = (string, pos, content) -> - 
string.slice(0, pos) + content + string.slice(pos) + result = string.slice(0, pos) + content + string.slice(pos) + return result transform = (op1, op2) -> if op2.p < op1.p - op1.p += op2.i.length - return op1 + return { + p: op1.p + op2.i.length + i: op1.i + } + else + return op1 class StressTestClient constructor: (@options = {}) -> @@ -36,6 +41,7 @@ class StressTestClient update = JSON.parse(update) if update.error? console.error new Error("Error from server: '#{update.error}'") + return if update.doc_id == @doc_id @processReply(update) @@ -73,7 +79,7 @@ class StressTestClient console.log "[#{new Date()}] \t[#{@client_id.slice(0,4)}] WARN: Duplicate ack (already seen version)" return else - throw new Error("version jumped ahead") + console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] ERROR: Version jumped ahead (client: #{@version}, op: #{update.op.v})" @version++ if update.op.meta.source == @client_id if @inflight_op? @@ -91,6 +97,7 @@ class StressTestClient external_op = update.op.op[0] if @inflight_op? @counts.conflicts++ + @inflight_op = transform(@inflight_op, external_op) external_op = transform(external_op, @inflight_op) if external_op.p < @pos @pos += external_op.i.length @@ -114,13 +121,41 @@ class StressTestClient DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, body) => throw error if error? if !body.lines? - return console.error "[#{new Date()}] ERROR: Invalid response from get doc (#{doc_id})", body + return console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] ERROR: Invalid response from get doc (#{doc_id})", body content = body.lines.join("\n") + version = body.version if content != @content - console.error "[#{new Date()}] Error: Client content does not match server (Server: '#{content}', Client: '#{@content}')" - # TODO: Check content is of the correct form + if version == @version + console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] Error: Client content does not match server." + console.error "Server: #{content.split('a')}" + console.error "Client: #{@content.split('a')}" + else + console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] Error: Version mismatch (Server: '#{version}', Client: '#{@version}')" + + if !@isContentValid(@content) + for chunk, i in @content.split("") + if chunk? and chunk != "a" + console.log chunk, i + throw new Error("bad content") callback() + isChunkValid: (chunk) -> + char = 0 + for letter, i in chunk + if letter.charCodeAt(0) != 65 + i % 26 + console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] Invalid Chunk:", chunk + return false + return true + + isContentValid: (content) -> + for chunk in content.split('a') + if chunk? 
and chunk != "" + if !@isChunkValid(chunk) + + console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] Invalid content", content + return false + return true + checkDocument = (project_id, doc_id, clients, callback = (error) ->) -> jobs = clients.map (client) -> @@ -140,32 +175,35 @@ printSummary = (doc_id, clients) -> max_delay: 0 } -UPDATE_DELAY = parseInt(process.argv[2], 10) -SAMPLE_INTERVAL = parseInt(process.argv[3], 10) +CLIENT_COUNT = parseInt(process.argv[2], 10) +UPDATE_DELAY = parseInt(process.argv[3], 10) +SAMPLE_INTERVAL = parseInt(process.argv[4], 10) -for doc_and_project_id in process.argv.slice(4) +for doc_and_project_id in process.argv.slice(5) do (doc_and_project_id) -> [project_id, doc_id] = doc_and_project_id.split(":") console.log {project_id, doc_id} - DocUpdaterClient.getDoc project_id, doc_id, (error, res, body) => + DocUpdaterClient.setDocLines project_id, doc_id, [(new Array(CLIENT_COUNT + 2)).join('a')], null, null, (error) -> throw error if error? - if !body.lines? - return console.error "[#{new Date()}] ERROR: Invalid response from get doc (#{doc_id})", body - content = body.lines.join("\n") - version = body.version - - clients = [] - for pos in [1, 2] - do (pos) -> - client = new StressTestClient({doc_id, project_id, content, pos: pos, version: version, updateDelay: UPDATE_DELAY}) - clients.push client + DocUpdaterClient.getDoc project_id, doc_id, (error, res, body) => + throw error if error? + if !body.lines? + return console.error "[#{new Date()}] ERROR: Invalid response from get doc (#{doc_id})", body + content = body.lines.join("\n") + version = body.version - do runBatch = () -> - jobs = clients.map (client) -> - (cb) -> client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb) - async.parallel jobs, (error) -> - throw error if error? - printSummary(doc_id, clients) - checkDocument project_id, doc_id, clients, (error) -> + clients = [] + for pos in [1..CLIENT_COUNT] + do (pos) -> + client = new StressTestClient({doc_id, project_id, content, pos: pos, version: version, updateDelay: UPDATE_DELAY}) + clients.push client + + do runBatch = () -> + jobs = clients.map (client) -> + (cb) -> client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb) + async.parallel jobs, (error) -> throw error if error? - runBatch() + printSummary(doc_id, clients) + checkDocument project_id, doc_id, clients, (error) -> + throw error if error? 
+ runBatch() From 8ef03c3d2f8a63289ffc8806e0efcb42f99ec02b Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 23 Jun 2016 15:38:51 +0100 Subject: [PATCH 122/769] Add in application layer monitoring of the health of each cluster node --- .../app/coffee/RedisBackend.coffee | 25 ++++++++ .../app/coffee/RedisManager.coffee | 2 + .../RedisBackend/RedisBackendTests.coffee | 59 +++++++++++++++++++ .../RedisManager/RedisManagerTests.coffee | 4 +- 4 files changed, 89 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 8df988faba..7d02ba72af 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -5,6 +5,8 @@ logger = require "logger-sharelatex" class Client constructor: (@clients) -> + @HEARTBEAT_INTERVAL = 5000 + @HEARTBEAT_TIMEOUT = 2000 multi: () -> return new MultiClient( @@ -16,6 +18,29 @@ class Client } ) + monitorAndReconnect: () -> + for client in @clients + if client.driver == "ioredis" + @_monitorCluster(client.rclient) + + _monitorCluster: (rclient) -> + setInterval () => + # Nodes can come and go as the cluster moves/heals, so each heartbeat + # we ask again for the currently known nodes. + for node in rclient.nodes("all") + do (node) => + timer = setTimeout () => + logger.error {err: new Error("Node timed out, reconnecting"), key: node.options.key} + node.stream.destroy() + timer = null + , @HEARTBEAT_TIMEOUT + node.ping (err) -> + if !err? + clearTimeout timer + timer = null + , @HEARTBEAT_INTERVAL + + class MultiClient constructor: (@clients) -> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 226214599e..7fe03f88d8 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -10,6 +10,8 @@ Errors = require "./Errors" # Make times easy to read minutes = 60 # seconds for Redis expire +rclient.monitorAndReconnect() + module.exports = RedisManager = putDocInMemory : (project_id, doc_id, docLines, version, _callback)-> timer = new metrics.Timer("redis.put-doc") diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index ca48aff7ff..263bc7deab 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -42,6 +42,8 @@ describe "RedisBackend", -> class Cluster constructor: (@config) -> test_context.rclient_ioredis = @ + + nodes: sinon.stub() @RedisBackend = SandboxedModule.require modulePath, requires: "settings-sharelatex": @Settings @@ -305,3 +307,60 @@ describe "RedisBackend", -> }, "error in redis backend") .should.equal true + describe "monitorAndReconnect", -> + beforeEach -> + @client._monitorCluster = sinon.stub() + @client.monitorAndReconnect() + + it "should monitor the cluster client", -> + @client._monitorCluster + .calledWith(@rclient_ioredis) + .should.equal true + + describe "_monitorCluster", -> + beforeEach -> + @client.HEARTBEAT_TIMEOUT = 10 + @client.HEARTBEAT_INTERVAL = 100 + @nodes = [{ + options: key: "node-0" + stream: destroy: sinon.stub() + }, { + options: key: "node-1" + stream: destroy: sinon.stub() + }] + @rclient_ioredis.nodes = sinon.stub().returns(@nodes) + + describe 
"successfully", -> + beforeEach -> + @nodes[0].ping = (cb) -> cb() + @nodes[1].ping = (cb) -> cb() + @client._monitorCluster(@rclient_ioredis) + + it "should get all nodes", -> + setTimeout () => + @rclient_ioredis.nodes + .calledWith("all") + .should.equal true + , 200 + + it "should not reset the node connections", (done) -> + setTimeout () => + @nodes[0].stream.destroy.called.should.equal false + @nodes[1].stream.destroy.called.should.equal false + done() + , 200 + + describe "when ping fails to a node", -> + beforeEach -> + @nodes[0].ping = (cb) -> cb() + @nodes[1].ping = (cb) -> # Just hang + @client._monitorCluster(@rclient_ioredis) + + it "should reset the failing node connection", (done) -> + setTimeout () => + @nodes[0].stream.destroy.called.should.equal false + @nodes[1].stream.destroy.called.should.equal true + done() + , 200 + + \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 7ee63de648..d88dafb9bb 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -10,9 +10,11 @@ describe "RedisManager", -> @rclient = auth: () -> exec: sinon.stub() + monitorAndReconnect: () -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: - "./RedisBackend": createClient: () => @rclient + "./RedisBackend": + createClient: () => @rclient "./RedisKeyBuilder": blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" docLines: ({doc_id}) -> "doclines:#{doc_id}" From 414ab5d6a9a9e428963232d5512fca891b4b96f4 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 23 Jun 2016 15:59:25 +0100 Subject: [PATCH 123/769] Refactor TCP redis monitoring to be more explicit --- .../app/coffee/RedisBackend.coffee | 24 ++++----- .../app/coffee/RedisManager.coffee | 2 +- .../config/settings.defaults.coffee | 50 +++++++++---------- .../RedisBackend/RedisBackendTests.coffee | 4 +- .../RedisManager/RedisManagerTests.coffee | 2 +- 5 files changed, 40 insertions(+), 42 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 7d02ba72af..f72e530471 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -18,7 +18,7 @@ class Client } ) - monitorAndReconnect: () -> + monitorTcpAndReconnect: () -> for client in @clients if client.driver == "ioredis" @_monitorCluster(client.rclient) @@ -28,18 +28,18 @@ class Client # Nodes can come and go as the cluster moves/heals, so each heartbeat # we ask again for the currently known nodes. for node in rclient.nodes("all") - do (node) => - timer = setTimeout () => - logger.error {err: new Error("Node timed out, reconnecting"), key: node.options.key} - node.stream.destroy() - timer = null - , @HEARTBEAT_TIMEOUT - node.ping (err) -> - if !err? - clearTimeout timer - timer = null + @_checkNode(node) , @HEARTBEAT_INTERVAL - + + _checkNode: (node) -> + timer = setTimeout () -> + logger.error {err: new Error("Node timed out, reconnecting"), key: node.options.key} + # Discussion of application layer monitoring recommends this way of reconnecting at https://github.com/luin/ioredis/issues/275 + node.stream.destroy() + , @HEARTBEAT_TIMEOUT + node.ping (err) -> + if !err? 
+ clearTimeout timer class MultiClient constructor: (@clients) -> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 7fe03f88d8..a23725653b 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -10,7 +10,7 @@ Errors = require "./Errors" # Make times easy to read minutes = 60 # seconds for Redis expire -rclient.monitorAndReconnect() +rclient.monitorTcpAndReconnect() module.exports = RedisManager = putDocInMemory : (project_id, doc_id, docLines, version, _callback)-> diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index df2c9758c6..6a2902036e 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -21,33 +21,31 @@ module.exports = host:"localhost" password:"" documentupdater: - port:"6379" - host:"localhost" - password:"" - key_schema: - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - docLines: ({doc_id}) -> "doclines:#{doc_id}" - docOps: ({doc_id}) -> "DocOps:#{doc_id}" - docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" - projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + # port:"6379" + # host:"localhost" + # password:"" + # key_schema: + # blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + # docLines: ({doc_id}) -> "doclines:#{doc_id}" + # docOps: ({doc_id}) -> "DocOps:#{doc_id}" + # docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + # projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" + # docsInProject: ({project_id}) -> "DocsIn:#{project_id}" # To use Redis cluster, configure the backend as follows: - # [{ - # primary: true - # cluster: [{ - # port: "7000" - # host: "localhost" - # }] - # key_schema: - # blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - # docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - # docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - # docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" - # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" - # pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" - # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" - # }] + [{ + primary: true + cluster: [{ + port: "7000" + host: "localhost" + }] + key_schema: + blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + }] max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index 263bc7deab..10ad599301 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -307,10 +307,10 @@ describe "RedisBackend", -> }, "error in redis backend") .should.equal true - describe "monitorAndReconnect", -> + describe "monitorTcpAndReconnect", -> beforeEach -> @client._monitorCluster = sinon.stub() - @client.monitorAndReconnect() + @client.monitorTcpAndReconnect() it "should monitor the cluster client", -> 
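The monitor gives each node HEARTBEAT_TIMEOUT ms to answer a PING; on timeout it destroys the node's TCP stream so ioredis re-establishes the connection, the approach from the ioredis issue referenced in the patch. Reduced to the timer/ping race, assuming `node` exposes ping and stream as ioredis cluster nodes do:

# Sketch: race a PING against a timeout; on timeout, force a reconnect.
checkNode = (node, timeoutMs) ->
  timer = setTimeout ->
    timer = null
    console.error "redis node timed out; destroying stream to force a reconnect"
    node.stream.destroy()   # ioredis will re-establish the connection
  , timeoutMs
  node.ping (err) ->
    if timer?               # ping answered first; cancel the pending reconnect
      clearTimeout timer
      timer = null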
@client._monitorCluster diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index d88dafb9bb..9022163da6 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -10,7 +10,7 @@ describe "RedisManager", -> @rclient = auth: () -> exec: sinon.stub() - monitorAndReconnect: () -> + monitorTcpAndReconnect: () -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: "./RedisBackend": From da89ff717295a97bfa15f4a6057047459ca0826e Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 23 Jun 2016 18:00:03 +0100 Subject: [PATCH 124/769] Add in external health check rather than internal --- services/document-updater/app.coffee | 7 ++ .../app/coffee/RedisBackend.coffee | 48 +++++++----- .../app/coffee/RedisManager.coffee | 4 +- .../config/settings.defaults.coffee | 51 ++++++------ .../RedisBackend/RedisBackendTests.coffee | 77 ++++++++++++------- .../RedisManager/RedisManagerTests.coffee | 1 - 6 files changed, 113 insertions(+), 75 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index eafe03e402..cb50471965 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -60,6 +60,13 @@ app.get "/health_check/redis", (req, res, next)-> else res.send 500 +app.get "/health_check/redis_cluster", (req, res, next) -> + RedisManager.rclient.healthCheck (error, alive) -> + if error? + logger.err {err: error}, "failed redis cluster health check" + res.send 500 + else + res.send 200 app.use (error, req, res, next) -> if error instanceof Errors.NotFoundError diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index f72e530471..ca9e3de9e7 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -5,7 +5,6 @@ logger = require "logger-sharelatex" class Client constructor: (@clients) -> - @HEARTBEAT_INTERVAL = 5000 @HEARTBEAT_TIMEOUT = 2000 multi: () -> @@ -18,28 +17,41 @@ class Client } ) - monitorTcpAndReconnect: () -> - for client in @clients - if client.driver == "ioredis" - @_monitorCluster(client.rclient) + healthCheck: (callback) -> + jobs = @clients.map (client) => + (cb) => @_healthCheckClient(client, cb) + async.parallel jobs, callback - _monitorCluster: (rclient) -> - setInterval () => - # Nodes can come and go as the cluster moves/heals, so each heartbeat - # we ask again for the currently known nodes. - for node in rclient.nodes("all") - @_checkNode(node) - , @HEARTBEAT_INTERVAL + _healthCheckClient: (client, callback) -> + if client.driver == "ioredis" + @_healthCheckClusterClient(client, callback) + else + @_healthCheckNodeRedisClient(client, callback) - _checkNode: (node) -> + _healthCheckNodeRedisClient: (client, callback) -> + client.healthCheck ?= require("redis-sharelatex").activeHealthCheckRedis(Settings.redis.web) + if client.healthCheck.isAlive() + return callback() + else + return callback(new Error("node-redis client failed health check")) + + _healthCheckClusterClient: (client, callback) -> + jobs = client.rclient.nodes("all").map (n) => + (cb) => @_checkNode(n, cb) + async.parallel jobs, callback + + _checkNode: (node, _callback) -> + callback = (args...) 
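A side note on the ?= in _healthCheckNodeRedisClient above: CoffeeScript's existential assignment only assigns when the left side is null or undefined, so each client lazily gets exactly one health checker and reuses it on every later call. A tiny sketch of the idiom, with an illustrative makeChecker factory (not part of the service):

    makeChecker = () ->
      console.log "creating a checker"
      { isAlive: () -> true }

    checkClient = (client) ->
      client.healthCheck ?= makeChecker() # assigns only on the first call
      client.healthCheck.isAlive()

    client = {}
    checkClient(client) # logs "creating a checker"
    checkClient(client) # reuses the memoised checker, logs nothing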
-> + _callback(args...) + _callback = () -> timer = setTimeout () -> - logger.error {err: new Error("Node timed out, reconnecting"), key: node.options.key} - # Discussion of application layer monitoring recommends this way of reconnecting at https://github.com/luin/ioredis/issues/275 - node.stream.destroy() + error = new Error("ioredis node ping check timed out") + logger.error {err: error, key: node.options.key}, "node timed out" + callback(error) , @HEARTBEAT_TIMEOUT node.ping (err) -> - if !err? - clearTimeout timer + clearTimeout timer + callback(err) class MultiClient constructor: (@clients) -> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index a23725653b..f8b109ca17 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -10,9 +10,9 @@ Errors = require "./Errors" # Make times easy to read minutes = 60 # seconds for Redis expire -rclient.monitorTcpAndReconnect() - module.exports = RedisManager = + rclient: rclient + putDocInMemory : (project_id, doc_id, docLines, version, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 6a2902036e..15456db932 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -20,32 +20,31 @@ module.exports = port:"6379" host:"localhost" password:"" - documentupdater: - # port:"6379" - # host:"localhost" - # password:"" - # key_schema: - # blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - # docLines: ({doc_id}) -> "doclines:#{doc_id}" - # docOps: ({doc_id}) -> "DocOps:#{doc_id}" - # docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" - # projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" - # docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - # To use Redis cluster, configure the backend as follows: - [{ - primary: true - cluster: [{ - port: "7000" - host: "localhost" - }] - key_schema: - blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" - projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" - docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" - }] + documentupdater: [{ + primary: true + port:"6379" + host:"localhost" + password:"" + key_schema: + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + docLines: ({doc_id}) -> "doclines:#{doc_id}" + docOps: ({doc_id}) -> "DocOps:#{doc_id}" + docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" + docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + # }, { + # cluster: [{ + # port: "7000" + # host: "localhost" + # }] + # key_schema: + # blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + # docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + # docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + # docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + }] max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index 10ad599301..648395cd1a 100644 --- 
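The healthCheck fan-out above is a reusable shape: map each backend to a job and let async.parallel surface the first failure. A minimal sketch under that assumption, with stand-in backends (the check function here is illustrative):

    async = require "async"

    healthCheck = (clients, callback) ->
      jobs = clients.map (client) ->
        (cb) -> client.check(cb)
      async.parallel jobs, callback

    healthCheck [
      { check: (cb) -> cb() }
      { check: (cb) -> cb(new Error("backend down")) }
    ], (error) ->
      console.log error?.message # -> "backend down"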
a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -50,6 +50,7 @@ describe "RedisBackend", -> "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } "redis-sharelatex": @redis = createClient: sinon.stub().returns @rclient_redis = {} + activeHealthCheck: sinon.stub() "ioredis": @ioredis = Cluster: Cluster @client = @RedisBackend.createClient() @@ -317,10 +318,40 @@ describe "RedisBackend", -> .calledWith(@rclient_ioredis) .should.equal true - describe "_monitorCluster", -> + describe "_healthCheckNodeRedisClient", -> + beforeEach -> + @redis.activeHealthCheckRedis = sinon.stub().returns @healthCheck = { + isAlive: sinon.stub() + } + + describe "successfully", -> + beforeEach (done) -> + @healthCheck.isAlive.returns true + @redis_client = {} + @client._healthCheckNodeRedisClient(@redis_client, done) + + it "should check the status of the node redis client", -> + @healthCheck.isAlive.called.should.equal true + + it "should only create one health check when called multiple times", (done) -> + @client._healthCheckNodeRedisClient @redis_client, () => + @redis.activeHealthCheckRedis.calledOnce.should.equal true + @healthCheck.isAlive.calledTwice.should.equal true + done() + + describe "when failing", -> + beforeEach -> + @healthCheck.isAlive.returns false + @redis_client = {} + + it "should return an error", (done) -> + @client._healthCheckNodeRedisClient @redis_client, (error) -> + error.message.should.equal "node-redis client failed health check" + done() + + describe "_healthCheckClusterClient", -> beforeEach -> @client.HEARTBEAT_TIMEOUT = 10 - @client.HEARTBEAT_INTERVAL = 100 @nodes = [{ options: key: "node-0" stream: destroy: sinon.stub() @@ -329,38 +360,28 @@ describe "RedisBackend", -> stream: destroy: sinon.stub() }] @rclient_ioredis.nodes = sinon.stub().returns(@nodes) - - describe "successfully", -> - beforeEach -> - @nodes[0].ping = (cb) -> cb() - @nodes[1].ping = (cb) -> cb() - @client._monitorCluster(@rclient_ioredis) + + describe "when both clients are successful", -> + beforeEach (done) -> + @nodes[0].ping = sinon.stub().yields() + @nodes[1].ping = sinon.stub().yields() + @client._healthCheckClusterClient({ rclient: @rclient_ioredis }, done) - it "should get all nodes", -> - setTimeout () => - @rclient_ioredis.nodes - .calledWith("all") - .should.equal true - , 200 + it "should get all cluster nodes", -> + @rclient_ioredis.nodes + .calledWith("all") + .should.equal true - it "should not reset the node connections", (done) -> - setTimeout () => - @nodes[0].stream.destroy.called.should.equal false - @nodes[1].stream.destroy.called.should.equal false - done() - , 200 + it "should ping each cluster node", -> + for node in @nodes + node.ping.called.should.equal true describe "when ping fails to a node", -> beforeEach -> @nodes[0].ping = (cb) -> cb() @nodes[1].ping = (cb) -> # Just hang - @client._monitorCluster(@rclient_ioredis) - it "should reset the failing node connection", (done) -> - setTimeout () => - @nodes[0].stream.destroy.called.should.equal false - @nodes[1].stream.destroy.called.should.equal true + it "should return an error", -> + @client._healthCheckClusterClient { rclient: @rclient_ioredis }, (error) -> + error.message.should.equal "ioredis node ping check timed out" done() - , 200 - - \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee 
b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 9022163da6..d5b8fbe5ec 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -10,7 +10,6 @@ describe "RedisManager", -> @rclient = auth: () -> exec: sinon.stub() - monitorTcpAndReconnect: () -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: "./RedisBackend": From e04c946ecdc7902c26c30d9058073e0459c4cf5f Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 23 Jun 2016 18:04:26 +0100 Subject: [PATCH 125/769] Fix unit tests --- .../coffee/RedisBackend/RedisBackendTests.coffee | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index 648395cd1a..9ad9ea5ad4 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -307,16 +307,6 @@ describe "RedisBackend", -> err: @error }, "error in redis backend") .should.equal true - - describe "monitorTcpAndReconnect", -> - beforeEach -> - @client._monitorCluster = sinon.stub() - @client.monitorTcpAndReconnect() - - it "should monitor the cluster client", -> - @client._monitorCluster - .calledWith(@rclient_ioredis) - .should.equal true describe "_healthCheckNodeRedisClient", -> beforeEach -> @@ -381,7 +371,7 @@ describe "RedisBackend", -> @nodes[0].ping = (cb) -> cb() @nodes[1].ping = (cb) -> # Just hang - it "should return an error", -> + it "should return an error", (done) -> @client._healthCheckClusterClient { rclient: @rclient_ioredis }, (error) -> error.message.should.equal "ioredis node ping check timed out" done() From 0b9e85ea506d56ec86b709f842f4998c7b4c73e0 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 28 Jun 2016 15:47:06 +0100 Subject: [PATCH 126/769] Update ioredis version --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 6872d5f332..a9d610a4c1 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -10,7 +10,7 @@ "async": "2.0.0-rc.5", "coffee-script": "1.4.0", "express": "3.3.4", - "ioredis": "^2.0.1", + "ioredis": "^2.2.0", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", From dbe03e2757e8330ceda15f3d8f4f4e18c6ad9ee0 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 28 Jun 2016 16:49:15 +0100 Subject: [PATCH 127/769] Track metrics of when backends match or disagree --- .../app/coffee/RedisBackend.coffee | 5 ++- .../RedisBackend/RedisBackendTests.coffee | 34 ++++++++++--------- 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index ca9e3de9e7..807737510e 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -2,6 +2,7 @@ Settings = require "settings-sharelatex" async = require "async" _ = require "underscore" logger = 
require "logger-sharelatex" +Metrics = require "metrics-sharelatex" class Client constructor: (@clients) -> @@ -136,7 +137,9 @@ compareResults = (results) -> first = results[0] for result in results.slice(1) if not _.isEqual(first, result) - logger.warn { results }, "redis return values do not match" + Metrics.inc "backend-conflict" + else + Metrics.inc "backend-match" module.exports = createClient: () -> diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index 9ad9ea5ad4..a5cf885296 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -53,6 +53,8 @@ describe "RedisBackend", -> activeHealthCheck: sinon.stub() "ioredis": @ioredis = Cluster: Cluster + "metrics-sharelatex": + @Metrics = inc: sinon.stub() @client = @RedisBackend.createClient() @doc_id = "mock-doc-id" @@ -96,6 +98,11 @@ describe "RedisBackend", -> @rclient_ioredis.get .calledWith("doclines:{#{@doc_id}}") .should.equal true + + it "should send a metric", -> + @Metrics.inc + .calledWith("backend-match") + .should.equal true describe "with different results", -> beforeEach (done) -> @@ -110,14 +117,9 @@ describe "RedisBackend", -> it "should return the primary result", -> @result.should.equal "primary-result" - it "should log out the difference", -> - @logger.warn - .calledWith({ - results: [ - "primary-result", - "secondary-result" - ] - }, "redis return values do not match") + it "should send a metric", -> + @Metrics.inc + .calledWith("backend-conflict") .should.equal true describe "when the secondary errors", -> @@ -233,6 +235,11 @@ describe "RedisBackend", -> @rclient_ioredis.exec .called .should.equal true + + it "should send a metric", -> + @Metrics.inc + .calledWith("backend-match") + .should.equal true describe "with different results", -> beforeEach (done) -> @@ -251,14 +258,9 @@ describe "RedisBackend", -> it "should return the primary result", -> @result.should.deep.equal [@doclines, @version] - it "should log out the difference", -> - @logger.warn - .calledWith({ - results: [ - [@doclines, @version], - ["different-doc-lines", @version] - ] - }, "redis return values do not match") + it "should send a metric", -> + @Metrics.inc + .calledWith("backend-conflict") .should.equal true describe "when the secondary errors", -> From fde334acc46372d41819e0f04b5572c7e9fbdf34 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 29 Jun 2016 12:57:56 +0100 Subject: [PATCH 128/769] Create script that will ensure data is migrate between redis backends --- .../app/coffee/RedisBackend.coffee | 2 + .../app/coffee/RedisManager.coffee | 25 +++++++ services/document-updater/migrate.coffee | 29 ++++++++ .../RedisManager/RedisManagerTests.coffee | 67 ++++++++++++++++++- 4 files changed, 122 insertions(+), 1 deletion(-) create mode 100644 services/document-updater/migrate.coffee diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 807737510e..7779f2f7cf 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -61,6 +61,7 @@ class MultiClient jobs = @clients.map (client) -> (cb) -> client.rclient.exec (error, result) -> + console.log "EXEC [#{client.driver}]" if client.driver == "ioredis" # ioredis returns an results like: # [ [null, 
42], [null, "foo"] ] @@ -126,6 +127,7 @@ for command, key_pos of COMMANDS MultiClient.prototype[command] = (args...) -> for client in @clients + console.log "COMMAND [#{client.driver}]", command, args key_builder = args[key_pos] key = key_builder(client.key_schema) args_with_key = args.slice(0) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index f8b109ca17..257b889680 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -119,3 +119,28 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback + + getAndSetDoc: (doc_id, callback = (error, project_id) ->) -> + multi = rclient.multi() + multi.get keys.docLines(doc_id:doc_id) + multi.get keys.docVersion(doc_id:doc_id) + multi.lrange keys.docOps(doc_id:doc_id), 0, -1 + multi.get keys.projectKey(doc_id:doc_id) + multi.exec (error, results = []) -> + return callback(error) if error? + [lines, version, ops, project_id] = results + multi = rclient.multi() + multi.set keys.docLines(doc_id:doc_id), lines + multi.set keys.docVersion(doc_id:doc_id), version + multi.del keys.docOps(doc_id:doc_id) + if ops.length > 0 + multi.rpush keys.docOps(doc_id:doc_id), ops... + multi.set keys.projectKey(doc_id:doc_id), project_id + multi.exec (error) -> + return callback(error) if error? + return callback null, project_id + + getAndSetProject: (project_id, callback = (error) ->) -> + rclient.smembers keys.docsInProject(project_id: project_id), (error, doc_ids) -> + return callback(error) if error? + rclient.sadd keys.docsInProject(project_id: project_id), doc_ids..., callback diff --git a/services/document-updater/migrate.coffee b/services/document-updater/migrate.coffee new file mode 100644 index 0000000000..13f20e50c2 --- /dev/null +++ b/services/document-updater/migrate.coffee @@ -0,0 +1,29 @@ +RedisManager = require "./app/coffee/RedisManager" +UpdateManager = require "./app/coffee/UpdateManager" +LockManager = require "./app/coffee/LockManager" + +handleErrorInsideLock = (doc_id, lockValue, original_error, callback = (error) ->) -> + LockManager.releaseLock doc_id, lockValue, (lock_error) -> + callback(original_error) + +migrateDoc = (doc_id, callback = (error) ->) -> + LockManager.getLock doc_id, (error, lockValue) -> + return callback(error) if error? + RedisManager.getAndSetDoc doc_id, (error, project_id) -> + return handleErrorInsideLock(doc_id, lockValue, error, callback) if error? + RedisManager.getAndSetProject project_id, (error) -> + return handleErrorInsideLock(doc_id, lockValue, error, callback) if error? + LockManager.releaseLock doc_id, lockValue, (error) -> + return callback(error) if error? + UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback + +doc_id = process.argv[2] +if !doc_id? + console.log "Usage: coffee migrate.coffee DOC_ID" + process.exit(1) + +migrateDoc doc_id, (error) -> + throw error if error? 
+ setTimeout () -> + process.exit(0) + , 200 \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index d5b8fbe5ec..6ca5250050 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -229,4 +229,69 @@ describe "RedisManager", -> it "should remove the doc_id from the project set", -> @rclient.srem .calledWith("DocsIn:#{@project_id}", @doc_id) - .should.equal true \ No newline at end of file + .should.equal true + + describe "getAndSetDoc", -> + beforeEach -> + @rclient.get = sinon.stub() + @rclient.lrange = sinon.stub() + @rclient.del = sinon.stub() + @rclient.set = sinon.stub() + @rclient.rpush = sinon.stub() + @rclient.exec = sinon.stub() + @rclient.exec.yields(null, [ + @lines = '["mock","lines"]', + @version = 42, + @doc_ops = ["mock", "doc", "ops"], + @project_id = "mock-project-id" + ]) + @RedisManager.getAndSetDoc @doc_id, @callback + + it "should get the original values", -> + @rclient.get + .calledWith("doclines:#{@doc_id}") + .should.equal true + @rclient.get + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + @rclient.get + .calledWith("ProjectId:#{@doc_id}") + .should.equal true + @rclient.lrange + .calledWith("DocOps:#{@doc_id}", 0, -1) + .should.equal true + + it "should set the doclines again", -> + @rclient.set + .calledWith("doclines:#{@doc_id}", @lines) + .should.equal true + + it "should set the DocVersion again", -> + @rclient.set + .calledWith("DocVersion:#{@doc_id}", @version) + .should.equal true + + it "should set the project id again", -> + @rclient.set + .calledWith("ProjectId:#{@doc_id}", @project_id) + .should.equal true + + it "should set the doc ops again", -> + @rclient.del + .calledWith("DocOps:#{@doc_id}") + .should.equal true + @rclient.rpush + .calledWith("DocOps:#{@doc_id}", @doc_ops...) + .should.equal true + + describe "getAndSetProject", -> + beforeEach -> + @rclient.smembers = sinon.stub() + @rclient.sadd = sinon.stub() + @rclient.smembers.withArgs("DocsIn:#{@project_id}").yields(null, @doc_ids = ["mock-doc-1", "mock-doc-2"]) + @RedisManager.getAndSetProject @project_id, @callback + + it "should set the doc ids again", -> + @rclient.sadd + .calledWith("DocsIn:#{@project_id}", @doc_ids...) + .should.equal true From 149351aa49f8a975ad7950209d37844c51f546b7 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 29 Jun 2016 21:18:13 +0100 Subject: [PATCH 129/769] Remove debugging log lines --- services/document-updater/app/coffee/RedisBackend.coffee | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 7779f2f7cf..807737510e 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -61,7 +61,6 @@ class MultiClient jobs = @clients.map (client) -> (cb) -> client.rclient.exec (error, result) -> - console.log "EXEC [#{client.driver}]" if client.driver == "ioredis" # ioredis returns results like: # [ [null, 42], [null, "foo"] ] @@ -127,7 +126,6 @@ for command, key_pos of COMMANDS MultiClient.prototype[command] = (args...)
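One detail of migrate.coffee above deserves a note: when a step fails inside the lock, handleErrorInsideLock still releases the lock but reports the original error, not the release error. A minimal sketch of that pattern with a generic doWork placeholder (the names here are illustrative, not part of the service):

    runWithLock = (LockManager, doc_id, doWork, callback) ->
      LockManager.getLock doc_id, (error, lockValue) ->
        return callback(error) if error?
        doWork (workError) ->
          LockManager.releaseLock doc_id, lockValue, (releaseError) ->
            # Prefer the work's failure; only report a release failure
            # if the work itself succeeded.
            callback(workError or releaseError)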
-> for client in @clients - console.log "COMMAND [#{client.driver}]", command, args key_builder = args[key_pos] key = key_builder(client.key_schema) args_with_key = args.slice(0) From dfd45bd23caa8bf233ba1b7ba44864de02179ef6 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 4 Jul 2016 11:14:23 +0100 Subject: [PATCH 130/769] Add timers to time how long each redis request takes --- .../app/coffee/RedisBackend.coffee | 6 ++++- .../RedisBackend/RedisBackendTests.coffee | 26 ++++++++++++++++++- 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 807737510e..92a4b89627 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -60,7 +60,9 @@ class MultiClient exec: (callback) -> jobs = @clients.map (client) -> (cb) -> + timer = new Metrics.Timer("redis.#{client.driver}.exec") client.rclient.exec (error, result) -> + timer.done() if client.driver == "ioredis" # ioredis returns results like: # [ [null, 42], [null, "foo"] ] @@ -112,7 +114,9 @@ for command, key_pos of COMMANDS key = key_builder(client.key_schema) args_with_key = args.slice(0) args_with_key[key_pos] = key + timer = new Metrics.Timer("redis.#{client.driver}.#{command}") client.rclient[command] args_with_key..., (error, result...) -> + timer.done() if client.primary # Return this result as the actual result callback(error, result...) @@ -158,7 +162,7 @@ module.exports = if config[key]? redis_config[key] = config[key] rclient = require("redis-sharelatex").createClient(redis_config) - driver = "redis" + driver = "noderedis" return { rclient: rclient key_schema: config.key_schema diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index a5cf885296..5493f3ef6d 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -44,6 +44,11 @@ describe "RedisBackend", -> test_context.rclient_ioredis = @ nodes: sinon.stub() + + @timer = timer = sinon.stub() + class Timer + constructor: (args...) -> timer(args...)
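The timing pattern above is simple to reuse: construct a named timer before the call, then call done() when the callback fires. A minimal sketch with a hand-rolled Timer standing in for the metrics library (metrics-sharelatex's own Timer is assumed, not shown, to behave similarly):

    class Timer
      constructor: (@key) ->
        @start = Date.now()
      done: () ->
        console.log "#{@key} took #{Date.now() - @start}ms"

    timedGet = (rclient, key, callback) ->
      timer = new Timer("redis.get")
      rclient.get key, (error, value) ->
        timer.done() # record the duration on success and on error alike
        callback(error, value)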
+ done: () -> @RedisBackend = SandboxedModule.require modulePath, requires: "settings-sharelatex": @Settings @@ -54,7 +59,10 @@ describe "RedisBackend", -> "ioredis": @ioredis = Cluster: Cluster "metrics-sharelatex": - @Metrics = inc: sinon.stub() + @Metrics = + inc: sinon.stub() + Timer: Timer + @client = @RedisBackend.createClient() @doc_id = "mock-doc-id" @@ -103,6 +111,14 @@ describe "RedisBackend", -> @Metrics.inc .calledWith("backend-match") .should.equal true + + it "should time the commands", -> + @timer + .calledWith("redis.ioredis.get") + .should.equal true + @timer + .calledWith("redis.noderedis.get") + .should.equal true describe "with different results", -> beforeEach (done) -> @@ -240,6 +256,14 @@ describe "RedisBackend", -> @Metrics.inc .calledWith("backend-match") .should.equal true + + it "should time the exec", -> + @timer + .calledWith("redis.ioredis.exec") + .should.equal true + @timer + .calledWith("redis.noderedis.exec") + .should.equal true describe "with different results", -> beforeEach (done) -> From e912ccc562c1ad7d14ff48e5e06612b716962b37 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 5 Jul 2016 16:07:47 +0100 Subject: [PATCH 131/769] Wait for both backends to return so that they are always in sync --- .../app/coffee/RedisBackend.coffee | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 92a4b89627..b414b0c4cf 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -58,6 +58,8 @@ class MultiClient constructor: (@clients) -> exec: (callback) -> + primaryError = null + primaryResult = null jobs = @clients.map (client) -> (cb) -> timer = new Metrics.Timer("redis.#{client.driver}.exec") @@ -80,15 +82,15 @@ class MultiClient result = filtered_result if client.primary - # Return this result as the actual result - callback(error, result) - # Send the rest through for comparison + primaryError = error + primaryResult = result cb(error, result) async.parallel jobs, (error, results) -> if error? logger.error {err: error}, "error in redis backend" else compareResults(results) + callback(primaryError, primaryResult) COMMANDS = { "get": 0, @@ -108,6 +110,8 @@ COMMANDS = { for command, key_pos of COMMANDS do (command, key_pos) -> Client.prototype[command] = (args..., callback) -> + primaryError = null + primaryResult = null jobs = @clients.map (client) -> (cb) -> key_builder = args[key_pos] @@ -118,15 +122,15 @@ for command, key_pos of COMMANDS client.rclient[command] args_with_key..., (error, result...) -> timer.done() if client.primary - # Return this result as the actual result - callback(error, result...) - # Send the rest through for comparison + primaryError = error + primaryResult = result cb(error, result...) async.parallel jobs, (error, results) -> if error? logger.error {err: error}, "error in redis backend" else compareResults(results) + callback(primaryError, primaryResult...) MultiClient.prototype[command] = (args...) 
-> for client in @clients @@ -141,6 +145,7 @@ compareResults = (results) -> first = results[0] for result in results.slice(1) if not _.isEqual(first, result) + logger.error results: results, "redis backend conflict" Metrics.inc "backend-conflict" else Metrics.inc "backend-match" From 508a95c19b1e0646d9c750c24ea8940c7d90d4c7 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 6 Jul 2016 11:50:02 +0100 Subject: [PATCH 132/769] Use the main redis instance for locks --- services/document-updater/app/coffee/LockManager.coffee | 7 +++++-- .../test/unit/coffee/LockManager/CheckingTheLock.coffee | 4 +--- .../test/unit/coffee/LockManager/ReleasingTheLock.coffee | 4 +--- .../test/unit/coffee/LockManager/getLockTests.coffee | 4 +--- .../test/unit/coffee/LockManager/tryLockTests.coffee | 4 +--- 5 files changed, 9 insertions(+), 14 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index a1ed9292e9..aae60f123d 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -1,7 +1,7 @@ metrics = require('./Metrics') Settings = require('settings-sharelatex') -rclient = require("./RedisBackend").createClient() -keys = require('./RedisKeyBuilder') +redis = require("redis-sharelatex") +rclient = redis.createClient(Settings.redis.web) logger = require "logger-sharelatex" os = require "os" crypto = require "crypto" @@ -11,6 +11,9 @@ PID = process.pid RND = crypto.randomBytes(4).toString('hex') COUNT = 0 +keys = + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + module.exports = LockManager = LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock MAX_LOCK_WAIT_TIME: 10000 # 10s maximum time to spend trying to get the lock diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee index ea69dcff52..f6670c8b35 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -14,9 +14,7 @@ describe 'LockManager - checking the lock', ()-> mocks = "logger-sharelatex": log:-> - "./RedisKeyBuilder": - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - "./RedisBackend": + "redis-sharelatex": createClient : ()-> auth:-> exists: existsStub diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 3260c2fea9..ed502fb587 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -12,9 +12,7 @@ describe 'LockManager - releasing the lock', ()-> evalStub = sinon.stub().yields(1) mocks = "logger-sharelatex": log:-> - "./RedisKeyBuilder": - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - "./RedisBackend": + "redis-sharelatex": createClient : ()-> auth:-> eval: evalStub diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index 89c08afc2f..84cc3208a3 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -8,9 +8,7 @@ describe 'LockManager - getting the lock', -> beforeEach -> 
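The control flow introduced above is easy to miss: instead of calling back as soon as the primary answers, the primary's error and result are captured, and the callback only fires after async.parallel has seen every backend finish, so the two stores cannot drift mid-request. A stripped-down sketch of that flow, with stand-in backends and a generic operation:

    async = require "async"

    runOnAll = (backends, operation, callback) ->
      primaryError = null
      primaryResult = null
      jobs = backends.map (backend) ->
        (cb) ->
          operation backend, (error, result) ->
            if backend.primary
              primaryError = error
              primaryResult = result
            cb(error, result)
      async.parallel jobs, (error, results) ->
        # results from all backends could be compared here
        callback(primaryError, primaryResult)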
@LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - "./RedisKeyBuilder": - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - "./RedisBackend": + "redis-sharelatex": createClient : () => auth:-> "./Metrics": {inc: () ->} diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index 55af920469..33c3eb3d51 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -8,9 +8,7 @@ describe 'LockManager - trying the lock', -> beforeEach -> @LockManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> - "./RedisKeyBuilder": - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - "./RedisBackend": + "redis-sharelatex": createClient : () => auth:-> set: @set = sinon.stub() From 89f90c1b0458f0cfef4bbabfc2eb318fe9072dbf Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 6 Jul 2016 12:25:36 +0100 Subject: [PATCH 133/769] Timeout secondary requests if they take longer than 200ms --- .../app/coffee/RedisBackend.coffee | 36 ++++++-- .../RedisBackend/RedisBackendTests.coffee | 82 +++++++++++++++++++ 2 files changed, 112 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index b414b0c4cf..da51cbbacf 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -6,6 +6,7 @@ Metrics = require "metrics-sharelatex" class Client constructor: (@clients) -> + @SECONDARY_TIMEOUT = 200 @HEARTBEAT_TIMEOUT = 2000 multi: () -> @@ -56,14 +57,24 @@ class Client class MultiClient constructor: (@clients) -> + @SECONDARY_TIMEOUT = 200 exec: (callback) -> primaryError = null primaryResult = null - jobs = @clients.map (client) -> - (cb) -> + jobs = @clients.map (client) => + (cb) => + cb = _.once(cb) timer = new Metrics.Timer("redis.#{client.driver}.exec") - client.rclient.exec (error, result) -> + + timeout = null + if !client.primary + logger.warn {timeout: @SECONDARY_TIMEOUT}, "starting timeout exec" + timeout = setTimeout () -> + cb(new Error("backend timed out")) + , @SECONDARY_TIMEOUT + + client.rclient.exec (error, result) => timer.done() if client.driver == "ioredis" # ioredis returns an results like: @@ -84,6 +95,8 @@ class MultiClient if client.primary primaryError = error primaryResult = result + if timeout? + clearTimeout(timeout) cb(error, result) async.parallel jobs, (error, results) -> if error? @@ -112,18 +125,29 @@ for command, key_pos of COMMANDS Client.prototype[command] = (args..., callback) -> primaryError = null primaryResult = null - jobs = @clients.map (client) -> - (cb) -> + jobs = @clients.map (client) => + (cb) => + cb = _.once(cb) key_builder = args[key_pos] key = key_builder(client.key_schema) args_with_key = args.slice(0) args_with_key[key_pos] = key timer = new Metrics.Timer("redis.#{client.driver}.#{command}") - client.rclient[command] args_with_key..., (error, result...) -> + + timeout = null + if !client.primary + logger.warn {timeout: @SECONDARY_TIMEOUT}, "starting timeout #{command}" + timeout = setTimeout () -> + cb(new Error("backend timed out")) + , @SECONDARY_TIMEOUT + + client.rclient[command] args_with_key..., (error, result...) => timer.done() if client.primary primaryError = error primaryResult = result + if timeout? 
+ clearTimeout(timeout) cb(error, result...) async.parallel jobs, (error, results) -> if error? diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index 5493f3ef6d..e5d2c1756b 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -200,6 +200,44 @@ describe "RedisBackend", -> @rclient_ioredis.eval .calledWith(@script, @key_count, "Blocking:{#{@doc_id}}", @value) .should.equal true + + describe "when the secondary takes longer than SECONDARY_TIMEOUT", -> + beforeEach (done) -> + @client.SECONDARY_TIMEOUT = 10 + @content = "bar" + @rclient_redis.get = sinon.stub() + @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, @content) + @rclient_ioredis.get = (key, cb) => + key.should.equal "doclines:{#{@doc_id}}" + setTimeout () => + cb(null, @content) + , @client.SECONDARY_TIMEOUT * 2 + @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => + done(error) + + it "should log out an error for the backend", -> + @logger.error + .calledWith({err: new Error("backend timed out")}, "error in redis backend") + .should.equal true + + describe "when the primary takes longer than SECONDARY_TIMEOUT", -> + beforeEach (done) -> + @client.SECONDARY_TIMEOUT = 10 + @content = "bar" + @rclient_ioredis.get = sinon.stub() + @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, @content) + @rclient_redis.get = (key, cb) => + key.should.equal "doclines:#{@doc_id}" + setTimeout () => + cb(null, @content) + , @client.SECONDARY_TIMEOUT * 2 + @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => + done(error) + + it "should not consider this an error", -> + @logger.error + .called + .should.equal false describe "multi commands", -> beforeEach -> @@ -333,6 +371,50 @@ describe "RedisBackend", -> err: @error }, "error in redis backend") .should.equal true + + describe "when the secondary takes longer than SECONDARY_TIMEOUT", -> + beforeEach (done) -> + @rclient_redis.get = sinon.stub() + @rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version]) + @rclient_ioredis.get = sinon.stub() + @rclient_ioredis.exec = (cb) => + setTimeout () => + cb(null, [ [null, @doclines], [null, @version] ]) + , 20 + + multi = @client.multi() + multi.SECONDARY_TIMEOUT = 10 + multi.get RedisKeyBuilder.docLines({doc_id: @doc_id}) + multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id}) + multi.exec (error, @result) => + done(error) + + it "should log out an error for the backend", -> + @logger.error + .calledWith({err: new Error("backend timed out")}, "error in redis backend") + .should.equal true + + describe "when the primary takes longer than SECONDARY_TIMEOUT", -> + beforeEach (done) -> + @rclient_redis.get = sinon.stub() + @rclient_redis.exec = (cb) => + setTimeout () => + cb(null, [@doclines, @version]) + , 20 + @rclient_ioredis.get = sinon.stub() + @rclient_ioredis.exec = sinon.stub().yields(null, [ [null, @doclines], [null, @version] ]) + + multi = @client.multi() + multi.SECONDARY_TIMEOUT = 10 + multi.get RedisKeyBuilder.docLines({doc_id: @doc_id}) + multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id}) + multi.exec (error, @result) => + done(error) + + it "should not consider this an error", -> + @logger.error + .called + .should.equal false describe "_healthCheckNodeRedisClient", -> beforeEach -> From 
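Since the timeout and the real reply now race, the callback has to be protected so only the first outcome counts. A minimal sketch of that guard using underscore's once, as the patch does (the slow helper is illustrative):

    _ = require "underscore"

    withTimeout = (timeoutMs, work, callback) ->
      callback = _.once(callback) # whichever fires second is ignored
      timeout = setTimeout () ->
        callback(new Error("timed out"))
      , timeoutMs
      work (error, result) ->
        clearTimeout(timeout)
        callback(error, result)

    slow = (cb) -> setTimeout (() -> cb(null, "late")), 500
    withTimeout 200, slow, (error, result) ->
      console.log error?.message or result # -> "timed out"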
59883023ca7d84d5d735ead96e1ce2911a138515 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 6 Jul 2016 14:24:27 +0100 Subject: [PATCH 134/769] Ignore different order of results from smembers --- .../app/coffee/RedisBackend.coffee | 10 +++++++--- .../RedisBackend/RedisBackendTests.coffee | 19 +++++++++++++++++++ 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index da51cbbacf..b33021689c 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -102,7 +102,7 @@ class MultiClient if error? logger.error {err: error}, "error in redis backend" else - compareResults(results) + compareResults(results, "exec") callback(primaryError, primaryResult) COMMANDS = { @@ -153,7 +153,7 @@ for command, key_pos of COMMANDS if error? logger.error {err: error}, "error in redis backend" else - compareResults(results) + compareResults(results, command) callback(primaryError, primaryResult...) MultiClient.prototype[command] = (args...) -> @@ -164,10 +164,14 @@ for command, key_pos of COMMANDS args_with_key[key_pos] = key client.rclient[command] args_with_key... -compareResults = (results) -> +compareResults = (results, command) -> return if results.length < 2 first = results[0] + if command == "smembers" + first = first.slice().sort() for result in results.slice(1) + if command == "smembers" + result = result.slice().sort() if not _.isEqual(first, result) logger.error results: results, "redis backend conflict" Metrics.inc "backend-conflict" diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index e5d2c1756b..0ffc0d72d7 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -66,6 +66,7 @@ describe "RedisBackend", -> @client = @RedisBackend.createClient() @doc_id = "mock-doc-id" + @project_id = "mock-project-id" it "should create a redis client", -> @redis.createClient @@ -138,6 +139,24 @@ describe "RedisBackend", -> .calledWith("backend-conflict") .should.equal true + describe "with differently ordered results from smembers", -> + beforeEach (done) -> + @rclient_redis.smembers = sinon.stub() + @rclient_redis.smembers.withArgs("DocsIn:#{@project_id}").yields(null, ["one", "two"]) + @rclient_ioredis.smembers = sinon.stub() + @rclient_ioredis.smembers.withArgs("DocsIn:{#{@project_id}}").yields(null, ["two", "one"]) + @client.smembers RedisKeyBuilder.docsInProject({project_id: @project_id}), (error, @result) => + setTimeout () -> # Let all background requests complete + done(error) + + it "should return the primary result", -> + @result.should.deep.equal ["one", "two"] + + it "should send a metric indicating a match", -> + @Metrics.inc + .calledWith("backend-match") + .should.equal true + describe "when the secondary errors", -> beforeEach (done) -> @rclient_redis.get = sinon.stub() From edf1e1ab49b7144a9dd5d1812177f927ec4dc9e3 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 6 Jul 2016 15:02:05 +0100 Subject: [PATCH 135/769] Initialize primaryResult to an array, not null --- services/document-updater/app/coffee/RedisBackend.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee 
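Sorting copies before comparing, as above, is the usual fix for set-valued replies: smembers makes no ordering guarantee, so ["one", "two"] and ["two", "one"] must compare equal, and slicing first avoids mutating the reply itself. The comparison in isolation:

    _ = require "underscore"

    sameMembers = (a, b) ->
      _.isEqual(a.slice().sort(), b.slice().sort())

    console.log sameMembers(["one", "two"], ["two", "one"]) # -> true
    console.log sameMembers(["one", "two"], ["one"]) # -> false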
b/services/document-updater/app/coffee/RedisBackend.coffee index b33021689c..4861694570 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -124,7 +124,7 @@ for command, key_pos of COMMANDS do (command, key_pos) -> Client.prototype[command] = (args..., callback) -> primaryError = null - primaryResult = null + primaryResult = [] jobs = @clients.map (client) => (cb) => cb = _.once(cb) From 1c62a1c5a149a909dc17fed1601f050cc690cbce Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 7 Jul 2016 09:45:55 +0100 Subject: [PATCH 136/769] Don't propagate error on secondary timeout --- .../app/coffee/RedisBackend.coffee | 8 +++---- .../RedisBackend/RedisBackendTests.coffee | 22 ++++++++++++++----- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 4861694570..649086ba09 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -69,9 +69,9 @@ class MultiClient timeout = null if !client.primary - logger.warn {timeout: @SECONDARY_TIMEOUT}, "starting timeout exec" timeout = setTimeout () -> - cb(new Error("backend timed out")) + logger.error {err: new Error("#{client.driver} backend timed out")}, "backend timed out" + cb() , @SECONDARY_TIMEOUT client.rclient.exec (error, result) => @@ -136,9 +136,9 @@ for command, key_pos of COMMANDS timeout = null if !client.primary - logger.warn {timeout: @SECONDARY_TIMEOUT}, "starting timeout #{command}" timeout = setTimeout () -> - cb(new Error("backend timed out")) + logger.error {err: new Error("#{client.driver} backend timed out")}, "backend timed out" + cb() , @SECONDARY_TIMEOUT client.rclient[command] args_with_key..., (error, result...) 
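The one-line change above matters because primaryResult is later splatted into the callback as callback(primaryError, primaryResult...), and a splat needs something array-like; with null the call itself would throw before the callback ever ran. A short illustration:

    cb = (error, args...) -> console.log args.length

    result = []
    cb(null, result...) # fine: logs 0

    result = null
    cb(null, result...) # throws a TypeError before cb can run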
=> diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index 0ffc0d72d7..814a0b932d 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -224,8 +224,11 @@ describe "RedisBackend", -> beforeEach (done) -> @client.SECONDARY_TIMEOUT = 10 @content = "bar" - @rclient_redis.get = sinon.stub() - @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, @content) + @rclient_redis.get = (key, cb) => + key.should.equal "doclines:#{@doc_id}" + setTimeout () => + cb(null, @content) + , @client.SECONDARY_TIMEOUT * 3 # If the secondary errors first, don't affect the primary result @rclient_ioredis.get = (key, cb) => key.should.equal "doclines:{#{@doc_id}}" setTimeout () => @@ -236,8 +239,11 @@ describe "RedisBackend", -> it "should log out an error for the backend", -> @logger.error - .calledWith({err: new Error("backend timed out")}, "error in redis backend") + .calledWith({err: new Error("backend timed out")}, "backend timed out") .should.equal true + + it "should return the primary result", -> + @result.should.equal @content describe "when the primary takes longer than SECONDARY_TIMEOUT", -> beforeEach (done) -> @@ -394,7 +400,10 @@ describe "RedisBackend", -> describe "when the secondary takes longer than SECONDARY_TIMEOUT", -> beforeEach (done) -> @rclient_redis.get = sinon.stub() - @rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version]) + @rclient_redis.exec = (cb) => + setTimeout () => + cb(null, [@doclines, @version]) + , 30 # If secondary errors first, don't affect the primary result @rclient_ioredis.get = sinon.stub() @rclient_ioredis.exec = (cb) => setTimeout () => @@ -410,8 +419,11 @@ describe "RedisBackend", -> it "should log out an error for the backend", -> @logger.error - .calledWith({err: new Error("backend timed out")}, "error in redis backend") + .calledWith({err: new Error("backend timed out")}, "backend timed out") .should.equal true + + it "should return the primary result", -> + @result.should.deep.equal [@doclines, @version] describe "when the primary takes longer than SECONDARY_TIMEOUT", -> beforeEach (done) -> From 6bc78ccf77d2ba8bf9cea6e4cd1488c91c5cc955 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 7 Jul 2016 11:24:33 +0100 Subject: [PATCH 137/769] Add in some null checks on comparing backend results --- services/document-updater/app/coffee/RedisBackend.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 649086ba09..f4c910a2df 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -167,10 +167,10 @@ for command, key_pos of COMMANDS compareResults = (results, command) -> return if results.length < 2 first = results[0] - if command == "smembers" + if command == "smembers" and first? first = first.slice().sort() for result in results.slice(1) - if command == "smembers" + if command == "smembers" and result? 
result = result.slice().sort() if not _.isEqual(first, result) logger.error results: results, "redis backend conflict" From c38d903f92341219552613a6e2e3608fe023f476 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 7 Jul 2016 16:07:14 +0100 Subject: [PATCH 138/769] Basic docker file. --- services/document-updater/.dockerignore | 53 +++++++++++++++++++ services/document-updater/Dockerfile | 12 +++++ .../test/acceptance/docker-entrypoint.sh | 8 +++ 3 files changed, 73 insertions(+) create mode 100644 services/document-updater/.dockerignore create mode 100644 services/document-updater/Dockerfile create mode 100644 services/document-updater/test/acceptance/docker-entrypoint.sh diff --git a/services/document-updater/.dockerignore b/services/document-updater/.dockerignore new file mode 100644 index 0000000000..ad21f261b4 --- /dev/null +++ b/services/document-updater/.dockerignore @@ -0,0 +1,53 @@ +compileFolder + +Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### +*.log +*.sql +*.sqlite + +# OS generated files # +###################### +.DS_Store? +ehthumbs.db +Icon? +Thumbs.db + +/node_modules/* + +app.js +app/js/* + +test/unit/js/* +test/acceptance/js/* + +forever/ + +**.swp + +# Redis cluster +**/appendonly.aof +**/dump.rdb +**/nodes.conf diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile new file mode 100644 index 0000000000..a4133cb030 --- /dev/null +++ b/services/document-updater/Dockerfile @@ -0,0 +1,12 @@ +FROM ubuntu + +COPY ./test/acceptance/docker-entrypoint.sh /entrypoint.sh + +RUN apt-get update && apt-get upgrade +RUN apt-get install build-essential redis-server mongodb-server nodejs npm +RUN ln -s /usr/bin/nodejs /usr/bin/node + +RUN mkdir /document-updater +VOLUME /document-updater + +ENTRYPOINT /entrypoint.sh \ No newline at end of file diff --git a/services/document-updater/test/acceptance/docker-entrypoint.sh b/services/document-updater/test/acceptance/docker-entrypoint.sh new file mode 100644 index 0000000000..04453f402c --- /dev/null +++ b/services/document-updater/test/acceptance/docker-entrypoint.sh @@ -0,0 +1,8 @@ +#! 
/usr/bin/env bash + +service redis-server start +service mongodb start + +cd /document-updater +npm install +grunt test:acceptance:docker From 9bd7c0017dcef9e00534b716745d8c51abc331b2 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 7 Jul 2016 16:29:01 +0100 Subject: [PATCH 139/769] Allow migrate script to process multiple docs at once --- .../config/settings.defaults.coffee | 24 +++++++++---------- services/document-updater/migrate.coffee | 21 ++++++++++------ 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 15456db932..3f7abf90c7 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -32,18 +32,18 @@ module.exports = docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - # }, { - # cluster: [{ - # port: "7000" - # host: "localhost" - # }] - # key_schema: - # blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - # docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - # docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - # docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" - # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" - # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + }, { + cluster: [{ + port: "7000" + host: "localhost" + }] + key_schema: + blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" }] max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/migrate.coffee b/services/document-updater/migrate.coffee index 13f20e50c2..d0ede23a22 100644 --- a/services/document-updater/migrate.coffee +++ b/services/document-updater/migrate.coffee @@ -2,6 +2,8 @@ RedisManager = require "./app/coffee/RedisManager" UpdateManager = require "./app/coffee/UpdateManager" LockManager = require "./app/coffee/LockManager" +async = require "async" + handleErrorInsideLock = (doc_id, lockValue, original_error, callback = (error) ->) -> LockManager.releaseLock doc_id, lockValue, (lock_error) -> callback(original_error) @@ -17,13 +19,18 @@ migrateDoc = (doc_id, callback = (error) ->) -> return callback(error) if error? UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback -doc_id = process.argv[2] -if !doc_id? - console.log "Usage: coffee migrate.coffee DOC_ID" +doc_ids = process.argv.slice(2) +if doc_ids.length == 0 + console.log "Usage: coffee migrate.coffee DOC_ID [DOC_ID ...]" process.exit(1) -migrateDoc doc_id, (error) -> +jobs = [] +for doc_id in doc_ids + do (doc_id) -> + jobs.push (cb) -> + console.log "MIGRATING #{doc_id}" + migrateDoc doc_id, cb + +async.series jobs, (error) -> throw error if error?
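A note on the loop above: the do (doc_id) wrapper captures each loop variable for its job's closure, and async.series runs the jobs strictly one after another, presumably so each document's lock is taken and released before the next migration starts. The same shape in isolation, with illustrative ids:

    async = require "async"

    items = ["doc-1", "doc-2", "doc-3"] # illustrative ids
    jobs = []
    for item in items
      do (item) -> # capture the loop variable for the closure below
        jobs.push (cb) ->
          console.log "processing #{item}"
          setImmediate cb

    async.series jobs, (error) ->
      throw error if error?
      console.log "all done"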
- setTimeout () -> - process.exit(0) - , 200 \ No newline at end of file + process.exit(0) From 8b090c0a601af9cde5d2f543423106a4749b2e78 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 7 Jul 2016 16:41:38 +0100 Subject: [PATCH 140/769] Don't call sadd with blank args --- services/document-updater/app/coffee/RedisManager.coffee | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 257b889680..6878d8a17d 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -143,4 +143,7 @@ module.exports = RedisManager = getAndSetProject: (project_id, callback = (error) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), (error, doc_ids) -> return callback(error) if error? - rclient.sadd keys.docsInProject(project_id: project_id), doc_ids..., callback + if doc_ids.length > 0 + rclient.sadd keys.docsInProject(project_id: project_id), doc_ids..., callback + else + callback() From bca8f6c37665ebed9c064340ce841f06730702ff Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 8 Jul 2016 10:51:22 +0100 Subject: [PATCH 141/769] working docker image which can run the acceptance tests --- services/document-updater/Dockerfile | 6 ++++-- .../test/acceptance/docker-entrypoint.sh | 9 ++++++++- .../test/acceptance/scripts/full-test.sh | 18 ++++++++++++++++++ 3 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 services/document-updater/test/acceptance/scripts/full-test.sh diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index a4133cb030..71bafbd05b 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -1,10 +1,12 @@ FROM ubuntu COPY ./test/acceptance/docker-entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh -RUN apt-get update && apt-get upgrade -RUN apt-get install build-essential redis-server mongodb-server nodejs npm +RUN apt-get update && apt-get upgrade -y +RUN apt-get install -y build-essential redis-server mongodb-server nodejs npm RUN ln -s /usr/bin/nodejs /usr/bin/node +RUN npm install -g grunt-cli RUN mkdir /document-updater VOLUME /document-updater diff --git a/services/document-updater/test/acceptance/docker-entrypoint.sh b/services/document-updater/test/acceptance/docker-entrypoint.sh index 04453f402c..e42c3cc391 100644 --- a/services/document-updater/test/acceptance/docker-entrypoint.sh +++ b/services/document-updater/test/acceptance/docker-entrypoint.sh @@ -5,4 +5,11 @@ service mongodb start cd /document-updater npm install -grunt test:acceptance:docker + + +source ./test/acceptance/scripts/full-test.sh + +service redis-server stop +service mongodb stop + +exit 0 diff --git a/services/document-updater/test/acceptance/scripts/full-test.sh b/services/document-updater/test/acceptance/scripts/full-test.sh new file mode 100644 index 0000000000..7a90c95b84 --- /dev/null +++ b/services/document-updater/test/acceptance/scripts/full-test.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +echo ">> Starting server..." + +grunt >> /dev/null & +_pid="$!" + +echo ">> Server started with pid: $_pid" + +sleep 20 + +echo ">> Running acceptance tests..." 
+grunt test:acceptance + +echo ">> Killing server (pid: $_pid)" +kill -1 "$_pid" + +echo ">> Done" From bd392f568207b3df7ca1e85001d1fdef43aa4f74 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 8 Jul 2016 10:54:56 +0100 Subject: [PATCH 142/769] Comment out cluster config (again...) --- .../config/settings.defaults.coffee | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 3f7abf90c7..15456db932 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -32,18 +32,18 @@ module.exports = docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - }, { - cluster: [{ - port: "7000" - host: "localhost" - }] - key_schema: - blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" - projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" - docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + # }, { + # cluster: [{ + # port: "7000" + # host: "localhost" + # }] + # key_schema: + # blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + # docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + # docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + # docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" }] max_doc_length: 2 * 1024 * 1024 # 2mb From 4f34cb7363dde4986fc6de6a1fe55ae3efe1ed11 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 8 Jul 2016 11:45:36 +0100 Subject: [PATCH 143/769] Only set keys in migration if they exist --- services/document-updater/app/coffee/RedisManager.coffee | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 6878d8a17d..2e59fefd57 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -130,12 +130,15 @@ module.exports = RedisManager = return callback(error) if error? [lines, version, ops, project_id] = results multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), lines - multi.set keys.docVersion(doc_id:doc_id), version + if lines? + multi.set keys.docLines(doc_id:doc_id), lines + if version? + multi.set keys.docVersion(doc_id:doc_id), version multi.del keys.docOps(doc_id:doc_id) if ops.length > 0 multi.rpush keys.docOps(doc_id:doc_id), ops... - multi.set keys.projectKey(doc_id:doc_id), project_id + if project_id? + multi.set keys.projectKey(doc_id:doc_id), project_id multi.exec (error) -> return callback(error) if error? 
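
Patch 143 above works because a Redis MULTI is assembled imperatively: commands are only queued when called, so the transaction can be built up conditionally and absent values are never written back. A minimal sketch of that shape, assuming an ioredis-style `rclient` and illustrative key names:

    writeBack = (rclient, doc_id, lines, version, callback = (error) ->) ->
        multi = rclient.multi()
        # Queue a SET only for values that were actually present in Redis,
        # so keys that were missing before the migration stay missing.
        multi.set "doclines:#{doc_id}", lines if lines?
        multi.set "DocVersion:#{doc_id}", version if version?
        multi.exec callback
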
return callback null, project_id From 879482a955d5ca55a8ddfbdf1c518dcd0170948b Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 8 Jul 2016 12:09:01 +0100 Subject: [PATCH 144/769] grunt scripts to run the docker acceptance tests --- services/document-updater/Gruntfile.coffee | 36 +++++++++++++++++-- services/document-updater/package.json | 3 +- .../test/acceptance/docker-entrypoint.sh | 3 -- 3 files changed, 36 insertions(+), 6 deletions(-) diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 3497455a57..e31eb742cf 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -6,7 +6,8 @@ module.exports = (grunt) -> grunt.loadNpmTasks 'grunt-execute' grunt.loadNpmTasks 'grunt-bunyan' grunt.loadNpmTasks 'grunt-forever' - + grunt.loadNpmTasks 'grunt-shell' + grunt.initConfig forever: app: @@ -67,6 +68,21 @@ module.exports = (grunt) -> grep: grunt.option("grep") timeout: 10000 + shell: + fullAcceptanceTests: + command: "bash ./test/acceptance/scripts/full-test.sh" + buildDockerImage: + command: """ + if [ -z $(docker images | awk \'{ print $1 }\' | grep sharelatex-docupdater-tests) ]; + then + docker build . -t sharelatex-docupdater-tests; + else + echo ">> docker image \'sharelatex-docupdater-tests\' already exists"; + fi + """ + dockerTests: + command: 'docker run -v "$(pwd):/document-updater" --rm --name doc-updater-test sharelatex-docupdater-tests' + availabletasks: tasks: options: @@ -111,8 +127,24 @@ module.exports = (grunt) -> grunt.registerTask 'install', "Compile everything when installing as an npm module", ['compile'] grunt.registerTask 'test:unit', 'Run the unit tests (use --grep= for individual tests)', ['compile:server', 'compile:unit_tests', 'mochaTest:unit'] + + grunt.registerTask( + 'test:acceptance:full', + "Start server and run acceptance tests", + ['shell:fullAcceptanceTests'] + ) + grunt.registerTask( + 'test:acceptance:buildDockerImage', + "Build docker image for acceptance tests", + ['shell:buildDockerImage'] + ) + grunt.registerTask( + 'test:acceptance:docker', + "Run acceptance tests inside docker container", + ['shell:buildDockerImage', 'shell:dockerTests'] + ) + grunt.registerTask 'test:acceptance', 'Run the acceptance tests (use --grep= for individual tests)', ['compile:acceptance_tests', 'mochaTest:acceptance'] grunt.registerTask 'run', "Compile and run the document-updater-sharelatex server", ['compile', 'bunyan', 'execute'] grunt.registerTask 'default', 'run' - diff --git a/services/document-updater/package.json b/services/document-updater/package.json index a9d610a4c1..3133881691 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -34,6 +34,7 @@ "grunt-contrib-coffee": "~0.10.0", "grunt-execute": "~0.1.5", "grunt-forever": "0.4.1", - "grunt-mocha-test": "~0.9.0" + "grunt-mocha-test": "~0.9.0", + "grunt-shell": "^1.3.0" } } diff --git a/services/document-updater/test/acceptance/docker-entrypoint.sh b/services/document-updater/test/acceptance/docker-entrypoint.sh index e42c3cc391..54a12c0468 100644 --- a/services/document-updater/test/acceptance/docker-entrypoint.sh +++ b/services/document-updater/test/acceptance/docker-entrypoint.sh @@ -9,7 +9,4 @@ npm install source ./test/acceptance/scripts/full-test.sh -service redis-server stop -service mongodb stop - exit 0 From ea41453442d826da9deffa0003e9f9f4b880dfe9 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 8 Jul 2016 13:44:43 +0100 Subject: [PATCH 
145/769] fix docker build command. --- services/document-updater/Gruntfile.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index e31eb742cf..1ed3a1f892 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -75,7 +75,7 @@ module.exports = (grunt) -> command: """ if [ -z $(docker images | awk \'{ print $1 }\' | grep sharelatex-docupdater-tests) ]; then - docker build . -t sharelatex-docupdater-tests; + docker build -t sharelatex-docupdater-tests .; else echo ">> docker image \'sharelatex-docupdater-tests\' already exists"; fi From 6d9aecae4cff9a395028216618f422b84a767ad7 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 8 Jul 2016 14:31:43 +0100 Subject: [PATCH 146/769] capture exit code of the test run. --- services/document-updater/test/acceptance/scripts/full-test.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/test/acceptance/scripts/full-test.sh b/services/document-updater/test/acceptance/scripts/full-test.sh index 7a90c95b84..d4dd5e0419 100644 --- a/services/document-updater/test/acceptance/scripts/full-test.sh +++ b/services/document-updater/test/acceptance/scripts/full-test.sh @@ -11,8 +11,11 @@ sleep 20 echo ">> Running acceptance tests..." grunt test:acceptance +_test_exit_code=$? echo ">> Killing server (pid: $_pid)" kill -1 "$_pid" echo ">> Done" + +exit $_test_exit_code From b16b34d7c61ba51b7df91524f444fb09b91b2289 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 8 Jul 2016 14:50:30 +0100 Subject: [PATCH 147/769] only execute app inside container, instead of compile and execute. --- services/document-updater/test/acceptance/scripts/full-test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/scripts/full-test.sh b/services/document-updater/test/acceptance/scripts/full-test.sh index d4dd5e0419..ad8eaea1d6 100644 --- a/services/document-updater/test/acceptance/scripts/full-test.sh +++ b/services/document-updater/test/acceptance/scripts/full-test.sh @@ -2,7 +2,7 @@ echo ">> Starting server..." -grunt >> /dev/null & +grunt execute:app >> /dev/null & _pid="$!" echo ">> Server started with pid: $_pid" From 32d06b805cc4d8128f5c9089fb8e424f97920667 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 8 Jul 2016 15:15:41 +0100 Subject: [PATCH 148/769] run only the acceptance tests inside container, rather than compile and test. --- services/document-updater/test/acceptance/scripts/full-test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/scripts/full-test.sh b/services/document-updater/test/acceptance/scripts/full-test.sh index ad8eaea1d6..32fa62133c 100644 --- a/services/document-updater/test/acceptance/scripts/full-test.sh +++ b/services/document-updater/test/acceptance/scripts/full-test.sh @@ -10,7 +10,7 @@ echo ">> Server started with pid: $_pid" sleep 20 echo ">> Running acceptance tests..." -grunt test:acceptance +grunt mochaTest:acceptance _test_exit_code=$? 
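
The harness above boils down to: start the app in the background, wait for it to boot, run the tests, capture their exit status immediately (before `kill` can overwrite `$?`), then kill the server and exit with the tests' status. For reference, the same choreography sketched in CoffeeScript with `child_process` (an illustration only; the real harness stays in bash):

    {spawn} = require "child_process"

    server = spawn "grunt", ["execute:app"], stdio: "ignore"

    runTests = ->
        tests = spawn "grunt", ["mochaTest:acceptance"], stdio: "inherit"
        tests.on "exit", (code) ->
            server.kill "SIGHUP"     # the equivalent of `kill -1 "$_pid"`
            process.exit code ? 1    # propagate the tests' status; null means they died on a signal

    setTimeout runTests, 20000       # crude boot wait, mirroring `sleep 20`
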
echo ">> Killing server (pid: $_pid)" From 0a5f95f22da7247df34e1feabcd37bddd6ea199f Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 8 Jul 2016 15:52:02 +0100 Subject: [PATCH 149/769] Move docker-entrypoint.sh into the test/acceptance/scripts directory --- services/document-updater/Dockerfile | 2 +- .../test/acceptance/{ => scripts}/docker-entrypoint.sh | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename services/document-updater/test/acceptance/{ => scripts}/docker-entrypoint.sh (100%) diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 71bafbd05b..b92c7d35f1 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -1,6 +1,6 @@ FROM ubuntu -COPY ./test/acceptance/docker-entrypoint.sh /entrypoint.sh +COPY ./test/acceptance/scripts/docker-entrypoint.sh /entrypoint.sh RUN chmod +x /entrypoint.sh RUN apt-get update && apt-get upgrade -y diff --git a/services/document-updater/test/acceptance/docker-entrypoint.sh b/services/document-updater/test/acceptance/scripts/docker-entrypoint.sh similarity index 100% rename from services/document-updater/test/acceptance/docker-entrypoint.sh rename to services/document-updater/test/acceptance/scripts/docker-entrypoint.sh From 4c0dc5a0ef72ea004ed249d5a8395d416a146c70 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 8 Jul 2016 16:05:36 +0100 Subject: [PATCH 150/769] Increase timeout for test. --- .../test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index e3966cabfa..3dbd2f0a3d 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -210,7 +210,7 @@ describe "Applying updates to a doc", -> throw error if error? DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) => throw error if error? 
- setTimeout done, 1000 + setTimeout done, 2000 after -> MockTrackChangesApi.flushDoc.restore() From d0c54f1be64601b10b12363d5eb153626eafb4c7 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 12 Jul 2016 11:04:37 +0100 Subject: [PATCH 151/769] Increase secondary timeout to 600ms --- services/document-updater/app/coffee/RedisBackend.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index f4c910a2df..8dc766ed33 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -6,7 +6,7 @@ Metrics = require "metrics-sharelatex" class Client constructor: (@clients) -> - @SECONDARY_TIMEOUT = 200 + @SECONDARY_TIMEOUT = 600 @HEARTBEAT_TIMEOUT = 2000 multi: () -> From a1bac9719adb2272ef399009ee7c27365318ee49 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 12 Jul 2016 11:45:10 +0100 Subject: [PATCH 152/769] Update secondary timeout for multi commands too --- services/document-updater/app/coffee/RedisBackend.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 8dc766ed33..9ec479c01f 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -57,7 +57,7 @@ class Client class MultiClient constructor: (@clients) -> - @SECONDARY_TIMEOUT = 200 + @SECONDARY_TIMEOUT = 600 exec: (callback) -> primaryError = null From c446b203754d0512d4cbeffaa322ff1d1cd186b9 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 18 Jul 2016 10:36:03 +0100 Subject: [PATCH 153/769] Add alternative test:acceptance:docker command, using separate docker repo --- services/document-updater/Gruntfile.coffee | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 1ed3a1f892..96b1be22fd 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -82,6 +82,8 @@ module.exports = (grunt) -> """ dockerTests: command: 'docker run -v "$(pwd):/document-updater" --rm --name doc-updater-test sharelatex-docupdater-tests' + dockerTests2: + command: 'docker run -v "$(pwd):/app" --rm sl-acceptance-test-runner' availabletasks: tasks: @@ -144,6 +146,12 @@ module.exports = (grunt) -> ['shell:buildDockerImage', 'shell:dockerTests'] ) + grunt.registerTask( + 'test:acceptance:docker2', + "Run acceptance tests inside docker container", + ['shell:dockerTests2'] + ) + grunt.registerTask 'test:acceptance', 'Run the acceptance tests (use --grep= for individual tests)', ['compile:acceptance_tests', 'mochaTest:acceptance'] grunt.registerTask 'run', "Compile and run the document-updater-sharelatex server", ['compile', 'bunyan', 'execute'] From 39cc9cda6d09bdbc4687a5a1488024f98ec5034a Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 18 Jul 2016 11:09:04 +0100 Subject: [PATCH 154/769] Remove docker-related config. `test:acceptance:docker` now depends on `acceptance-test-runner-sharelatex`. 
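
Patches 151 and 152 triple `SECONDARY_TIMEOUT` from 200ms to 600ms. The RedisBackend internals are not shown in this series, but a named secondary timeout implies a guard of roughly this shape: the answer comes from the primary, and a slow secondary is abandoned rather than allowed to block the update path. A generic sketch (all names here are assumptions, not the actual RedisBackend code):

    # Run fn, but report a timeout error if it has not called back
    # within `timeout` ms; whichever outcome happens first wins.
    withTimeout = (fn, timeout, callback) ->
        finished = false
        timer = setTimeout ->
            return if finished
            finished = true
            callback new Error("secondary redis timed out after #{timeout}ms")
        , timeout
        fn (error, result) ->
            return if finished
            finished = true
            clearTimeout timer
            callback error, result
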
--- services/document-updater/Dockerfile | 14 --------- services/document-updater/Gruntfile.coffee | 31 +++---------------- .../acceptance/scripts/docker-entrypoint.sh | 12 ------- 3 files changed, 5 insertions(+), 52 deletions(-) delete mode 100644 services/document-updater/Dockerfile delete mode 100644 services/document-updater/test/acceptance/scripts/docker-entrypoint.sh diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile deleted file mode 100644 index b92c7d35f1..0000000000 --- a/services/document-updater/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM ubuntu - -COPY ./test/acceptance/scripts/docker-entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh - -RUN apt-get update && apt-get upgrade -y -RUN apt-get install -y build-essential redis-server mongodb-server nodejs npm -RUN ln -s /usr/bin/nodejs /usr/bin/node -RUN npm install -g grunt-cli - -RUN mkdir /document-updater -VOLUME /document-updater - -ENTRYPOINT /entrypoint.sh \ No newline at end of file diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 96b1be22fd..042fe32ce2 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -22,7 +22,7 @@ module.exports = (grunt) -> strict: false coffee: - app_dir: + app_dir: expand: true, flatten: false, cwd: 'app/coffee', @@ -30,7 +30,7 @@ module.exports = (grunt) -> dest: 'app/js/', ext: '.js' - app: + app: src: 'app.coffee' dest: 'app.js' @@ -42,7 +42,7 @@ module.exports = (grunt) -> dest: 'test/acceptance/js/', ext: '.js' - unit_tests: + unit_tests: expand: true, flatten: false, cwd: 'test/unit/coffee', @@ -71,18 +71,7 @@ module.exports = (grunt) -> shell: fullAcceptanceTests: command: "bash ./test/acceptance/scripts/full-test.sh" - buildDockerImage: - command: """ - if [ -z $(docker images | awk \'{ print $1 }\' | grep sharelatex-docupdater-tests) ]; - then - docker build -t sharelatex-docupdater-tests .; - else - echo ">> docker image \'sharelatex-docupdater-tests\' already exists"; - fi - """ dockerTests: - command: 'docker run -v "$(pwd):/document-updater" --rm --name doc-updater-test sharelatex-docupdater-tests' - dockerTests2: command: 'docker run -v "$(pwd):/app" --rm sl-acceptance-test-runner' availabletasks: @@ -135,21 +124,11 @@ module.exports = (grunt) -> "Start server and run acceptance tests", ['shell:fullAcceptanceTests'] ) - grunt.registerTask( - 'test:acceptance:buildDockerImage', - "Build docker image for acceptance tests", - ['shell:buildDockerImage'] - ) + grunt.registerTask( 'test:acceptance:docker', "Run acceptance tests inside docker container", - ['shell:buildDockerImage', 'shell:dockerTests'] - ) - - grunt.registerTask( - 'test:acceptance:docker2', - "Run acceptance tests inside docker container", - ['shell:dockerTests2'] + ['shell:dockerTests'] ) grunt.registerTask 'test:acceptance', 'Run the acceptance tests (use --grep= for individual tests)', ['compile:acceptance_tests', 'mochaTest:acceptance'] diff --git a/services/document-updater/test/acceptance/scripts/docker-entrypoint.sh b/services/document-updater/test/acceptance/scripts/docker-entrypoint.sh deleted file mode 100644 index 54a12c0468..0000000000 --- a/services/document-updater/test/acceptance/scripts/docker-entrypoint.sh +++ /dev/null @@ -1,12 +0,0 @@ -#! 
/usr/bin/env bash - -service redis-server start -service mongodb start - -cd /document-updater -npm install - - -source ./test/acceptance/scripts/full-test.sh - -exit 0 From cb62b005f302c030068cd33c46738e963cae685f Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 23 Aug 2016 09:53:17 +0100 Subject: [PATCH 155/769] Add in event loop monitoring --- services/document-updater/app.coffee | 1 + services/document-updater/package.json | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index cb50471965..c716838878 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -18,6 +18,7 @@ Path = require "path" Metrics = require "metrics-sharelatex" Metrics.initialize("doc-updater") Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger) +Metrics.event_loop.monitor(logger, 100) app = express() app.configure -> diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 3133881691..85958950d8 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -13,7 +13,7 @@ "ioredis": "^2.2.0", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", "lynx": "0.0.11", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "mongojs": "0.9.11", "redis-sharelatex": "0.0.9", "request": "2.25.0", From 87f3e5e8095cf8a8fe3ecbfc4780540d86aee754 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 23 Aug 2016 11:03:37 +0100 Subject: [PATCH 156/769] Remove old migration code --- .../app/coffee/RedisManager.coffee | 32 +-------- services/document-updater/migrate.coffee | 36 ---------- .../RedisManager/RedisManagerTests.coffee | 66 +------------------ 3 files changed, 2 insertions(+), 132 deletions(-) delete mode 100644 services/document-updater/migrate.coffee diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 2e59fefd57..cb2e28b296 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -119,34 +119,4 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback - - getAndSetDoc: (doc_id, callback = (error, project_id) ->) -> - multi = rclient.multi() - multi.get keys.docLines(doc_id:doc_id) - multi.get keys.docVersion(doc_id:doc_id) - multi.lrange keys.docOps(doc_id:doc_id), 0, -1 - multi.get keys.projectKey(doc_id:doc_id) - multi.exec (error, results = []) -> - return callback(error) if error? - [lines, version, ops, project_id] = results - multi = rclient.multi() - if lines? - multi.set keys.docLines(doc_id:doc_id), lines - if version? - multi.set keys.docVersion(doc_id:doc_id), version - multi.del keys.docOps(doc_id:doc_id) - if ops.length > 0 - multi.rpush keys.docOps(doc_id:doc_id), ops... - if project_id? - multi.set keys.projectKey(doc_id:doc_id), project_id - multi.exec (error) -> - return callback(error) if error? - return callback null, project_id - - getAndSetProject: (project_id, callback = (error) ->) -> - rclient.smembers keys.docsInProject(project_id: project_id), (error, doc_ids) -> - return callback(error) if error? 
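
Patch 155's one-line change enables `Metrics.event_loop.monitor(logger, 100)` from the bumped metrics-sharelatex. The library's implementation is not shown here, but event-loop monitors of this kind generally work by scheduling a repeating timer and measuring how late it fires: any lateness is time the loop spent blocked on other work. An illustrative sketch of the idea (not the library's actual code):

    monitorEventLoop = (logger, interval = 100) ->
        last = Date.now()
        setInterval ->
            now = Date.now()
            lag = now - last - interval   # how late this tick fired
            logger.warn {lag_ms: lag}, "event loop blocked" if lag > interval
            last = now
        , interval
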
- if doc_ids.length > 0 - rclient.sadd keys.docsInProject(project_id: project_id), doc_ids..., callback - else - callback() + diff --git a/services/document-updater/migrate.coffee b/services/document-updater/migrate.coffee deleted file mode 100644 index d0ede23a22..0000000000 --- a/services/document-updater/migrate.coffee +++ /dev/null @@ -1,36 +0,0 @@ -RedisManager = require "./app/coffee/RedisManager" -UpdateManager = require "./app/coffee/UpdateManager" -LockManager = require "./app/coffee/LockManager" - -async = require "async" - -handleErrorInsideLock = (doc_id, lockValue, original_error, callback = (error) ->) -> - LockManager.releaseLock doc_id, lockValue, (lock_error) -> - callback(original_error) - -migrateDoc = (doc_id, callback = (error) ->) -> - LockManager.getLock doc_id, (error, lockValue) -> - return callback(error) if error? - RedisManager.getAndSetDoc doc_id, (error, project_id) -> - return handleErrorInsideLock(doc_id, lockValue, error, callback) if error? - RedisManager.getAndSetProject project_id, (error) -> - return handleErrorInsideLock(doc_id, lockValue, error, callback) if error? - LockManager.releaseLock doc_id, lockValue, (error) -> - return callback(error) if error? - UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback - -doc_ids = process.argv.slice(2) -if doc_ids.length == 0 - console.log "Usage: coffee migrate.coffee DOC_ID [DOC_ID ...]" - process.exit(1) - -jobs = [] -for doc_id in doc_ids - do (doc_id) -> - jobs.push (cb) -> - console.log "MIGRATING #{doc_id}" - migrateDoc doc_id, cb - -async.series jobs, (error) -> - throw error if error? - process.exit(0) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 6ca5250050..105b391b33 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -230,68 +230,4 @@ describe "RedisManager", -> @rclient.srem .calledWith("DocsIn:#{@project_id}", @doc_id) .should.equal true - - describe "getAndSetDoc", -> - beforeEach -> - @rclient.get = sinon.stub() - @rclient.lrange = sinon.stub() - @rclient.del = sinon.stub() - @rclient.set = sinon.stub() - @rclient.rpush = sinon.stub() - @rclient.exec = sinon.stub() - @rclient.exec.yields(null, [ - @lines = '["mock","lines"]', - @version = 42, - @doc_ops = ["mock", "doc", "ops"], - @project_id = "mock-project-id" - ]) - @RedisManager.getAndSetDoc @doc_id, @callback - - it "should get the original values", -> - @rclient.get - .calledWith("doclines:#{@doc_id}") - .should.equal true - @rclient.get - .calledWith("DocVersion:#{@doc_id}") - .should.equal true - @rclient.get - .calledWith("ProjectId:#{@doc_id}") - .should.equal true - @rclient.lrange - .calledWith("DocOps:#{@doc_id}", 0, -1) - .should.equal true - - it "should set the doclines again", -> - @rclient.set - .calledWith("doclines:#{@doc_id}", @lines) - .should.equal true - - it "should set the DocVersion again", -> - @rclient.set - .calledWith("DocVersion:#{@doc_id}", @version) - .should.equal true - - it "should set the project id again", -> - @rclient.set - .calledWith("ProjectId:#{@doc_id}", @project_id) - .should.equal true - - it "should set the doc ops again", -> - @rclient.del - .calledWith("DocOps:#{@doc_id}") - .should.equal true - @rclient.rpush - .calledWith("DocOps:#{@doc_id}", @doc_ops...) 
- .should.equal true - - describe "getAndSetProject", -> - beforeEach -> - @rclient.smembers = sinon.stub() - @rclient.sadd = sinon.stub() - @rclient.smembers.withArgs("DocsIn:#{@project_id}").yields(null, @doc_ids = ["mock-doc-1", "mock-doc-2"]) - @RedisManager.getAndSetProject @project_id, @callback - - it "should set the doc ids again", -> - @rclient.sadd - .calledWith("DocsIn:#{@project_id}", @doc_ids...) - .should.equal true + From 8779f3f686c0cce325bdb00858ab897a9699be85 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 23 Aug 2016 16:00:46 +0100 Subject: [PATCH 157/769] Only write DocOps atomically with version and lines, after all docs are applied --- .../app/coffee/DocOpsManager.coffee | 17 ---- .../app/coffee/DocumentManager.coffee | 3 +- .../app/coffee/RedisManager.coffee | 33 ++++---- .../app/coffee/ShareJsDB.coffee | 28 +++---- .../app/coffee/ShareJsUpdateManager.coffee | 8 +- .../app/coffee/TrackChangesManager.coffee | 16 +++- .../app/coffee/UpdateManager.coffee | 8 +- .../app/coffee/WebRedisManager.coffee | 6 +- .../coffee/ApplyingUpdatesToADocTests.coffee | 16 +++- .../DocOpsManager/DocOpsManagerTests.coffee | 52 ------------- .../getDocAndRecentOpsTests.coffee | 13 ++-- .../RedisManager/RedisManagerTests.coffee | 78 +++++++++++++------ .../unit/coffee/ShareJsDB/GetOpsTests.coffee | 20 ++--- .../coffee/ShareJsDB/GetSnapshotTests.coffee | 9 ++- .../coffee/ShareJsDB/WriteOpsTests.coffee | 22 ++---- .../ShareJsUpdateManagerTests.coffee | 11 ++- .../TrackChangesManagerTests.coffee | 36 ++++++--- .../UpdateManager/ApplyingUpdates.coffee | 17 +++- .../lockUpdatesAndDoTests.coffee | 5 ++ .../WebRedisManagerTests.coffee | 10 +-- 20 files changed, 205 insertions(+), 203 deletions(-) delete mode 100644 services/document-updater/app/coffee/DocOpsManager.coffee delete mode 100644 services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee diff --git a/services/document-updater/app/coffee/DocOpsManager.coffee b/services/document-updater/app/coffee/DocOpsManager.coffee deleted file mode 100644 index a85a1e18ee..0000000000 --- a/services/document-updater/app/coffee/DocOpsManager.coffee +++ /dev/null @@ -1,17 +0,0 @@ -RedisManager = require "./RedisManager" -TrackChangesManager = require "./TrackChangesManager" - -module.exports = DocOpsManager = - getPreviousDocOps: (project_id, doc_id, start, end, callback = (error, ops) ->) -> - RedisManager.getPreviousDocOps doc_id, start, end, (error, ops) -> - return callback(error) if error? - callback null, ops - - pushDocOp: (project_id, doc_id, op, callback = (error) ->) -> - RedisManager.pushDocOp doc_id, op, (error, version) -> - return callback(error) if error? - TrackChangesManager.pushUncompressedHistoryOp project_id, doc_id, op, (error) -> - return callback(error) if error? 
- callback null, version - - diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 69311bd979..ddeaceea2f 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -1,6 +1,5 @@ RedisManager = require "./RedisManager" PersistenceManager = require "./PersistenceManager" -DocOpsManager = require "./DocOpsManager" DiffCodec = require "./DiffCodec" logger = require "logger-sharelatex" Metrics = require "./Metrics" @@ -37,7 +36,7 @@ module.exports = DocumentManager = if fromVersion == -1 callback null, lines, version, [] else - DocOpsManager.getPreviousDocOps project_id, doc_id, fromVersion, version, (error, ops) -> + RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) -> return callback(error) if error? callback null, lines, version, ops diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index cb2e28b296..6e474c9b96 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -66,12 +66,6 @@ module.exports = RedisManager = version = parseInt(version, 10) callback null, version - setDocument : (doc_id, docLines, version, callback = (error) ->)-> - multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) - multi.set keys.docVersion(doc_id:doc_id), version - multi.exec (error, replys) -> callback(error) - getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) -> rclient.llen keys.docOps(doc_id: doc_id), (error, length) -> return callback(error) if error? @@ -104,18 +98,23 @@ module.exports = RedisManager = DOC_OPS_TTL: 60 * minutes DOC_OPS_MAX_LENGTH: 100 - pushDocOp: (doc_id, op, callback = (error, new_version) ->) -> - jsonOp = JSON.stringify op - multi = rclient.multi() - multi.rpush keys.docOps(doc_id: doc_id), jsonOp - multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL - multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 - multi.incr keys.docVersion(doc_id: doc_id) - multi.exec (error, replys) -> - [_, __, ___, version] = replys + updateDocument : (doc_id, docLines, newVersion, appliedOps = [], callback = (error) ->)-> + RedisManager.getDocVersion doc_id, (error, currentVersion) -> return callback(error) if error? - version = parseInt(version, 10) - callback null, version + if currentVersion + appliedOps.length != newVersion + error = new Error("Version mismatch. '#{doc_id}' is corrupted.") + logger.error {err: error, doc_id, currentVersion, newVersion, opsLength: appliedOps.length}, "version mismatch" + return callback(error) + jsonOps = appliedOps.map (op) -> JSON.stringify op + multi = rclient.multi() + multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) + multi.set keys.docVersion(doc_id:doc_id), newVersion + multi.rpush keys.docOps(doc_id: doc_id), jsonOps... # TODO: Really double check that these are going onto the array in the correct order + multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL + multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 + multi.exec (error, replys) -> + return callback(error) if error? 
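
The consistency check in the new `updateDocument` above rests on a simple invariant: every applied op advances the document version by exactly one, so the version ShareJS reports must equal the version already in Redis plus the number of ops being written. Restated as a worked check (the values are illustrative):

    # invariant: currentVersion + appliedOps.length == newVersion
    consistent = (currentVersion, appliedOps, newVersion) ->
        currentVersion + appliedOps.length == newVersion

    console.log consistent(40, [{}, {}], 42)   # true:  two ops take version 40 to 42
    console.log consistent(40, [{}, {}], 43)   # false: a version was skipped somewhere
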
+            return callback()

     getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) ->
         rclient.smembers keys.docsInProject(project_id: project_id), callback

diff --git a/services/document-updater/app/coffee/ShareJsDB.coffee b/services/document-updater/app/coffee/ShareJsDB.coffee
index f9527ccc0b..3d80c680cb 100644
--- a/services/document-updater/app/coffee/ShareJsDB.coffee
+++ b/services/document-updater/app/coffee/ShareJsDB.coffee
@@ -2,11 +2,16 @@ Keys = require('./UpdateKeys')
 Settings = require('settings-sharelatex')
 DocumentManager = require "./DocumentManager"
 RedisManager = require "./RedisManager"
-DocOpsManager = require "./DocOpsManager"
 Errors = require "./Errors"
 logger = require "logger-sharelatex"

-module.exports = ShareJsDB =
+module.exports = class ShareJsDB
+    constructor: () ->
+        @appliedOps = {}
+        # ShareJS calls this detached from the instance, so we need to
+        # bind it to keep our context that can access @appliedOps
+        @writeOp = @_writeOp.bind(@)
+
     getOps: (doc_key, start, end, callback) ->
         if start == end
             return callback null, []
@@ -18,21 +23,12 @@
             end = -1

         [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
-        DocOpsManager.getPreviousDocOps project_id, doc_id, start, end, (error, ops) ->
-            return callback error if error?
-            callback null, ops
+        RedisManager.getPreviousDocOps doc_id, start, end, callback

-    writeOp: (doc_key, opData, callback) ->
-        [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
-        DocOpsManager.pushDocOp project_id, doc_id, opData, (error, version) ->
-            return callback error if error?
-
-            if version == opData.v + 1
-                callback()
-            else
-                error = new Error("Version mismatch. '#{doc_id}' is corrupted.")
-                logger.error err: error, doc_id: doc_id, project_id: project_id, opVersion: opData.v, expectedVersion: version, "doc is corrupt"
-                callback error
+    _writeOp: (doc_key, opData, callback) ->
+        @appliedOps[doc_key] ?= []
+        @appliedOps[doc_key].push opData
+        callback()

     getSnapshot: (doc_key, callback) ->
         [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
index eb7ad92720..ca00a04ea9 100644
--- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
+++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
@@ -15,7 +15,11 @@ ShareJsModel:: = {}
 util.inherits ShareJsModel, EventEmitter

 module.exports = ShareJsUpdateManager =
-    getNewShareJsModel: () -> new ShareJsModel(ShareJsDB, maxDocLength: Settings.max_doc_length)
+    getNewShareJsModel: () ->
+        db = new ShareJsDB()
+        model = new ShareJsModel(db, maxDocLength: Settings.max_doc_length)
+        model.db = db
+        return model

     applyUpdates: (project_id, doc_id, updates, callback = (error, updatedDocLines) ->) ->
         logger.log project_id: project_id, doc_id: doc_id, updates: updates, "applying sharejs updates"
@@ -51,7 +55,7 @@ module.exports = ShareJsUpdateManager =
             @_sendError(project_id, doc_id, error)
             return callback(error)
         docLines = data.snapshot.split(/\r\n|\n|\r/)
-        callback(null, docLines, data.v)
+        callback(null, docLines, data.v, model.db.appliedOps[doc_key] or [])

     _listenForOps: (model) ->
         model.on "applyOp", (doc_key, opData) ->
diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee
index cc61bdb0ae..7dfc98115a 100644
--- a/services/document-updater/app/coffee/TrackChangesManager.coffee
+++ b/services/document-updater/app/coffee/TrackChangesManager.coffee
@@ -1,6 +1,7 @@
 settings = require "settings-sharelatex"
 request = require "request"
 logger = require "logger-sharelatex"
+async = require "async"
 WebRedisManager = require "./WebRedisManager"

 module.exports = TrackChangesManager =
@@ -21,14 +22,21 @@ module.exports = TrackChangesManager =
             return callback(error)

     FLUSH_EVERY_N_OPS: 50
-    pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error) ->) ->
-        WebRedisManager.pushUncompressedHistoryOp project_id, doc_id, op, (error, length) ->
+    pushUncompressedHistoryOps: (project_id, doc_id, ops, callback = (error) ->) ->
+        WebRedisManager.pushUncompressedHistoryOps project_id, doc_id, ops, (error, length) ->
             return callback(error) if error?
-            if length > 0 and length % TrackChangesManager.FLUSH_EVERY_N_OPS == 0
+            # We want to flush every 50 ops, i.e. 50, 100, 150, etc
+            # Find out which 'block' (i.e. 0-49, 50-99) we were in before and after pushing these
+            # ops. If we've changed, then we've gone over a multiple of 50 and should flush.
+            # (Most of the time, we will only hit 50 and then flushing will put us back to 0)
+            previousLength = length - ops.length
+            prevBlock = Math.floor(previousLength / TrackChangesManager.FLUSH_EVERY_N_OPS)
+            newBlock = Math.floor(length / TrackChangesManager.FLUSH_EVERY_N_OPS)
+            if newBlock != prevBlock
                 # Do this in the background since it uses HTTP and so may be too
                 # slow to wait for when processing a doc update.
                 logger.log length: length, doc_id: doc_id, project_id: project_id, "flushing track changes api"
                 TrackChangesManager.flushDocChanges project_id, doc_id, (error) ->
                     if error?
                         logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api"
-            callback()
+            callback()
\ No newline at end of file
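
The block arithmetic above deserves a worked example: now that several ops can be pushed at once, the queue length can jump straight over a multiple of 50, which the old `length % FLUSH_EVERY_N_OPS == 0` check would silently miss. Comparing which 50-op block the length was in before and after the push catches that case:

    FLUSH_EVERY_N_OPS = 50

    shouldFlush = (newLength, opsPushed) ->
        previousLength = newLength - opsPushed
        prevBlock = Math.floor(previousLength / FLUSH_EVERY_N_OPS)
        newBlock = Math.floor(newLength / FLUSH_EVERY_N_OPS)
        newBlock != prevBlock

    console.log shouldFlush(49, 3)   # false: 46 -> 49 stays inside block 0
    console.log shouldFlush(50, 1)   # true:  49 -> 50, the case the old check handled
    console.log shouldFlush(53, 5)   # true:  48 -> 53 jumps over 50 entirely
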
diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee
index 219c52848b..0b5da21c8f 100644
--- a/services/document-updater/app/coffee/UpdateManager.coffee
+++ b/services/document-updater/app/coffee/UpdateManager.coffee
@@ -2,6 +2,7 @@ LockManager = require "./LockManager"
 RedisManager = require "./RedisManager"
 WebRedisManager = require "./WebRedisManager"
 ShareJsUpdateManager = require "./ShareJsUpdateManager"
+TrackChangesManager = require "./TrackChangesManager"
 Settings = require('settings-sharelatex')
 async = require("async")
 logger = require('logger-sharelatex')
@@ -43,10 +44,13 @@ module.exports = UpdateManager =
     applyUpdates: (project_id, doc_id, updates, callback = (error) ->) ->
         for update in updates or []
             UpdateManager._sanitizeUpdate update
-        ShareJsUpdateManager.applyUpdates project_id, doc_id, updates, (error, updatedDocLines, version) ->
+        ShareJsUpdateManager.applyUpdates project_id, doc_id, updates, (error, updatedDocLines, version, appliedOps) ->
             return callback(error) if error?
             logger.log doc_id: doc_id, version: version, "updating doc via sharejs"
-            RedisManager.setDocument doc_id, updatedDocLines, version, callback
+            # TODO: Do these in parallel? Worry about consistency here?
+            RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, (error) ->
+                return callback(error) if error?
+                TrackChangesManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, callback

     lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) ->
         LockManager.getLock doc_id, (error, lockValue) ->
diff --git a/services/document-updater/app/coffee/WebRedisManager.coffee b/services/document-updater/app/coffee/WebRedisManager.coffee
index a14c2d6c86..85f301752f 100644
--- a/services/document-updater/app/coffee/WebRedisManager.coffee
+++ b/services/document-updater/app/coffee/WebRedisManager.coffee
@@ -22,10 +22,10 @@ module.exports = WebRedisManager =
     getUpdatesLength: (doc_id, callback)->
         rclient.llen "PendingUpdates:#{doc_id}", callback

-    pushUncompressedHistoryOp: (project_id, doc_id, op, callback = (error, length) ->) ->
-        jsonOp = JSON.stringify op
+    pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) ->
+        jsonOps = ops.map (op) -> JSON.stringify op
         async.parallel [
-            (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOp, cb
+            (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOps..., cb
             (cb) -> rclient.sadd "DocsWithHistoryOps:#{project_id}", doc_id, cb
         ], (error, results) ->
             return callback(error) if error?
diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
index 3dbd2f0a3d..94463f4ad6 100644
--- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
@@ -141,6 +141,13 @@ describe "Applying updates to a doc", ->
                 rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) =>
                     result.should.equal 1
                     done()
+
+        it "should store the doc ops in the correct order", (done) ->
+            rclient.lrange "DocOps:#{@doc_id}", 0, -1, (error, updates) =>
+                updates = (JSON.parse(u) for u in updates)
+                for appliedUpdate, i in @updates
+                    appliedUpdate.op.should.deep.equal updates[i].op
+                done()

     describe "when older ops come in after the delete", ->
         before (done) ->
@@ -208,7 +215,14 @@ describe "Applying updates to a doc", ->
             MockWebApi.insertDoc @project_id, @doc_id, lines: @lines
             db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) =>
                 throw error if error?
-                DocUpdaterClient.sendUpdates @project_id, @doc_id, updates, (error) =>
+
+                # Send updates in chunks to cause multiple flushes
+                actions = []
+                for i in [0..9]
+                    do (i) =>
+                        actions.push (cb) =>
+                            DocUpdaterClient.sendUpdates @project_id, @doc_id, updates.slice(i*10, (i+1)*10), cb
+                async.series actions, (error) =>
                     throw error if error?
setTimeout done, 2000 diff --git a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee b/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee deleted file mode 100644 index a215b0ccd4..0000000000 --- a/services/document-updater/test/unit/coffee/DocOpsManager/DocOpsManagerTests.coffee +++ /dev/null @@ -1,52 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/DocOpsManager.js" -SandboxedModule = require('sandboxed-module') -{ObjectId} = require "mongojs" - -describe "DocOpsManager", -> - beforeEach -> - @doc_id = ObjectId().toString() - @project_id = ObjectId().toString() - @callback = sinon.stub() - @DocOpsManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./TrackChangesManager": @TrackChangesManager = {} - - describe "getPreviousDocOps", -> - beforeEach -> - @ops = [ "mock-op-1", "mock-op-2" ] - @start = 30 - @end = 32 - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocOpsManager.getPreviousDocOps @project_id, @doc_id, @start, @end, @callback - - it "should get the previous doc ops", -> - @RedisManager.getPreviousDocOps - .calledWith(@doc_id, @start, @end) - .should.equal true - - it "should call the callback with the ops", -> - @callback.calledWith(null, @ops).should.equal true - - describe "pushDocOp", -> - beforeEach -> - @op = "mock-op" - @RedisManager.pushDocOp = sinon.stub().callsArgWith(2, null, @version = 42) - @TrackChangesManager.pushUncompressedHistoryOp = sinon.stub().callsArg(3) - @DocOpsManager.pushDocOp @project_id, @doc_id, @op, @callback - - it "should push the op in to the docOps list", -> - @RedisManager.pushDocOp - .calledWith(@doc_id, @op) - .should.equal true - - it "should push the op into the pushUncompressedHistoryOp", -> - @TrackChangesManager.pushUncompressedHistoryOp - .calledWith(@project_id, @doc_id, @op) - .should.equal true - - it "should call the callback with the version", -> - @callback.calledWith(null, @version).should.equal true - diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee index 8c54b2b854..c77af9a77c 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee @@ -4,12 +4,11 @@ should = chai.should() modulePath = "../../../../app/js/DocumentManager.js" SandboxedModule = require('sandboxed-module') -describe "DocumentUpdater.getDocAndRecentOps", -> +describe "DocumentManager.getDocAndRecentOps", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} "./PersistenceManager": @PersistenceManager = {} - "./DocOpsManager": @DocOpsManager = {} "logger-sharelatex": @logger = {log: sinon.stub()} "./Metrics": @Metrics = Timer: class Timer @@ -27,7 +26,7 @@ describe "DocumentUpdater.getDocAndRecentOps", -> describe "with a previous version specified", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @DocOpsManager.getPreviousDocOps = sinon.stub().callsArgWith(4, null, @ops) + @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback it "should get the doc", -> @@ -36,8 
+35,8 @@ describe "DocumentUpdater.getDocAndRecentOps", -> .should.equal true it "should get the doc ops", -> - @DocOpsManager.getPreviousDocOps - .calledWith(@project_id, @doc_id, @fromVersion, @version) + @RedisManager.getPreviousDocOps + .calledWith(@doc_id, @fromVersion, @version) .should.equal true it "should call the callback with the doc info", -> @@ -49,7 +48,7 @@ describe "DocumentUpdater.getDocAndRecentOps", -> describe "with no previous version specified", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @DocOpsManager.getPreviousDocOps = sinon.stub().callsArgWith(4, null, @ops) + @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback it "should get the doc", -> @@ -58,7 +57,7 @@ describe "DocumentUpdater.getDocAndRecentOps", -> .should.equal true it "should not need to get the doc ops", -> - @DocOpsManager.getPreviousDocOps.called.should.equal false + @RedisManager.getPreviousDocOps.called.should.equal false it "should call the callback with the doc info", -> @callback.calledWith(null, @lines, @version, []).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 105b391b33..0e122e63fb 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -140,40 +140,70 @@ describe "RedisManager", -> it "should log out the problem", -> @logger.warn.called.should.equal true - describe "pushDocOp", -> + describe "updateDocument", -> beforeEach -> + @rclient.set = sinon.stub() @rclient.rpush = sinon.stub() @rclient.expire = sinon.stub() - @rclient.incr = sinon.stub() @rclient.ltrim = sinon.stub() - @op = { op: [{ i: "foo", p: 4 }] } + @RedisManager.getDocVersion = sinon.stub() + + @lines = ["one", "two", "three"] + @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] @version = 42 - _ = null - @rclient.exec = sinon.stub().callsArgWith(0, null, [_, _, _, @version]) - @RedisManager.pushDocOp @doc_id, @op, @callback - it "should push the doc op into the doc ops list", -> - @rclient.rpush - .calledWith("DocOps:#{@doc_id}", JSON.stringify(@op)) - .should.equal true + @rclient.exec = sinon.stub().callsArg(0) - it "should renew the expiry ttl on the doc ops array", -> - @rclient.expire - .calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) - .should.equal true + describe "with a consistent version", -> + beforeEach -> + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @callback + + it "should get the current doc version to check for consistency", -> + @RedisManager.getDocVersion + .calledWith(@doc_id) + .should.equal true + + it "should set the doclines", -> + @rclient.set + .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) + .should.equal true + + it "should set the version", -> + @rclient.set + .calledWith("DocVersion:#{@doc_id}", @version) + .should.equal true - it "should truncate the list to 100 members", -> - @rclient.ltrim - .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) - .should.equal true + it "should push the doc op into the doc ops list", -> + @rclient.rpush + .calledWith("DocOps:#{@doc_id}", JSON.stringify(@ops[0]), 
JSON.stringify(@ops[1])) + .should.equal true - it "should increment the version number", -> - @rclient.incr - .calledWith("DocVersion:#{@doc_id}") - .should.equal true + it "should renew the expiry ttl on the doc ops array", -> + @rclient.expire + .calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) + .should.equal true - it "should call the callback with the version number", -> - @callback.calledWith(null, parseInt(@version, 10)).should.equal true + it "should truncate the list to 100 members", -> + @rclient.ltrim + .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + describe "with an inconsistent version", -> + beforeEach -> + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length - 1) + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @callback + + it "should not call multi.exec", -> + @rclient.exec.called.should.equal false + + it "should call the callback with an error", -> + @callback + .calledWith(new Error("Version mismatch. '#{@doc_id}' is corrupted.")) + .should.equal true describe "putDocInMemory", -> beforeEach (done) -> diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee index 31830e5afc..5621f39a85 100644 --- a/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee @@ -14,14 +14,14 @@ describe "ShareJsDB.getOps", -> @redis_ops = (JSON.stringify(op) for op in @ops) @ShareJsDB = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} - "./DocOpsManager": @DocOpsManager = {} "./DocumentManager":{} "logger-sharelatex": {} + @db = new @ShareJsDB() describe "with start == end", -> beforeEach -> @start = @end = 42 - @ShareJsDB.getOps @doc_key, @start, @end, @callback + @db.getOps @doc_key, @start, @end, @callback it "should return an empty array", -> @callback.calledWith(null, []).should.equal true @@ -30,12 +30,12 @@ describe "ShareJsDB.getOps", -> beforeEach -> @start = 35 @end = 42 - @DocOpsManager.getPreviousDocOps = sinon.stub().callsArgWith(4, null, @ops) - @ShareJsDB.getOps @doc_key, @start, @end, @callback + @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) + @db.getOps @doc_key, @start, @end, @callback it "should get the range from redis", -> - @DocOpsManager.getPreviousDocOps - .calledWith(@project_id, @doc_id, @start, @end-1) + @RedisManager.getPreviousDocOps + .calledWith(@doc_id, @start, @end-1) .should.equal true it "should return the ops", -> @@ -45,11 +45,11 @@ describe "ShareJsDB.getOps", -> beforeEach -> @start = 35 @end = null - @DocOpsManager.getPreviousDocOps = sinon.stub().callsArgWith(4, null, @ops) - @ShareJsDB.getOps @doc_key, @start, @end, @callback + @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) + @db.getOps @doc_key, @start, @end, @callback it "should get until the end of the list", -> - @DocOpsManager.getPreviousDocOps - .calledWith(@project_id, @doc_id, @start, -1) + @RedisManager.getPreviousDocOps + .calledWith(@doc_id, @start, -1) .should.equal true diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee index 1cd1e62c4e..f2527b01a2 100644 --- 
a/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee @@ -17,6 +17,7 @@ describe "ShareJsDB.getSnapshot", -> "./RedisManager": {} "./DocOpsManager": {} "logger-sharelatex": {} + @db = new @ShareJsDB() @version = 42 @@ -27,7 +28,7 @@ describe "ShareJsDB.getSnapshot", -> describe "successfully", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @ShareJsDB.getSnapshot @doc_key, @callback + @db.getSnapshot @doc_key, @callback it "should get the doc", -> @DocumentManager.getDoc @@ -46,7 +47,7 @@ describe "ShareJsDB.getSnapshot", -> describe "when the doclines do not exist", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) - @ShareJsDB.getSnapshot @doc_key, @callback + @db.getSnapshot @doc_key, @callback it "should return the callback with a NotFoundError", -> @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true @@ -54,7 +55,7 @@ describe "ShareJsDB.getSnapshot", -> describe "when getDoc returns an error", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, @error = new Error("oops"), null, null) - @ShareJsDB.getSnapshot @doc_key, @callback + @db.getSnapshot @doc_key, @callback it "should return the callback with an error", -> @callback.calledWith(@error).should.equal true @@ -66,7 +67,7 @@ describe "ShareJsDB.getSnapshot", -> describe "successfully", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @ShareJsDB.getSnapshot @doc_key, @callback + @db.getSnapshot @doc_key, @callback it "should get the doc", -> @DocumentManager.getDoc diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee index 30e92bad3c..838f63034e 100644 --- a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee @@ -1,5 +1,6 @@ sinon = require('sinon') chai = require('chai') +expect = chai.expect should = chai.should() modulePath = "../../../../app/js/ShareJsDB.js" SandboxedModule = require('sandboxed-module') @@ -18,34 +19,21 @@ describe "ShareJsDB.writeOps", -> "./DocOpsManager": @DocOpsManager = {} "./DocumentManager": {} "logger-sharelatex": @logger = {error: sinon.stub()} + @db = new @ShareJsDB() describe "writing an op", -> beforeEach -> @version = 42 @opData.v = @version - @DocOpsManager.pushDocOp = sinon.stub().callsArgWith(3, null, @version+1) - @ShareJsDB.writeOp @doc_key, @opData, @callback + @db.writeOp @doc_key, @opData, @callback - it "should write the op to redis", -> - @DocOpsManager.pushDocOp - .calledWith(@project_id, @doc_id, @opData) - .should.equal true + it "should write into appliedOps", -> + expect(@db.appliedOps[@doc_key]).to.deep.equal [@opData] it "should call the callback without an error", -> @callback.called.should.equal true (@callback.args[0][0]?).should.equal false - describe "writing an op at the wrong version", -> - beforeEach -> - @version = 42 - @mismatch = 5 - @opData.v = @version - @DocOpsManager.pushDocOp = sinon.stub().callsArgWith(3, null, @version + @mismatch) - @ShareJsDB.writeOp @doc_key, @opData, @callback - - it "should call the callback with an error", -> - @callback.calledWith(new Error()).should.equal true - diff --git 
a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index 6d21ca3889..8d967ec2ee 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -26,6 +26,8 @@ describe "ShareJsUpdateManager", -> @model = applyOp: sinon.stub().callsArg(2) getSnapshot: sinon.stub() + db: + appliedOps: {} @ShareJsUpdateManager.getNewShareJsModel = sinon.stub().returns(@model) @ShareJsUpdateManager._listenForOps = sinon.stub() @ShareJsUpdateManager.removeDocFromCache = sinon.stub().callsArg(1) @@ -38,8 +40,9 @@ describe "ShareJsUpdateManager", -> describe "successfully", -> beforeEach (done) -> @model.getSnapshot.callsArgWith(1, null, {snapshot: @updatedDocLines.join("\n"), v: @version}) - @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => - @callback(err, docLines, version) + @model.db.appliedOps["#{@project_id}:#{@doc_id}"] = @appliedOps = ["mock-ops"] + @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version, appliedOps) => + @callback(err, docLines, version, appliedOps) done() it "should create a new ShareJs model", -> @@ -61,8 +64,8 @@ describe "ShareJsUpdateManager", -> .calledWith("#{@project_id}:#{@doc_id}") .should.equal true - it "should return the updated doc lines", -> - @callback.calledWith(null, @updatedDocLines, @version).should.equal true + it "should return the updated doc lines, version and ops", -> + @callback.calledWith(null, @updatedDocLines, @version, @appliedOps).should.equal true describe "when applyOp fails", -> beforeEach (done) -> diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index 574795f3bb..143f01d1ee 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -40,19 +40,19 @@ describe "TrackChangesManager", -> it "should return the callback with an error", -> @callback.calledWith(new Error("track changes api return non-success code: 500")).should.equal true - describe "pushUncompressedHistoryOp", -> + describe "pushUncompressedHistoryOps", -> beforeEach -> - @op = "mock-op" + @ops = ["mock-ops"] @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) describe "pushing the op", -> beforeEach -> - @WebRedisManager.pushUncompressedHistoryOp = sinon.stub().callsArgWith(3, null, 1) - @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback + @WebRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) + @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback - it "should push the op into redis", -> - @WebRedisManager.pushUncompressedHistoryOp - .calledWith(@project_id, @doc_id, @op) + it "should push the ops into redis", -> + @WebRedisManager.pushUncompressedHistoryOps + .calledWith(@project_id, @doc_id, @ops) .should.equal true it "should call the callback", -> @@ -61,11 +61,23 @@ describe "TrackChangesManager", -> it "should not try to flush the op", -> @TrackChangesManager.flushDocChanges.called.should.equal false - 
describe "when there are a multiple of FLUSH_EVERY_N_OPS ops", -> + describe "when we hit a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> - @WebRedisManager.pushUncompressedHistoryOp = + @WebRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) - @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback + @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + + it "should tell the track changes api to flush", -> + @TrackChangesManager.flushDocChanges + .calledWith(@project_id, @doc_id) + .should.equal true + + describe "when we go over a multiple of FLUSH_EVERY_N_OPS ops", -> + beforeEach -> + @ops = ["op1", "op2", "op3"] + @WebRedisManager.pushUncompressedHistoryOps = + sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS + 1) + @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback it "should tell the track changes api to flush", -> @TrackChangesManager.flushDocChanges @@ -74,10 +86,10 @@ describe "TrackChangesManager", -> describe "when TrackChangesManager errors", -> beforeEach -> - @WebRedisManager.pushUncompressedHistoryOp = + @WebRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) - @TrackChangesManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, @callback + @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback it "should log out the error", -> @logger.error diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee index 19094794bb..e5c4cf9118 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee @@ -14,10 +14,12 @@ describe "UpdateManager", -> "./RedisManager" : @RedisManager = {} "./WebRedisManager" : @WebRedisManager = {} "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} + "./TrackChangesManager" : @TrackChangesManager = {} "logger-sharelatex": @logger = { log: sinon.stub() } "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + "settings-sharelatex": Settings = {} describe "processOutstandingUpdates", -> beforeEach -> @@ -152,8 +154,10 @@ describe "UpdateManager", -> @updates = [{op: [{p: 42, i: "foo"}]}] @updatedDocLines = ["updated", "lines"] @version = 34 - @ShareJsUpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) - @RedisManager.setDocument = sinon.stub().callsArg(3) + @appliedOps = ["mock-applied-ops"] + @ShareJsUpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version, @appliedOps) + @RedisManager.updateDocument = sinon.stub().callsArg(4) + @TrackChangesManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) describe "normally", -> beforeEach -> @@ -165,8 +169,13 @@ describe "UpdateManager", -> .should.equal true it "should save the document", -> - @RedisManager.setDocument - .calledWith(@doc_id, @updatedDocLines, @version) + @RedisManager.updateDocument + .calledWith(@doc_id, @updatedDocLines, @version, @appliedOps) + .should.equal true + + it "should push the applied ops into the track changes queue", -> + @TrackChangesManager.pushUncompressedHistoryOps 
+ .calledWith(@project_id, @doc_id, @appliedOps) .should.equal true it "should call the callback", -> diff --git a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee index fa9ca76356..a4b455d219 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee @@ -11,7 +11,12 @@ describe 'UpdateManager - lockUpdatesAndDo', -> "./RedisManager" : @RedisManager = {} "./WebRedisManager" : @WebRedisManager = {} "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} + "./TrackChangesManager" : @TrackChangesManager = {} "logger-sharelatex": @logger = { log: sinon.stub() } + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + "settings-sharelatex": Settings = {} @project_id = "project-id-123" @doc_id = "doc-id-123" @method = sinon.stub().callsArgWith(3, null, @response_arg1) diff --git a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee index 932cb92e26..cd0ce7e9fe 100644 --- a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee @@ -70,18 +70,18 @@ describe "WebRedisManager", -> it "should return the length", -> @callback.calledWith(null, @length).should.equal true - describe "pushUncompressedHistoryOp", -> + describe "pushUncompressedHistoryOps", -> beforeEach (done) -> - @op = { op: [{ i: "foo", p: 4 }] } + @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }] @rclient.rpush = sinon.stub().yields(null, @length = 42) @rclient.sadd = sinon.stub().yields() - @WebRedisManager.pushUncompressedHistoryOp @project_id, @doc_id, @op, (args...) => + @WebRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, (args...) => @callback(args...) 
done() - it "should push the doc op into the doc ops list", -> + it "should push the doc op into the doc ops list as JSON", -> @rclient.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@op)) + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) .should.equal true it "should add the doc_id to the set of which records the project docs", -> From 9bc7594226688dd5b6d4116f7035c90432759170 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 2 Sep 2016 14:47:41 +0100 Subject: [PATCH 158/769] clean up redis query --- .../app/coffee/DocumentManager.coffee | 9 ++++----- .../app/coffee/RedisManager.coffee | 10 ++++++++-- .../flushDocIfLoadedTests.coffee | 8 ++++---- .../coffee/DocumentManager/getDocTests.coffee | 8 ++++---- .../RedisManager/RedisManagerTests.coffee | 19 +++++++++++++++++-- 5 files changed, 37 insertions(+), 17 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 69311bd979..6bdffb76b8 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -7,13 +7,13 @@ Metrics = require "./Metrics" TrackChangesManager = require "./TrackChangesManager" module.exports = DocumentManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, alreadyLoaded) ->) -> timer = new Metrics.Timer("docManager.getDoc") callback = (args...) -> timer.done() _callback(args...) - RedisManager.getDoc doc_id, (error, lines, version, alreadyLoaded) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version) -> return callback(error) if error? if !lines? or !version? logger.log project_id: project_id, doc_id: doc_id, "doc not in redis so getting from persistence API" @@ -90,12 +90,11 @@ module.exports = DocumentManager = callback = (args...) -> timer.done() _callback(args...) - - RedisManager.getDoc doc_id, (error, lines, version) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version) -> return callback(error) if error? if !lines? or !version? logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing" - callback null + callback null # TODO: return a flag to bail out, as we go on to remove doc from memory? else logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" PersistenceManager.setDoc project_id, doc_id, lines, version, (error) -> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 2e59fefd57..d7790dfe5e 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -45,11 +45,12 @@ module.exports = RedisManager = return callback(error) if error? rclient.srem keys.docsInProject(project_id:project_id), doc_id, callback - getDoc : (doc_id, callback = (error, lines, version) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, project_id) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) multi.get keys.docVersion(doc_id:doc_id) + multi.get keys.projectKey(doc_id:doc_id) multi.exec (error, result)-> timer.done() return callback(error) if error? 
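With node_redis, multi.exec yields its replies in the order the commands were queued, so the result array consumed in the next hunk maps positionally onto the three GETs queued above. A minimal sketch of that assumption (the unpacked names are illustrative, not part of the patch):

    multi.exec (error, result) ->
      [docLines, version, doc_project_id] = result # replies for docLines, docVersion and projectKey, in queue order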
@@ -58,7 +59,12 @@ module.exports = RedisManager = catch e return callback(e) version = parseInt(result[1] or 0, 10) - callback null, docLines, version + doc_project_id = result[2] + # check doc is in requested project + if doc_project_id? and doc_project_id isnt project_id + logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc not in project" + return callback(new Errors.NotFoundError("document not found")) + callback null, docLines, version, project_id getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee index bda914999b..4a17e2b84c 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee @@ -23,13 +23,13 @@ describe "DocumentManager.flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback it "should get the doc from redis", -> @RedisManager.getDoc - .calledWith(@doc_id) + .calledWith(@project_id, @doc_id) .should.equal true it "should write the doc lines to the persistence layer", -> @@ -45,14 +45,14 @@ describe "DocumentManager.flushDocIfLoaded", -> describe "when the document is not in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, null, null) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback it "should get the doc from redis", -> @RedisManager.getDoc - .calledWith(@doc_id) + .calledWith(@project_id, @doc_id) .should.equal true it "should not write anything to the persistence layer", -> diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee index b11686ac3c..3edf4cb67d 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee @@ -24,12 +24,12 @@ describe "DocumentUpdater.getDoc", -> describe "when the doc exists in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, @lines, @version) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) @DocumentManager.getDoc @project_id, @doc_id, @callback it "should get the doc from Redis", -> @RedisManager.getDoc - .calledWith(@doc_id) + .calledWith(@project_id, @doc_id) .should.equal true it "should call the callback with the doc info", -> @@ -40,14 +40,14 @@ describe "DocumentUpdater.getDoc", -> describe "when the doc does not exist in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(1, null, null, null) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) 
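# sinon's callsArgWith(2, ...) invokes the stub's third argument (the callback) with the values that follow: the first stub above simulates a redis miss, this one a successful fetch from the persistence layer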
@RedisManager.putDocInMemory = sinon.stub().callsArg(4) @DocumentManager.getDoc @project_id, @doc_id, @callback it "should try to get the doc from Redis", -> @RedisManager.getDoc - .calledWith(@doc_id) + .calledWith(@project_id, @doc_id) .should.equal true it "should get the doc from the PersistenceManager", -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 6ca5250050..7e811e3858 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -38,8 +38,8 @@ describe "RedisManager", -> @jsonlines = JSON.stringify @lines @version = 42 @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version]) - @RedisManager.getDoc @doc_id, @callback + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @project_id]) + @RedisManager.getDoc @project_id, @doc_id, @callback it "should get the lines from redis", -> @rclient.get @@ -56,6 +56,21 @@ describe "RedisManager", -> .calledWith(null, @lines, @version) .should.equal true + describe "getDoc with an invalid project id", -> + beforeEach -> + @lines = ["one", "two", "three"] + @jsonlines = JSON.stringify @lines + @version = 42 + @another_project_id = "project-id-456" + @rclient.get = sinon.stub() + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id]) + @RedisManager.getDoc @project_id, @doc_id, @callback + + it 'should return an error', -> + @callback + .calledWith(new Errors.NotFoundError("not found")) + .should.equal true + describe "getPreviousDocOpsTests", -> describe "with a start and an end value", -> beforeEach -> From 8d14f8f7f0275142aa344d8dc294d9fe2cef636b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 2 Sep 2016 14:03:38 +0100 Subject: [PATCH 159/769] add route parameters --- services/document-updater/app.coffee | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index cb50471965..8e14d79718 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -27,6 +27,18 @@ app.configure -> DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) +app.param 'project_id', (req, res, next, project_id) -> + if project_id?.match /^[0-9a-f]{24}$/ + next() + else + next new Error("invalid project id") + +app.param 'doc_id', (req, res, next, doc_id) -> + if doc_id?.match /^[0-9a-f]{24}$/ + next() + else + next new Error("invalid doc id") + app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded From 2d82d56f934a0a9e9ea0f811799ba21e00259b38 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 9 Sep 2016 11:01:14 +0100 Subject: [PATCH 160/769] Process updates one at a time and write into redis after each one --- .../app/coffee/DocumentManager.coffee | 2 +- .../app/coffee/ShareJsUpdateManager.coffee | 31 ++++++---------- .../app/coffee/UpdateManager.coffee | 14 +++---- .../coffee/GettingADocumentTests.coffee | 3 +- .../coffee/DocumentManager/setDocTests.coffee | 8 ++-- .../ShareJsUpdateManagerTests.coffee | 21 +++++------ .../UpdateManager/ApplyingUpdates.coffee | 37 ++++++++++--------- 7 files changed, 
54 insertions(+), 62 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 63df6d0d79..ebbdc3a66e 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -68,7 +68,7 @@ module.exports = DocumentManager = type: "external" source: source user_id: user_id - UpdateManager.applyUpdates project_id, doc_id, [update], (error) -> + UpdateManager.applyUpdate project_id, doc_id, update, (error) -> return callback(error) if error? # If the document was loaded already, then someone has it open # in a project, and the usual flushing mechanism will happen. diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index ca00a04ea9..985d03094a 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -21,8 +21,8 @@ module.exports = ShareJsUpdateManager = model.db = db return model - applyUpdates: (project_id, doc_id, updates, callback = (error, updatedDocLines) ->) -> - logger.log project_id: project_id, doc_id: doc_id, updates: updates, "applying sharejs updates" + applyUpdate: (project_id, doc_id, update, callback = (error, updatedDocLines) ->) -> + logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates" jobs = [] # We could use a global model for all docs, but we're hitting issues with the @@ -33,26 +33,19 @@ module.exports = ShareJsUpdateManager = model = @getNewShareJsModel() @_listenForOps(model) doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id) - for update in updates - do (update) => - jobs.push (callback) => - model.applyOp doc_key, update, (error) -> - if error == "Op already submitted" - logger.warn {project_id, doc_id, update}, "op has already been submitted" - update.dup = true - ShareJsUpdateManager._sendOp(project_id, doc_id, update) - callback() - else - callback(error) - - async.series jobs, (error) => - logger.log project_id: project_id, doc_id: doc_id, error: error, "applied updates" + model.applyOp doc_key, update, (error) -> if error? - @_sendError(project_id, doc_id, error) - return callback(error) + if error == "Op already submitted" + logger.warn {project_id, doc_id, update}, "op has already been submitted" + update.dup = true + ShareJsUpdateManager._sendOp(project_id, doc_id, update) + else + ShareJsUpdateManager._sendError(project_id, doc_id, error) + return callback(error) + logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update" model.getSnapshot doc_key, (error, data) => if error? - @_sendError(project_id, doc_id, error) + ShareJsUpdateManager._sendError(project_id, doc_id, error) return callback(error) docLines = data.snapshot.split(/\r\n|\n|\r/) callback(null, docLines, data.v, model.db.appliedOps[doc_key] or []) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 0b5da21c8f..bcd0baf8b6 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -39,15 +39,15 @@ module.exports = UpdateManager = return callback(error) if error? 
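# Per the commit message above, updates are now applied strictly one at a time: each applyUpdate call writes its result back to redis before the next update is processed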
if updates.length == 0 return callback() - UpdateManager.applyUpdates project_id, doc_id, updates, callback + async.mapSeries updates, + (update, cb) -> UpdateManager.applyUpdate project_id, doc_id, update, cb + callback - applyUpdates: (project_id, doc_id, updates, callback = (error) ->) -> - for update in updates or [] - UpdateManager._sanitizeUpdate update - ShareJsUpdateManager.applyUpdates project_id, doc_id, updates, (error, updatedDocLines, version, appliedOps) -> + applyUpdate: (project_id, doc_id, update, callback = (error) ->) -> + UpdateManager._sanitizeUpdate update + ShareJsUpdateManager.applyUpdate project_id, doc_id, update, (error, updatedDocLines, version, appliedOps) -> return callback(error) if error? - logger.log doc_id: doc_id, version: version, "updating doc via sharejs" - # TODO: Do these in parallel? Worry about consistency here? + logger.log doc_id: doc_id, version: version, "updating doc in redis" RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, (error) -> return callback(error) if error? TrackChangesManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, callback diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee index 210502ae45..0823b8483a 100644 --- a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee @@ -8,9 +8,10 @@ MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Getting a document", -> - beforeEach -> + before (done) -> @lines = ["one", "two", "three"] @version = 42 + setTimeout done, 200 # Give MockWebApi a chance to start describe "when the document is not loaded", -> before (done) -> diff --git a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee index 9307c42feb..360d939b9f 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee @@ -32,7 +32,7 @@ describe "DocumentManager.setDoc", -> @afterLines = ["after", "lines"] @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, true) @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) - @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null) + @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(2) @@ -51,11 +51,11 @@ describe "DocumentManager.setDoc", -> .should.equal true it "should apply the diff as a ShareJS op", -> - @UpdateManager.applyUpdates + @UpdateManager.applyUpdate .calledWith( @project_id, @doc_id, - [ + { doc: @doc_id, v: @version, op: @ops, @@ -64,7 +64,7 @@ describe "DocumentManager.setDoc", -> source: @source user_id: @user_id } - ] + } ) .should.equal true diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index 8d967ec2ee..94806a1a9d 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ 
b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -20,7 +20,7 @@ describe "ShareJsUpdateManager", -> globals: clearTimeout: @clearTimeout = sinon.stub() - describe "applyUpdates", -> + describe "applyUpdate", -> beforeEach -> @version = 34 @model = @@ -31,17 +31,14 @@ describe "ShareJsUpdateManager", -> @ShareJsUpdateManager.getNewShareJsModel = sinon.stub().returns(@model) @ShareJsUpdateManager._listenForOps = sinon.stub() @ShareJsUpdateManager.removeDocFromCache = sinon.stub().callsArg(1) - @updates = [ - {p: 4, t: "foo"} - {p: 6, t: "bar"} - ] + @update = {p: 4, t: "foo"} @updatedDocLines = ["one", "two"] describe "successfully", -> beforeEach (done) -> @model.getSnapshot.callsArgWith(1, null, {snapshot: @updatedDocLines.join("\n"), v: @version}) @model.db.appliedOps["#{@project_id}:#{@doc_id}"] = @appliedOps = ["mock-ops"] - @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version, appliedOps) => + @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, (err, docLines, version, appliedOps) => @callback(err, docLines, version, appliedOps) done() @@ -54,10 +51,10 @@ describe "ShareJsUpdateManager", -> .calledWith(@model) .should.equal true - it "should send each update to ShareJs", -> - for update in @updates - @model.applyOp - .calledWith("#{@project_id}:#{@doc_id}", update).should.equal true + it "should send the update to ShareJs", -> + @model.applyOp + .calledWith("#{@project_id}:#{@doc_id}", @update) + .should.equal true it "should get the updated doc lines", -> @model.getSnapshot @@ -72,7 +69,7 @@ describe "ShareJsUpdateManager", -> @error = new Error("Something went wrong") @ShareJsUpdateManager._sendError = sinon.stub() @model.applyOp = sinon.stub().callsArgWith(2, @error) - @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, (err, docLines, version) => @callback(err, docLines, version) done() @@ -89,7 +86,7 @@ describe "ShareJsUpdateManager", -> @error = new Error("Something went wrong") @ShareJsUpdateManager._sendError = sinon.stub() @model.getSnapshot.callsArgWith(1, @error) - @ShareJsUpdateManager.applyUpdates @project_id, @doc_id, @updates, (err, docLines, version) => + @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, (err, docLines, version) => @callback(err, docLines, version) done() diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee index e5c4cf9118..43786f4b98 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee @@ -121,16 +121,17 @@ describe "UpdateManager", -> @updatedDocLines = ["updated", "lines"] @version = 34 @WebRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) - @UpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) + @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback it "should get the pending updates", -> @WebRedisManager.getPendingUpdatesForDoc.calledWith(@doc_id).should.equal true it "should apply the updates", -> - @UpdateManager.applyUpdates - .calledWith(@project_id, @doc_id, @updates) - .should.equal true + for 
update in @updates + @UpdateManager.applyUpdate + .calledWith(@project_id, @doc_id, update) + .should.equal true it "should call the callback", -> @callback.called.should.equal true @@ -139,33 +140,33 @@ describe "UpdateManager", -> beforeEach -> @updates = [] @WebRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) - @UpdateManager.applyUpdates = sinon.stub() + @UpdateManager.applyUpdate = sinon.stub() @RedisManager.setDocument = sinon.stub() @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback - it "should not call applyUpdates", -> - @UpdateManager.applyUpdates.called.should.equal false + it "should not call applyUpdate", -> + @UpdateManager.applyUpdate.called.should.equal false it "should call the callback", -> @callback.called.should.equal true - describe "applyUpdates", -> + describe "applyUpdate", -> beforeEach -> - @updates = [{op: [{p: 42, i: "foo"}]}] + @update = {op: [{p: 42, i: "foo"}]} @updatedDocLines = ["updated", "lines"] @version = 34 @appliedOps = ["mock-applied-ops"] - @ShareJsUpdateManager.applyUpdates = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version, @appliedOps) + @ShareJsUpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().callsArg(4) @TrackChangesManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) describe "normally", -> beforeEach -> - @UpdateManager.applyUpdates @project_id, @doc_id, @updates, @callback + @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback it "should apply the updates via ShareJS", -> - @ShareJsUpdateManager.applyUpdates - .calledWith(@project_id, @doc_id, @updates) + @ShareJsUpdateManager.applyUpdate + .calledWith(@project_id, @doc_id, @update) .should.equal true it "should save the document", -> @@ -183,14 +184,14 @@ describe "UpdateManager", -> describe "with UTF-16 surrogate pairs in the update", -> beforeEach -> - @updates = [{op: [{p: 42, i: "\uD835\uDC00"}]}] - @UpdateManager.applyUpdates @project_id, @doc_id, @updates, @callback + @update = {op: [{p: 42, i: "\uD835\uDC00"}]} + @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback it "should apply the update but with surrogate pairs removed", -> - @ShareJsUpdateManager.applyUpdates - .calledWith(@project_id, @doc_id, @updates) + @ShareJsUpdateManager.applyUpdate + .calledWith(@project_id, @doc_id, @update) .should.equal true # \uFFFD is 'replacement character' - @updates[0].op[0].i.should.equal "\uFFFD\uFFFD" + @update.op[0].i.should.equal "\uFFFD\uFFFD" From 6ca48523d9510b17a3ef024057c548cb813d83be Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 9 Sep 2016 15:28:27 +0100 Subject: [PATCH 161/769] mapSeries -> eachSeries --- services/document-updater/app/coffee/UpdateManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index bcd0baf8b6..f35bc1a9b7 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -39,7 +39,7 @@ module.exports = UpdateManager = return callback(error) if error? 
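# async.mapSeries collects each iteration's result into an array before firing the final callback; eachSeries runs the same one-at-a-time flow but discards per-item results, which is all that is needed here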
if updates.length == 0 return callback() - async.mapSeries updates, + async.eachSeries updates, (update, cb) -> UpdateManager.applyUpdate project_id, doc_id, update, cb callback From 993aab7a782aff861ce6edf50f9564dcc60bc01c Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 8 Sep 2016 11:22:54 +0100 Subject: [PATCH 162/769] Don't try to redis rpush with no arguments --- .../app/coffee/RedisManager.coffee | 3 +- .../app/coffee/WebRedisManager.coffee | 2 + .../coffee/ApplyingUpdatesToADocTests.coffee | 53 +++++++++++++++++++ .../RedisManager/RedisManagerTests.coffee | 15 ++++++ .../WebRedisManagerTests.coffee | 50 ++++++++++++----- 5 files changed, 108 insertions(+), 15 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 8063b20ddd..87e31b7826 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -115,7 +115,8 @@ module.exports = RedisManager = multi = rclient.multi() multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) multi.set keys.docVersion(doc_id:doc_id), newVersion - multi.rpush keys.docOps(doc_id: doc_id), jsonOps... # TODO: Really double check that these are going onto the array in the correct order + if jsonOps.length > 0 + multi.rpush keys.docOps(doc_id: doc_id), jsonOps... multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 multi.exec (error, replys) -> diff --git a/services/document-updater/app/coffee/WebRedisManager.coffee b/services/document-updater/app/coffee/WebRedisManager.coffee index 85f301752f..85a056f961 100644 --- a/services/document-updater/app/coffee/WebRedisManager.coffee +++ b/services/document-updater/app/coffee/WebRedisManager.coffee @@ -23,6 +23,8 @@ module.exports = WebRedisManager = rclient.llen "PendingUpdates:#{doc_id}", callback pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> + if ops.length == 0 + return callback(null, 0) jsonOps = ops.map (op) -> JSON.stringify op async.parallel [ (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOps..., cb diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 94463f4ad6..89f1acbcfe 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -1,6 +1,7 @@ sinon = require "sinon" chai = require("chai") chai.should() +expect = chai.expect async = require "async" rclient = require("redis").createClient() {db, ObjectId} = require "../../../app/js/mongojs" @@ -251,3 +252,55 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @result done() + + describe "when sending duplicate ops", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines + db.docOps.insert { + doc_id: ObjectId(@doc_id) + version: @version + }, (error) => + throw error if error? + # The same insert is sent twice with the same version; because its source is listed in dupIfSource, the second op is dropped as a duplicate (a NOP).
+ DocUpdaterClient.sendUpdate @project_id, @doc_id, { + doc: @doc_id + op: [{ + i: "one and a half\n" + p: 4 + }] + v: @version + meta: + source: "ikHceq3yfAdQYzBo4-xZ" + }, (error) => + throw error if error? + setTimeout () => + DocUpdaterClient.sendUpdate @project_id, @doc_id, { + doc: @doc_id + op: [{ + i: "one and a half\n" + p: 4 + }] + v: @version + dupIfSource: ["ikHceq3yfAdQYzBo4-xZ"] + meta: + source: "ikHceq3yfAdQYzBo4-xZ" + }, (error) => + throw error if error? + setTimeout done, 200 + , 200 + + DocUpdaterClient.subscribeToAppliedOps @messageCallback = sinon.stub() + + it "should update the doc", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + + it "should return a message about duplicate ops", -> + @messageCallback.calledTwice.should.equal true + @messageCallback.args[0][0].should.equal "applied-ops" + expect(JSON.parse(@messageCallback.args[0][1]).op.dup).to.be.undefined + @messageCallback.args[1][0].should.equal "applied-ops" + expect(JSON.parse(@messageCallback.args[1][1]).op.dup).to.equal true + diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 5ee3398e87..205692d634 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -219,6 +219,21 @@ describe "RedisManager", -> @callback .calledWith(new Error("Version mismatch. '#{@doc_id}' is corrupted.")) .should.equal true + + describe "with no updates", -> + beforeEach -> + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version) + @RedisManager.updateDocument @doc_id, @lines, @version, [], @callback + + it "should not do an rpush", -> + @rclient.rpush + .called + .should.equal false + + it "should still set the doclines", -> + @rclient.set + .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) + .should.equal true describe "putDocInMemory", -> beforeEach (done) -> diff --git a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee index cd0ce7e9fe..107fdaee53 100644 --- a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee @@ -71,23 +71,45 @@ describe "WebRedisManager", -> @callback.calledWith(null, @length).should.equal true describe "pushUncompressedHistoryOps", -> - beforeEach (done) -> + beforeEach -> @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }] @rclient.rpush = sinon.stub().yields(null, @length = 42) @rclient.sadd = sinon.stub().yields() - @WebRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, (args...) => - @callback(args...) - done() - it "should push the doc op into the doc ops list as JSON", -> - @rclient.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) - .should.equal true + describe "with ops", -> + beforeEach (done) -> + @WebRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, (args...) => + @callback(args...) 
+ done() + + it "should push the doc op into the doc ops list as JSON", -> + @rclient.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) + .should.equal true - it "should add the doc_id to the set of which records the project docs", -> - @rclient.sadd - .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) - .should.equal true + it "should add the doc_id to the set which records the project docs", -> + @rclient.sadd + .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) + .should.equal true - it "should call the callback with the length", -> - @callback.calledWith(null, @length).should.equal true + it "should call the callback with the length", -> + @callback.calledWith(null, @length).should.equal true + + describe "with no ops", -> + beforeEach (done) -> + @WebRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, [], (args...) => + @callback(args...) + done() + + it "should not push the doc op into the doc ops list as JSON", -> + @rclient.rpush + .called + .should.equal false + + it "should not add the doc_id to the set which records the project docs", -> + @rclient.sadd + .called + .should.equal false + + it "should call the callback with the length", -> + @callback.calledWith(null, 0).should.equal true From 5ce15c4d60a86613b6ad7bfe39731860e56d9ec4 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 8 Sep 2016 11:41:59 +0100 Subject: [PATCH 163/769] Move check of zero length op array up a level --- .../app/coffee/TrackChangesManager.coffee | 4 +++- .../document-updater/app/coffee/WebRedisManager.coffee | 2 +- .../TrackChangesManager/TrackChangesManagerTests.coffee | 9 +++++++++ .../coffee/WebRedisManager/WebRedisManagerTests.coffee | 4 ++-- 4 files changed, 15 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 7dfc98115a..9aa1c0ad47 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -22,7 +22,9 @@ module.exports = TrackChangesManager = return callback(error) FLUSH_EVERY_N_OPS: 50 - pushUncompressedHistoryOps: (project_id, doc_id, ops, callback = (error) ->) -> + pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> + if ops.length == 0 + return callback() WebRedisManager.pushUncompressedHistoryOps project_id, doc_id, ops, (error, length) -> return callback(error) if error? # We want to flush every 50 ops, i.e.
50, 100, 150, etc diff --git a/services/document-updater/app/coffee/WebRedisManager.coffee b/services/document-updater/app/coffee/WebRedisManager.coffee index 85a056f961..73c099f9da 100644 --- a/services/document-updater/app/coffee/WebRedisManager.coffee +++ b/services/document-updater/app/coffee/WebRedisManager.coffee @@ -24,7 +24,7 @@ module.exports = WebRedisManager = pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> if ops.length == 0 - return callback(null, 0) + return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush jsonOps = ops.map (op) -> JSON.stringify op async.parallel [ (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOps..., cb diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee index 143f01d1ee..03106e2c2e 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee @@ -100,4 +100,13 @@ describe "TrackChangesManager", -> "error flushing doc to track changes api" ) .should.equal true + + describe "with no ops", -> + beforeEach -> + @WebRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) + @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, [], @callback + + it "should not call WebRedisManager.pushUncompressedHistoryOps", -> + @WebRedisManager.pushUncompressedHistoryOps.called.should.equal false + diff --git a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee index 107fdaee53..f3f0d8afdc 100644 --- a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee @@ -111,5 +111,5 @@ describe "WebRedisManager", -> .called .should.equal false - it "should call the callback with the length", -> - @callback.calledWith(null, 0).should.equal true + it "should call the callback with an error", -> + @callback.calledWith(new Error("cannot push no ops")).should.equal true From e739e86c48cccfc828a538bb6ff6da3b57555f6f Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 28 Nov 2016 10:14:42 +0000 Subject: [PATCH 164/769] Get basic ChangeTracker hooked up. 
WIP --- services/document-updater/app.coffee | 1 + .../app/coffee/ChangesTracker.coffee | 457 ++++++++++++++++++ .../app/coffee/DocumentManager.coffee | 35 +- .../app/coffee/HistoryManager.coffee | 44 ++ .../app/coffee/HttpController.coffee | 12 + .../app/coffee/PersistenceManager.coffee | 14 +- .../app/coffee/ProjectManager.coffee | 26 + .../app/coffee/RedisKeyBuilder.coffee | 4 + .../app/coffee/RedisManager.coffee | 20 +- .../app/coffee/ShareJsDB.coffee | 27 +- .../app/coffee/ShareJsUpdateManager.coffee | 28 +- .../app/coffee/TrackChangesManager.coffee | 52 +- .../app/coffee/UpdateManager.coffee | 19 +- .../app/coffee/WebRedisManager.coffee | 5 +- .../config/settings.defaults.coffee | 4 + .../coffee/TrackChangesTests.coffee | 96 ++++ .../coffee/helpers/DocUpdaterClient.coffee | 19 +- 17 files changed, 751 insertions(+), 112 deletions(-) create mode 100644 services/document-updater/app/coffee/ChangesTracker.coffee create mode 100644 services/document-updater/app/coffee/HistoryManager.coffee create mode 100644 services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index af0ad242c2..df26b81a2f 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -46,6 +46,7 @@ app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLo app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject app.post '/project/:project_id/flush', HttpController.flushProject +app.post '/project/:project_id/track_changes', HttpController.setTrackChanges app.get '/total', (req, res)-> timer = new Metrics.Timer("http.allDocList") diff --git a/services/document-updater/app/coffee/ChangesTracker.coffee b/services/document-updater/app/coffee/ChangesTracker.coffee new file mode 100644 index 0000000000..8bc4cf9380 --- /dev/null +++ b/services/document-updater/app/coffee/ChangesTracker.coffee @@ -0,0 +1,457 @@ +load = (EventEmitter) -> + class ChangesTracker extends EventEmitter + # The purpose of this class is to track a set of inserts and deletes to a document, like + # track changes in Word. We store these as a set of ShareJs style ranges: + # {i: "foo", p: 42} # Insert 'foo' at offset 42 + # {d: "bar", p: 37} # Delete 'bar' at offset 37 + # We only track the inserts and deletes, not the whole document, but by being given all + # updates that are applied to a document, we can update these appropriately. + # + # Note that the set of inserts and deletes we store applies to the document as-is at the moment. + # So inserts correspond to text which is in the document, while deletes correspond to text which + # is no longer there, so their lengths do not affect the position of later offsets. + # E.g. + # this is the current text of the document + # |-----| | + # {i: "current ", p:12} -^ ^- {d: "old ", p: 31} + # + # Track changes rules (should be consistent with Word): + # * When text is inserted at a delete, the text goes to the left of the delete + # I.e. "foo|bar" -> "foobaz|bar", where | is the delete, and 'baz' is inserted + # * Deleting content flagged as 'inserted' does not create a new delete marker, it only + # removes the insert marker. E.g. + # * "abdefghijkl" -> "abfghijkl" when 'de' is deleted. 
No delete marker added + # |---| <- inserted |-| <- inserted + # * Deletes overlapping regular text and inserted text will insert a delete marker for the + # regular text: + # "abcdefghijkl" -> "abcdejkl" when 'fghi' is deleted + # |----| |--|| + # ^- inserted 'bcdefg' \ ^- deleted 'hi' + # \--inserted 'bcde' + # * Deletes overlapping other deletes are merged. E.g. + # "abcghijkl" -> "ahijkl" when 'bcg is deleted' + # | <- delete 'def' | <- delete 'bcdefg' + # * Deletes by another user will consume deletes by the first user + # * Inserts by another user will not combine with inserts by the first user. If they are in the + # middle of a previous insert by the first user, the original insert will be split into two. + constructor: (@changes = [], @comments = []) -> + # Change objects have the following structure: + # { + # id: ... # Uniquely generated by us + # op: { # ShareJs style op tracking the offset (p) and content inserted (i) or deleted (d) + # i: "..." + # p: 42 + # } + # } + # + # Ids are used to uniquely identify a change, e.g. for updating it in the database, or keeping in + # sync with Ace ranges. + @id = 0 + + addComment: (offset, length, metadata) -> + # TODO: Don't allow overlapping comments? + @comments.push comment = { + id: @_newId() + offset, length, metadata + } + @emit "comment:added", comment + return comment + + getComment: (comment_id) -> + comment = null + for c in @comments + if c.id == comment_id + comment = c + break + return comment + + resolveCommentId: (comment_id, resolved_data) -> + comment = @getComment(comment_id) + return if !comment? + comment.metadata.resolved = true + comment.metadata.resolved_data = resolved_data + @emit "comment:resolved", comment + + unresolveCommentId: (comment_id) -> + comment = @getComment(comment_id) + return if !comment? + comment.metadata.resolved = false + @emit "comment:unresolved", comment + + removeCommentId: (comment_id) -> + comment = @getComment(comment_id) + return if !comment? + @comments = @comments.filter (c) -> c.id != comment_id + @emit "comment:removed", comment + + getChange: (change_id) -> + change = null + for c in @changes + if c.id == change_id + change = c + break + return change + + removeChangeId: (change_id) -> + change = @getChange(change_id) + return if !change? + @_removeChange(change) + + applyOp: (op, metadata = {}) -> + metadata.ts ?= new Date() + # Apply an op that has been applied to the document to our changes to keep them up to date + if op.i? + @applyInsertToChanges(op, metadata) + @applyInsertToComments(op) + else if op.d? 
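+ # Deletes are handled symmetrically to inserts: the tracked changes are updated first, then any comment ranges the delete overlaps.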
+ @applyDeleteToChanges(op, metadata) + @applyDeleteToComments(op) + + applyInsertToComments: (op) -> + for comment in @comments + if op.p <= comment.offset + comment.offset += op.i.length + @emit "comment:moved", comment + else if op.p < comment.offset + comment.length + comment.length += op.i.length + @emit "comment:moved", comment + + applyDeleteToComments: (op) -> + op_start = op.p + op_length = op.d.length + op_end = op.p + op_length + for comment in @comments + comment_end = comment.offset + comment.length + if op_end <= comment.offset + # delete is fully before comment + comment.offset -= op_length + @emit "comment:moved", comment + else if op_start >= comment_end + # delete is fully after comment, nothing to do + else + # delete and comment overlap + delete_length_before = Math.max(0, comment.offset - op_start) + delete_length_after = Math.max(0, op_end - comment_end) + delete_length_overlapping = op_length - delete_length_before - delete_length_after + comment.offset = Math.min(comment.offset, op_start) + comment.length -= delete_length_overlapping + @emit "comment:moved", comment + + applyInsertToChanges: (op, metadata) -> + op_start = op.p + op_length = op.i.length + op_end = op.p + op_length + + already_merged = false + previous_change = null + moved_changes = [] + remove_changes = [] + new_changes = [] + for change in @changes + change_start = change.op.p + + if change.op.d? + # Shift any deletes after this along by the length of this insert + if op_start < change_start + change.op.p += op_length + moved_changes.push change + else if op_start == change_start + # If the insert matches the start of the delete, just remove it from the delete instead + if change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i + change.op.d = change.op.d.slice(op.i.length) + change.op.p += op.i.length + if change.op.d == "" + remove_changes.push change + else + moved_changes.push change + already_merged = true + else + change.op.p += op_length + moved_changes.push change + else if change.op.i? + change_end = change_start + change.op.i.length + is_change_overlapping = (op_start >= change_start and op_start <= change_end) + + # Only merge inserts if they are from the same user + is_same_user = metadata.user_id == change.metadata.user_id + + # If there is a delete at the start of the insert, and we're inserting + # at the start, we SHOULDN'T merge since the delete acts as a partition. + # The previous op will be the delete, but it's already been shifted by this insert + # + # I.e. + # Originally: |-- existing insert --| + # | <- existing delete at same offset + # + # Now: |-- existing insert --| <- not shifted yet + # |-- this insert --|| <- existing delete shifted along to end of this op + # + # After: |-- existing insert --| + # |-- this insert --|| <- existing delete + # + # Without the delete, the inserts would be merged. + is_insert_blocked_by_delete = (previous_change? and previous_change.op.d? and previous_change.op.p == op_end) + + # If the insert is overlapping another insert, either at the beginning in the middle or touching the end, + # then we merge them into one. 
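+ # A worked example (track changes on, same user, illustrative values): merging a new
+ # op {i: "XY", p: 11} into an existing change {i: "abc", p: 10} gives offset = 1,
+ # so change.op.i becomes "a" + "XY" + "bc" = "aXYbc".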
+ if @track_changes and + is_change_overlapping and + !is_insert_blocked_by_delete and + !already_merged and + is_same_user + offset = op_start - change_start + change.op.i = change.op.i.slice(0, offset) + op.i + change.op.i.slice(offset) + change.metadata.ts = metadata.ts + already_merged = true + moved_changes.push change + else if op_start <= change_start + # If we're fully before the other insert we can just shift the other insert by our length. + # If they are touching, and should have been merged, they will have been above. + # If not merged above, then it must be blocked by a delete, and will be after this insert, so we shift it along as well + change.op.p += op_length + moved_changes.push change + else if (!is_same_user or !@track_changes) and change_start < op_start < change_end + # This user is inserting inside a change by another user, so we need to split the + # other user's change into one before and after this one. + offset = op_start - change_start + before_content = change.op.i.slice(0, offset) + after_content = change.op.i.slice(offset) + + # The existing change can become the 'before' change + change.op.i = before_content + moved_changes.push change + + # Create a new op afterwards + after_change = { + op: { + i: after_content + p: change_start + offset + op_length + } + metadata: {} + } + after_change.metadata[key] = value for key, value of change.metadata + new_changes.push after_change + + previous_change = change + + if @track_changes and !already_merged + @_addOp op, metadata + for {op, metadata} in new_changes + @_addOp op, metadata + + for change in remove_changes + @_removeChange change + + if moved_changes.length > 0 + @emit "changes:moved", moved_changes + + applyDeleteToChanges: (op, metadata) -> + op_start = op.p + op_length = op.d.length + op_end = op.p + op_length + remove_changes = [] + moved_changes = [] + + # We might end up modifying our delete op if it merges with existing deletes, or cancels out + # with an existing insert. Since we might do multiple modifications, we record them and do + # all the modifications after looping through the existing changes, so as not to mess up the + # offset indexes as we go. + op_modifications = [] + for change in @changes + if change.op.i? + change_start = change.op.p + change_end = change_start + change.op.i.length + if op_end <= change_start + # Shift ops after us back by our length + change.op.p -= op_length + moved_changes.push change + else if op_start >= change_end + # Delete is after insert, nothing to do + else + # When the new delete overlaps an insert, we should remove the part of the insert that + # is now deleted, and also remove the part of the new delete that overlapped. I.e. + # the two cancel out where they overlap. 
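+ # An illustrative example: an existing insert {i: "abcdef", p: 10} and a new delete
+ # {d: "cde", p: 12} overlap entirely within the insert, so the insert shrinks to
+ # {i: "abf", p: 10} and the delete is consumed completely (op.d ends up empty).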
+ if op_start >= change_start + # |-- existing insert --| + # insert_remaining_before -> |.....||-- new delete --| + delete_remaining_before = "" + insert_remaining_before = change.op.i.slice(0, op_start - change_start) + else + # delete_remaining_before -> |.....||-- existing insert --| + # |-- new delete --| + delete_remaining_before = op.d.slice(0, change_start - op_start) + insert_remaining_before = "" + + if op_end <= change_end + # |-- existing insert --| + # |-- new delete --||.....| <- insert_remaining_after + delete_remaining_after = "" + insert_remaining_after = change.op.i.slice(op_end - change_start) + else + # |-- existing insert --||.....| <- delete_remaining_after + # |-- new delete --| + delete_remaining_after = op.d.slice(change_end - op_start) + insert_remaining_after = "" + + insert_remaining = insert_remaining_before + insert_remaining_after + if insert_remaining.length > 0 + change.op.i = insert_remaining + change.op.p = Math.min(change_start, op_start) + change.metadata.ts = metadata.ts + moved_changes.push change + else + remove_changes.push change + + # We know what we want to preserve of our delete op before (delete_remaining_before) and what we want to preserve + # afterwards (delete_remaining_before). Now we need to turn that into a modification which deletes the + # chunk in the middle not covered by these. + delete_removed_length = op.d.length - delete_remaining_before.length - delete_remaining_after.length + delete_removed_start = delete_remaining_before.length + modification = { + d: op.d.slice(delete_removed_start, delete_removed_start + delete_removed_length) + p: delete_removed_start + } + if modification.d.length > 0 + op_modifications.push modification + else if change.op.d? + change_start = change.op.p + if op_end < change_start or (!@track_changes and op_end == change_start) + # Shift ops after us back by our length. + # If we're tracking changes, it must be strictly before, since we'll merge + # below if they are touching. Otherwise, touching is fine. + change.op.p -= op_length + moved_changes.push change + else if op_start <= change_start <= op_end + if @track_changes + # If we overlap a delete, add it in our content, and delete the existing change. + # It's easier to do it this way, rather than modifying the existing delete in case + # we overlap many deletes and we'd need to track that. We have a workaround to + # update the delete in place if possible below. + offset = change_start - op_start + op_modifications.push { i: change.op.d, p: offset } + remove_changes.push change + else + change.op.p = op_start + moved_changes.push change + + # Copy rather than modify because we still need to apply it to comments + op = { + p: op.p + d: @_applyOpModifications(op.d, op_modifications) + } + + for change in remove_changes + # This is a bit of hack to avoid removing one delete and replacing it with another. + # If we don't do this, it causes the UI to flicker + if op.d.length > 0 and change.op.d? and op.p <= change.op.p <= op.p + op.d.length + change.op.p = op.p + change.op.d = op.d + change.metadata = metadata + moved_changes.push change + op.d = "" # stop it being added + else + @_removeChange change + + if @track_changes and op.d.length > 0 + @_addOp op, metadata + else + # It's possible that we deleted an insert between two other inserts. I.e. 
+ # If we delete 'user_2 insert' in: + # |-- user_1 insert --||-- user_2 insert --||-- user_1 insert --| + # it becomes: + # |-- user_1 insert --||-- user_1 insert --| + # We need to merge these together again + results = @_scanAndMergeAdjacentUpdates() + moved_changes = moved_changes.concat(results.moved_changes) + for change in results.remove_changes + @_removeChange change + moved_changes = moved_changes.filter (c) -> c != change + + if moved_changes.length > 0 + @emit "changes:moved", moved_changes + + _newId: () -> + (@id++).toString() + + _addOp: (op, metadata) -> + change = { + id: @_newId() + op: op + metadata: metadata + } + @changes.push change + + # Keep ops in order of offset, with deletes before inserts + @changes.sort (c1, c2) -> + result = c1.op.p - c2.op.p + if result != 0 + return result + else if c1.op.i? and c2.op.d? + return 1 + else + return -1 + + if op.d? + @emit "delete:added", change + else if op.i? + @emit "insert:added", change + + _removeChange: (change) -> + @changes = @changes.filter (c) -> c.id != change.id + if change.op.d? + @emit "delete:removed", change + else if change.op.i? + @emit "insert:removed", change + + _applyOpModifications: (content, op_modifications) -> + # Put in descending position order, with deleting first if at the same offset + # (Inserting first would modify the content that the delete will delete) + op_modifications.sort (a, b) -> + result = b.p - a.p + if result != 0 + return result + else if a.i? and b.d? + return 1 + else + return -1 + + for modification in op_modifications + if modification.i? + content = content.slice(0, modification.p) + modification.i + content.slice(modification.p) + else if modification.d? + if content.slice(modification.p, modification.p + modification.d.length) != modification.d + throw new Error("deleted content does not match. content: #{JSON.stringify(content)}; modification: #{JSON.stringify(modification)}") + content = content.slice(0, modification.p) + content.slice(modification.p + modification.d.length) + return content + + _scanAndMergeAdjacentUpdates: () -> + # This should only need calling when deleting an update between two + # other updates. There's no other way to get two adjacent updates from the + # same user, since they would be merged on insert. + previous_change = null + remove_changes = [] + moved_changes = [] + for change in @changes + if previous_change?.op.i? and change.op.i? + previous_change_end = previous_change.op.p + previous_change.op.i.length + previous_change_user_id = previous_change.metadata.user_id + change_start = change.op.p + change_user_id = change.metadata.user_id + if previous_change_end == change_start and previous_change_user_id == change_user_id + remove_changes.push change + previous_change.op.i += change.op.i + moved_changes.push previous_change + else if previous_change?.op.d? and change.op.d? and previous_change.op.p == change.op.p + # Merge adjacent deletes + previous_change.op.d += change.op.d + remove_changes.push change + moved_changes.push previous_change + else # Only update to the current change if we haven't removed it. + previous_change = change + return { moved_changes, remove_changes } + +if define? 
+ define ["utils/EventEmitter"], load +else + EventEmitter = require("events").EventEmitter + module.exports = load(EventEmitter) \ No newline at end of file diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index ebbdc3a66e..9c7277c469 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -3,7 +3,8 @@ PersistenceManager = require "./PersistenceManager" DiffCodec = require "./DiffCodec" logger = require "logger-sharelatex" Metrics = require "./Metrics" -TrackChangesManager = require "./TrackChangesManager" +HistoryManager = require "./HistoryManager" +WebRedisManager = require "./WebRedisManager" module.exports = DocumentManager = getDoc: (project_id, doc_id, _callback = (error, lines, version, alreadyLoaded) ->) -> @@ -12,18 +13,18 @@ module.exports = DocumentManager = timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, track_changes, track_changes_entries) -> return callback(error) if error? if !lines? or !version? - logger.log project_id: project_id, doc_id: doc_id, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version) -> + logger.log {project_id, doc_id, track_changes}, "doc not in redis so getting from persistence API" + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, track_changes, track_changes_entries) -> return callback(error) if error? - logger.log project_id: project_id, doc_id: doc_id, lines: lines, version: version, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, (error) -> + logger.log {project_id, doc_id, lines, version, track_changes}, "got doc from persistence API" + RedisManager.putDocInMemory project_id, doc_id, lines, version, track_changes, track_changes_entries, (error) -> return callback(error) if error? - callback null, lines, version, false + callback null, lines, version, track_changes, track_changes_entries, false else - callback null, lines, version, true + callback null, lines, version, track_changes, track_changes_entries, true getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") @@ -50,7 +51,7 @@ module.exports = DocumentManager = return callback(new Error("No lines were provided to setDoc")) UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, track_changes, alreadyLoaded) -> return callback(error) if error? if oldLines? and oldLines.length > 0 and oldLines[0].text? @@ -89,14 +90,14 @@ module.exports = DocumentManager = callback = (args...) -> timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, track_changes, track_changes_entries) -> return callback(error) if error? if !lines? or !version? logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing" callback null # TODO: return a flag to bail out, as we go on to remove doc from memory? 
      else
        logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc"
-        PersistenceManager.setDoc project_id, doc_id, lines, version, (error) ->
+        PersistenceManager.setDoc project_id, doc_id, lines, version, track_changes, track_changes_entries, (error) ->
          return callback(error) if error?
          callback null
@@ -111,13 +112,19 @@
    # Flush in the background since it requires an http request
    # to track changes
-    TrackChangesManager.flushDocChanges project_id, doc_id, (err) ->
+    HistoryManager.flushDocChanges project_id, doc_id, (err) ->
      if err?
        logger.err {err, project_id, doc_id}, "error flushing to track changes"
    RedisManager.removeDocFromMemory project_id, doc_id, (error) ->
      return callback(error) if error?
      callback null
+
+  setTrackChanges: (project_id, doc_id, track_changes_on, callback = (error) ->) ->
+    RedisManager.setTrackChanges project_id, doc_id, track_changes_on, (error) ->
+      return callback(error) if error?
+      WebRedisManager.sendData {project_id, doc_id, track_changes_on}
+      callback()
 
  getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) ->
    UpdateManager = require "./UpdateManager"
@@ -138,3 +145,7 @@
  flushAndDeleteDocWithLock: (project_id, doc_id, callback = (error) ->) ->
    UpdateManager = require "./UpdateManager"
    UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, callback
+
+  setTrackChangesWithLock: (project_id, doc_id, track_changes_on, callback = (error) ->) ->
+    UpdateManager = require "./UpdateManager"
+    UpdateManager.lockUpdatesAndDo DocumentManager.setTrackChanges, project_id, doc_id, track_changes_on, callback
\ No newline at end of file
diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee
new file mode 100644
index 0000000000..637fd2cb5f
--- /dev/null
+++ b/services/document-updater/app/coffee/HistoryManager.coffee
@@ -0,0 +1,44 @@
+settings = require "settings-sharelatex"
+request = require "request"
+logger = require "logger-sharelatex"
+async = require "async"
+WebRedisManager = require "./WebRedisManager"
+
+module.exports = HistoryManager =
+  flushDocChanges: (project_id, doc_id, callback = (error) ->) ->
+    if !settings.apis?.trackchanges?
+      logger.warn doc_id: doc_id, "track changes API is not configured, so not flushing"
+      return callback()
+
+    url = "#{settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush"
+    logger.log project_id: project_id, doc_id: doc_id, url: url, "flushing doc in track changes api"
+    request.post url, (error, res, body)->
+      if error?
+        return callback(error)
+      else if res.statusCode >= 200 and res.statusCode < 300
+        return callback(null)
+      else
+        error = new Error("track changes api returned a failure status code: #{res.statusCode}")
+        return callback(error)
+
+  FLUSH_EVERY_N_OPS: 50
+  pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) ->
+    if ops.length == 0
+      return callback()
+    WebRedisManager.pushUncompressedHistoryOps project_id, doc_id, ops, (error, length) ->
+      return callback(error) if error?
+      # We want to flush every 50 ops, i.e. 50, 100, 150, etc
+      # Find out which 'block' (i.e. 0-49, 50-99) we were in before and after pushing these
+      # ops. If we've changed, then we've gone over a multiple of 50 and should flush.
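+      # For example (hypothetical numbers): pushing 5 ops onto a list of 48 gives
+      # length = 53, so prevBlock = floor(48 / 50) = 0 and newBlock = floor(53 / 50) = 1,
+      # and since they differ we flush.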
+      # (Most of the time, we will only hit 50 and then flushing will put us back to 0)
+      previousLength = length - ops.length
+      prevBlock = Math.floor(previousLength / HistoryManager.FLUSH_EVERY_N_OPS)
+      newBlock = Math.floor(length / HistoryManager.FLUSH_EVERY_N_OPS)
+      if newBlock != prevBlock
+        # Do this in the background since it uses HTTP and so may be too
+        # slow to wait for when processing a doc update.
+        logger.log length: length, doc_id: doc_id, project_id: project_id, "flushing track changes api"
+        HistoryManager.flushDocChanges project_id, doc_id, (error) ->
+          if error?
+            logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api"
+      callback()
\ No newline at end of file
diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee
index ee6359b104..0366746d56 100644
--- a/services/document-updater/app/coffee/HttpController.coffee
+++ b/services/document-updater/app/coffee/HttpController.coffee
@@ -96,3 +96,15 @@ module.exports = HttpController =
      return next(error) if error?
      logger.log project_id: project_id, "deleted project via http"
      res.send 204 # No Content
+
+  setTrackChanges: (req, res, next = (error) ->) ->
+    project_id = req.params.project_id
+    track_changes_on = req.body.on
+    if !track_changes_on?
+      return res.send 400
+    track_changes_on = !!track_changes_on # Make boolean
+    logger.log {project_id, track_changes_on}, "setting track changes via http"
+    ProjectManager.setTrackChangesWithLocks project_id, track_changes_on, (error) ->
+      return next(error) if error?
+      res.send 204
+
diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee
index 605425eb5e..8d5578b4cf 100644
--- a/services/document-updater/app/coffee/PersistenceManager.coffee
+++ b/services/document-updater/app/coffee/PersistenceManager.coffee
@@ -12,14 +12,14 @@ MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds
 
 module.exports = PersistenceManager =
  getDoc: (project_id, doc_id, callback = (error, lines, version) ->) ->
-    PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines) ->
+    PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines, track_changes, track_changes_entries) ->
      return callback(error) if error?
      PersistenceManager.getDocVersionInMongo doc_id, (error, version) ->
        return callback(error) if error?
-        callback null, lines, version
+        callback null, lines, version, track_changes, track_changes_entries
 
-  setDoc: (project_id, doc_id, lines, version, callback = (error) ->) ->
-    PersistenceManager.setDocInWeb project_id, doc_id, lines, (error) ->
+  setDoc: (project_id, doc_id, lines, version, track_changes, track_changes_entries, callback = (error) ->) ->
+    PersistenceManager.setDocInWeb project_id, doc_id, lines, track_changes, track_changes_entries, (error) ->
      return callback(error) if error?
      PersistenceManager.setDocVersionInMongo doc_id, version, (error) ->
        return callback(error) if error?
@@ -50,13 +50,13 @@ module.exports = PersistenceManager =
          body = JSON.parse body
        catch e
          return callback(e)
-        return callback null, body.lines
+        return callback null, body.lines, body.track_changes, body.track_changes_entries
      else if res.statusCode == 404
        return callback(new Errors.NotFoundError("doc not found: #{url}"))
      else
        return callback(new Error("error accessing web API: #{url} #{res.statusCode}"))
 
-  setDocInWeb: (project_id, doc_id, lines, _callback = (error) ->) ->
+  setDocInWeb: (project_id, doc_id, lines, track_changes, track_changes_entries, _callback = (error) ->) ->
    timer = new Metrics.Timer("persistenceManager.setDoc")
    callback = (args...) ->
      timer.done()
@@ -68,6 +68,8 @@
      method: "POST"
      body: JSON.stringify
        lines: lines
+        track_changes: track_changes
+        track_changes_entries: track_changes_entries
      headers:
        "content-type": "application/json"
      auth:
diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
index f0f62b6d1b..a38fe08397 100644
--- a/services/document-updater/app/coffee/ProjectManager.coffee
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -57,4 +57,30 @@ module.exports = ProjectManager =
      else
        callback(null)
 
+  setTrackChangesWithLocks: (project_id, track_changes_on, _callback = (error) ->) ->
+    timer = new Metrics.Timer("projectManager.toggleTrackChangesWithLocks")
+    callback = (args...) ->
+      timer.done()
+      _callback(args...)
+
+    RedisManager.getDocIdsInProject project_id, (error, doc_ids) ->
+      return callback(error) if error?
+      jobs = []
+      errors = []
+      for doc_id in (doc_ids or [])
+        do (doc_id) ->
+          jobs.push (callback) ->
+            DocumentManager.setTrackChangesWithLock project_id, doc_id, track_changes_on, (error) ->
+              if error?
+                logger.error {err: error, project_id, doc_ids, track_changes_on}, "error toggling track changes for doc"
+                errors.push(error)
+              callback()
+      # TODO: If no docs, turn on track changes in Mongo manually
+
+      logger.log {project_id, doc_ids, track_changes_on}, "toggling track changes for docs"
+      async.series jobs, () ->
+        if errors.length > 0
+          callback new Error("Errors toggling track changes for docs.
See log for details") + else + callback(null) diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee index 0e9e59e8f1..c09fb43f00 100644 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -34,6 +34,10 @@ module.exports = RedisKeyBuilder = return (key_schema) -> key_schema.uncompressedHistoryOp({doc_id}) pendingUpdates: ({doc_id}) -> return (key_schema) -> key_schema.pendingUpdates({doc_id}) + trackChangesEnabled: ({doc_id}) -> + return (key_schema) -> key_schema.trackChangesEnabled({doc_id}) + trackChangesEntries: ({doc_id}) -> + return (key_schema) -> key_schema.trackChangesEntries({doc_id}) docsInProject: ({project_id}) -> return (key_schema) -> key_schema.docsInProject({project_id}) docsWithHistoryOps: ({project_id}) -> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 87e31b7826..6ee764cb7e 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -13,7 +13,7 @@ minutes = 60 # seconds for Redis expire module.exports = RedisManager = rclient: rclient - putDocInMemory : (project_id, doc_id, docLines, version, _callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, track_changes, track_changes_entries, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> timer.done() @@ -23,6 +23,8 @@ module.exports = RedisManager = multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) multi.set keys.projectKey({doc_id:doc_id}), project_id multi.set keys.docVersion(doc_id:doc_id), version + multi.set keys.trackChangesEnabled(doc_id:doc_id), if track_changes then "1" else "0" + multi.set keys.trackChangesEntries(doc_id:doc_id), JSON.stringify(track_changes_entries) multi.exec (error) -> return callback(error) if error? rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback @@ -41,30 +43,36 @@ module.exports = RedisManager = multi.del keys.docLines(doc_id:doc_id) multi.del keys.projectKey(doc_id:doc_id) multi.del keys.docVersion(doc_id:doc_id) + multi.del keys.trackChangesEnabled(doc_id:doc_id) + multi.del keys.trackChangesEntries(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? rclient.srem keys.docsInProject(project_id:project_id), doc_id, callback - getDoc : (project_id, doc_id, callback = (error, lines, version, project_id) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, track_changes, track_changes_entries) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) multi.get keys.docVersion(doc_id:doc_id) multi.get keys.projectKey(doc_id:doc_id) + multi.get keys.trackChangesEnabled(doc_id:doc_id) + multi.get keys.trackChangesEntries(doc_id:doc_id) multi.exec (error, result)-> timer.done() return callback(error) if error? try docLines = JSON.parse result[0] + track_changes_entries = JSON.parse result[4] catch e return callback(e) version = parseInt(result[1] or 0, 10) doc_project_id = result[2] + track_changes = (result[3] == "1") # check doc is in requested project if doc_project_id? 
          and doc_project_id isnt project_id
          logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc not in project"
          return callback(new Errors.NotFoundError("document not found"))
-      callback null, docLines, version, project_id
+      callback null, docLines, version, track_changes, track_changes_entries
 
  getDocVersion: (doc_id, callback = (error, version) ->) ->
    rclient.get keys.docVersion(doc_id: doc_id), (error, version) ->
@@ -104,7 +112,7 @@
  DOC_OPS_TTL: 60 * minutes
  DOC_OPS_MAX_LENGTH: 100
 
-  updateDocument : (doc_id, docLines, newVersion, appliedOps = [], callback = (error) ->)->
+  updateDocument : (doc_id, docLines, newVersion, appliedOps = [], track_changes_entries, callback = (error) ->)->
    RedisManager.getDocVersion doc_id, (error, currentVersion) ->
      return callback(error) if error?
      if currentVersion + appliedOps.length != newVersion
@@ -119,6 +127,7 @@
        multi.rpush keys.docOps(doc_id: doc_id), jsonOps...
        multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL
        multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1
+        multi.set keys.trackChangesEntries(doc_id:doc_id), JSON.stringify(track_changes_entries)
        multi.exec (error, replys) ->
          return callback(error) if error?
          return callback()
@@ -126,3 +135,6 @@
  getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) ->
    rclient.smembers keys.docsInProject(project_id: project_id), callback
 
+  setTrackChanges: (project_id, doc_id, track_changes_on, callback = (error) ->) ->
+    value = (if track_changes_on then "1" else "0")
+    rclient.set keys.trackChangesEnabled({doc_id}), value, callback
diff --git a/services/document-updater/app/coffee/ShareJsDB.coffee b/services/document-updater/app/coffee/ShareJsDB.coffee
index 3d80c680cb..a21c8aea7f 100644
--- a/services/document-updater/app/coffee/ShareJsDB.coffee
+++ b/services/document-updater/app/coffee/ShareJsDB.coffee
@@ -1,12 +1,11 @@
 Keys = require('./UpdateKeys')
 Settings = require('settings-sharelatex')
-DocumentManager = require "./DocumentManager"
 RedisManager = require "./RedisManager"
 Errors = require "./Errors"
 logger = require "logger-sharelatex"
 
 module.exports = class ShareJsDB
-  constructor: () ->
+  constructor: (@project_id, @doc_id, @lines, @version) ->
    @appliedOps = {}
    # ShareJS calls this detached from the instance, so we need
    # to bind it to keep our context that can access @appliedOps
@@ -31,22 +30,14 @@
      callback()
 
  getSnapshot: (doc_key, callback) ->
-    [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key)
-    DocumentManager.getDoc project_id, doc_id, (error, lines, version) ->
-      return callback(error) if error?
-      if !lines? or !version?
-        return callback(new Errors.NotFoundError("document not found: #{doc_id}"))
-
-      if lines.length > 0 and lines[0].text?
-        type = "json"
-        snapshot = lines: lines
-      else
-        type = "text"
-        snapshot = lines.join("\n")
-      callback null,
-        snapshot: snapshot
-        v: parseInt(version, 10)
-        type: type
+    if doc_key != Keys.combineProjectIdAndDocId(@project_id, @doc_id)
+      return callback(new Errors.NotFoundError("unexpected doc_key #{doc_key}, expected #{Keys.combineProjectIdAndDocId(@project_id, @doc_id)}"))
+    else
+      return callback null, {
+        snapshot: @lines.join("\n")
+        v: parseInt(@version, 10)
+        type: "text"
+      }
 
  # To be able to remove a doc from the ShareJS memory
  # we need to call Model::delete, which calls this
diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
index 985d03094a..876d56e71b 100644
--- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
+++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee
@@ -6,22 +6,19 @@ Settings = require('settings-sharelatex')
 Keys = require "./UpdateKeys"
 {EventEmitter} = require "events"
 util = require "util"
-
-redis = require("redis-sharelatex")
-rclient = redis.createClient(Settings.redis.web)
-
+WebRedisManager = require "./WebRedisManager"
 
 ShareJsModel:: = {}
 util.inherits ShareJsModel, EventEmitter
 
 module.exports = ShareJsUpdateManager =
-  getNewShareJsModel: () ->
-    db = new ShareJsDB()
+  getNewShareJsModel: (project_id, doc_id, lines, version) ->
+    db = new ShareJsDB(project_id, doc_id, lines, version)
    model = new ShareJsModel(db, maxDocLength: Settings.max_doc_length)
    model.db = db
    return model
 
-  applyUpdate: (project_id, doc_id, update, callback = (error, updatedDocLines) ->) ->
+  applyUpdate: (project_id, doc_id, update, lines, version, callback = (error, updatedDocLines) ->) ->
    logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates"
    jobs = []
@@ -30,7 +27,7 @@
    # getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee)
    # This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on
    # my 2009 MBP).
- model = @getNewShareJsModel() + model = @getNewShareJsModel(project_id, doc_id, lines, version) @_listenForOps(model) doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id) model.applyOp doc_key, update, (error) -> @@ -55,18 +52,9 @@ module.exports = ShareJsUpdateManager = [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) ShareJsUpdateManager._sendOp(project_id, doc_id, opData) - _sendOp: (project_id, doc_id, opData) -> - data = - project_id: project_id - doc_id: doc_id - op: opData - data = JSON.stringify data - rclient.publish "applied-ops", data + _sendOp: (project_id, doc_id, op) -> + WebRedisManager.sendData {project_id, doc_id, op} _sendError: (project_id, doc_id, error) -> - data = JSON.stringify - project_id: project_id - doc_id: doc_id - error: error.message || error - rclient.publish "applied-ops", data + WebRedisManager.sendData {project_id, doc_id, error: error.message || error} diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 9aa1c0ad47..94f8a11ca1 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -1,44 +1,12 @@ -settings = require "settings-sharelatex" -request = require "request" -logger = require "logger-sharelatex" -async = require "async" -WebRedisManager = require "./WebRedisManager" +ChangesTracker = require "./ChangesTracker" module.exports = TrackChangesManager = - flushDocChanges: (project_id, doc_id, callback = (error) ->) -> - if !settings.apis?.trackchanges? - logger.warn doc_id: doc_id, "track changes API is not configured, so not flushing" - return callback() - - url = "#{settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" - logger.log project_id: project_id, doc_id: doc_id, url: url, "flushing doc in track changes api" - request.post url, (error, res, body)-> - if error? - return callback(error) - else if res.statusCode >= 200 and res.statusCode < 300 - return callback(null) - else - error = new Error("track changes api returned a failure status code: #{res.statusCode}") - return callback(error) - - FLUSH_EVERY_N_OPS: 50 - pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> - if ops.length == 0 - return callback() - WebRedisManager.pushUncompressedHistoryOps project_id, doc_id, ops, (error, length) -> - return callback(error) if error? - # We want to flush every 50 ops, i.e. 50, 100, 150, etc - # Find out which 'block' (i.e. 0-49, 50-99) we were in before and after pushing these - # ops. If we've changed, then we've gone over a multiple of 50 and should flush. - # (Most of the time, we will only hit 50 and then flushing will put us back to 0) - previousLength = length - ops.length - prevBlock = Math.floor(previousLength / TrackChangesManager.FLUSH_EVERY_N_OPS) - newBlock = Math.floor(length / TrackChangesManager.FLUSH_EVERY_N_OPS) - if newBlock != prevBlock - # Do this in the background since it uses HTTP and so may be too - # slow to wait for when processing a doc update. - logger.log length: length, doc_id: doc_id, project_id: project_id, "flushing track changes api" - TrackChangesManager.flushDocChanges project_id, doc_id, (error) -> - if error? 
- logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api" - callback() \ No newline at end of file + applyUpdate: (project_id, doc_id, entries = {}, updates = [], track_changes, callback = (error, new_entries) ->) -> + {changes, comments} = entries + changesTracker = new ChangesTracker(changes, comments) + changesTracker.track_changes = track_changes + for update in updates + for op in update.op + changesTracker.applyOp(op, { user_id: update.meta?.user_id, }) + {changes, comments} = changesTracker + callback null, {changes, comments} \ No newline at end of file diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index f35bc1a9b7..d08d9a62f3 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -2,11 +2,14 @@ LockManager = require "./LockManager" RedisManager = require "./RedisManager" WebRedisManager = require "./WebRedisManager" ShareJsUpdateManager = require "./ShareJsUpdateManager" -TrackChangesManager = require "./TrackChangesManager" +HistoryManager = require "./HistoryManager" Settings = require('settings-sharelatex') async = require("async") logger = require('logger-sharelatex') Metrics = require "./Metrics" +Errors = require "./Errors" +DocumentManager = require "./DocumentManager" +TrackChangesManager = require "./TrackChangesManager" module.exports = UpdateManager = processOutstandingUpdates: (project_id, doc_id, callback = (error) ->) -> @@ -45,12 +48,18 @@ module.exports = UpdateManager = applyUpdate: (project_id, doc_id, update, callback = (error) ->) -> UpdateManager._sanitizeUpdate update - ShareJsUpdateManager.applyUpdate project_id, doc_id, update, (error, updatedDocLines, version, appliedOps) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, track_changes, track_changes_entries) -> return callback(error) if error? - logger.log doc_id: doc_id, version: version, "updating doc in redis" - RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, (error) -> + if !lines? or !version? + return callback(new Errors.NotFoundError("document not found: #{doc_id}")) + ShareJsUpdateManager.applyUpdate project_id, doc_id, update, lines, version, (error, updatedDocLines, version, appliedOps) -> return callback(error) if error? - TrackChangesManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, callback + TrackChangesManager.applyUpdate project_id, doc_id, track_changes_entries, appliedOps, track_changes, (error, new_track_changes_entries) -> + return callback(error) if error? + logger.log doc_id: doc_id, version: version, "updating doc in redis" + RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_track_changes_entries, (error) -> + return callback(error) if error? + HistoryManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, callback lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) -> LockManager.getLock doc_id, (error, lockValue) -> diff --git a/services/document-updater/app/coffee/WebRedisManager.coffee b/services/document-updater/app/coffee/WebRedisManager.coffee index 73c099f9da..eb3b6a583c 100644 --- a/services/document-updater/app/coffee/WebRedisManager.coffee +++ b/services/document-updater/app/coffee/WebRedisManager.coffee @@ -32,4 +32,7 @@ module.exports = WebRedisManager = ], (error, results) -> return callback(error) if error? 
[length, _] = results - callback(error, length) \ No newline at end of file + callback(error, length) + + sendData: (data) -> + rclient.publish "applied-ops", JSON.stringify(data) \ No newline at end of file diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 15456db932..edb8c56ad3 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -32,6 +32,8 @@ module.exports = docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + trackChangesEnabled: ({doc_id}) -> "TrackChangesEnabled:#{doc_id}" + trackChangesEntries: ({doc_id}) -> "TrackChangesEntries:#{doc_id}" # }, { # cluster: [{ # port: "7000" @@ -44,6 +46,8 @@ module.exports = # docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + # trackChangesEnabled: ({doc_id}) -> "TrackChangesEnabled:{#{doc_id}}" + # trackChangesEntries: ({doc_id}) -> "TrackChangesEntries:{#{doc_id}}" }] max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee b/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee new file mode 100644 index 0000000000..406f46b430 --- /dev/null +++ b/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee @@ -0,0 +1,96 @@ +sinon = require "sinon" +chai = require("chai") +chai.should() +async = require "async" +rclient = require("redis").createClient() + +MockWebApi = require "./helpers/MockWebApi" +DocUpdaterClient = require "./helpers/DocUpdaterClient" + +describe "Track changes", -> + describe "turning on track changes", -> + before (done) -> + DocUpdaterClient.subscribeToAppliedOps @appliedOpsListener = sinon.stub() + @project_id = DocUpdaterClient.randomId() + @docs = [{ + id: doc_id0 = DocUpdaterClient.randomId() + lines: ["one", "two", "three"] + updatedLines: ["one", "one and a half", "two", "three"] + }, { + id: doc_id1 = DocUpdaterClient.randomId() + lines: ["four", "five", "six"] + updatedLines: ["four", "four and a half", "five", "six"] + }] + for doc in @docs + MockWebApi.insertDoc @project_id, doc.id, { + lines: doc.lines + version: 0 + } + async.series @docs.map((doc) => + (callback) => + DocUpdaterClient.preloadDoc @project_id, doc.id, callback + ), (error) => + throw error if error? + setTimeout () => + DocUpdaterClient.setTrackChangesOn @project_id, (error, res, body) => + @statusCode = res.statusCode + done() + , 200 + + it "should return a 204 status code", -> + @statusCode.should.equal 204 + + it "should send a track changes message to real-time for each doc", -> + @appliedOpsListener.calledWith("applied-ops", JSON.stringify({ + project_id: @project_id, doc_id: @docs[0].id, track_changes_on: true + })).should.equal true + @appliedOpsListener.calledWith("applied-ops", JSON.stringify({ + project_id: @project_id, doc_id: @docs[1].id, track_changes_on: true + })).should.equal true + + it "should set the track changes key in redis", (done) -> + rclient.get "TrackChangesEnabled:#{@docs[0].id}", (error, value) => + throw error if error? + value.should.equal "1" + rclient.get "TrackChangesEnabled:#{@docs[1].id}", (error, value) -> + throw error if error? 
+ value.should.equal "1" + done() + + describe "tracking changes", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @doc = { + id: doc_id0 = DocUpdaterClient.randomId() + lines: ["one", "two", "three"] + } + @update = + doc: @doc.id + op: [{ + i: "one and a half\n" + p: 4 + }] + v: 0 + meta: + user_id: @user_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + DocUpdaterClient.setTrackChangesOn @project_id, (error, res, body) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc.id, @update, (error) -> + throw error if error? + setTimeout done, 200 + + it "should set the updated track changes entries in redis", (done) -> + rclient.get "TrackChangesEntries:#{@doc.id}", (error, value) => + throw error if error? + entries = JSON.parse(value) + change = entries.changes[0] + change.op.should.deep.equal @update.op[0] + change.metadata.user_id.should.equal @user_id + done() + diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index a14f6f9364..b90e7ea82e 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -72,7 +72,18 @@ module.exports = DocUpdaterClient = deleteProject: (project_id, callback = () ->) -> request.del "http://localhost:3003/project/#{project_id}", callback - - - - + + setTrackChangesOn: (project_id, callback = () ->) -> + request.post { + url: "http://localhost:3003/project/#{project_id}/track_changes" + json: + on: true + }, callback + + setTrackChangesOff: (project_id, callback = () ->) -> + request.post { + url: "http://localhost:3003/project/#{project_id}/track_changes" + json: + on: false + }, callback + From db8b4bf991db0e7922435364b95dc76705720b1b Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 29 Nov 2016 14:57:05 +0000 Subject: [PATCH 165/769] Update acceptance test script --- .../test/acceptance/scripts/full-test.sh | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) mode change 100644 => 100755 services/document-updater/test/acceptance/scripts/full-test.sh diff --git a/services/document-updater/test/acceptance/scripts/full-test.sh b/services/document-updater/test/acceptance/scripts/full-test.sh old mode 100644 new mode 100755 index 32fa62133c..af8ad4103d --- a/services/document-updater/test/acceptance/scripts/full-test.sh +++ b/services/document-updater/test/acceptance/scripts/full-test.sh @@ -1,20 +1,22 @@ #! /usr/bin/env bash +npm rebuild + echo ">> Starting server..." -grunt execute:app >> /dev/null & -_pid="$!" +grunt --no-color forever:app:start -echo ">> Server started with pid: $_pid" +echo ">> Server started" -sleep 20 +sleep 5 echo ">> Running acceptance tests..." -grunt mochaTest:acceptance +grunt --no-color test:acceptance _test_exit_code=$? 
-echo ">> Killing server (pid: $_pid)" -kill -1 "$_pid" +echo ">> Killing server" + +grunt --no-color forever:app:stop echo ">> Done" From 2df5c083897d328804610e0aa525e631f47c9d62 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 29 Nov 2016 15:05:02 +0000 Subject: [PATCH 166/769] Update acceptance tests to use redis-sharelatex --- services/document-updater/package.json | 2 +- .../test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 85958950d8..21b48df0e0 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -35,6 +35,6 @@ "grunt-execute": "~0.1.5", "grunt-forever": "0.4.1", "grunt-mocha-test": "~0.9.0", - "grunt-shell": "^1.3.0" + "grunt-shell": "^2.1.0" } } diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 89f1acbcfe..053ef2a464 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -3,7 +3,8 @@ chai = require("chai") chai.should() expect = chai.expect async = require "async" -rclient = require("redis").createClient() +Settings = require('settings-sharelatex') +rclient = require("redis-sharelatex").createClient(Settings.redis.web) {db, ObjectId} = require "../../../app/js/mongojs" MockTrackChangesApi = require "./helpers/MockTrackChangesApi" From f7ba0946be27ea716358818d1a760dd23236a4ce Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 29 Nov 2016 15:09:11 +0000 Subject: [PATCH 167/769] More acceptance test fixes --- services/document-updater/Gruntfile.coffee | 18 +++++++++--------- .../coffee/SettingADocumentTests.coffee | 3 ++- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 042fe32ce2..2e0e12dd66 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -1,13 +1,4 @@ module.exports = (grunt) -> - grunt.loadNpmTasks 'grunt-contrib-coffee' - grunt.loadNpmTasks 'grunt-contrib-clean' - grunt.loadNpmTasks 'grunt-mocha-test' - grunt.loadNpmTasks 'grunt-available-tasks' - grunt.loadNpmTasks 'grunt-execute' - grunt.loadNpmTasks 'grunt-bunyan' - grunt.loadNpmTasks 'grunt-forever' - grunt.loadNpmTasks 'grunt-shell' - grunt.initConfig forever: app: @@ -107,6 +98,15 @@ module.exports = (grunt) -> "help" ] + grunt.loadNpmTasks 'grunt-contrib-coffee' + grunt.loadNpmTasks 'grunt-contrib-clean' + grunt.loadNpmTasks 'grunt-mocha-test' + grunt.loadNpmTasks 'grunt-available-tasks' + grunt.loadNpmTasks 'grunt-execute' + grunt.loadNpmTasks 'grunt-bunyan' + grunt.loadNpmTasks 'grunt-forever' + grunt.loadNpmTasks 'grunt-shell' + grunt.registerTask 'help', 'Display this help list', 'availabletasks' grunt.registerTask 'compile:server', 'Compile the server side coffee script', ['clean:app', 'coffee:app', 'coffee:app_dir'] diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 0d05e30982..243b3db234 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ 
b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -3,7 +3,8 @@ chai = require("chai") chai.should() expect = require("chai").expect {db, ObjectId} = require "../../../app/js/mongojs" -rclient = require("redis").createClient() +Settings = require('settings-sharelatex') +rclient = require("redis-sharelatex").createClient(Settings.redis.web) MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" From 77b1d0ea215461a1ab354a61f064d8fd25912c5c Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 29 Nov 2016 15:29:22 +0000 Subject: [PATCH 168/769] Fix up package versions --- services/document-updater/package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 21b48df0e0..18a517526b 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -11,7 +11,7 @@ "coffee-script": "1.4.0", "express": "3.3.4", "ioredis": "^2.2.0", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.1", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "mongojs": "0.9.11", @@ -33,8 +33,8 @@ "grunt-contrib-clean": "~0.5.0", "grunt-contrib-coffee": "~0.10.0", "grunt-execute": "~0.1.5", - "grunt-forever": "0.4.1", + "grunt-forever": "^0.4.7", "grunt-mocha-test": "~0.9.0", - "grunt-shell": "^2.1.0" + "grunt-shell": "^1.3.0" } } From 2852043a6d98de7b06dd39f769f2764fd8b07e22 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 29 Nov 2016 17:06:23 +0000 Subject: [PATCH 169/769] Don't store doc version in Mongo directly, instead use docstore --- services/document-updater/app.coffee | 8 - .../app/coffee/MongoHealthCheck.coffee | 26 --- .../app/coffee/PersistenceManager.coffee | 45 +---- .../app/coffee/mongojs.coffee | 6 - services/document-updater/package.json | 1 - .../coffee/ApplyingUpdatesToADocTests.coffee | 176 ++++++++---------- .../coffee/DeletingADocumentTests.coffee | 48 ++--- .../coffee/DeletingAProjectTests.coffee | 6 +- .../coffee/FlushingAProjectTests.coffee | 6 +- .../coffee/FlushingDocsTests.coffee | 58 ++---- .../coffee/GettingADocumentTests.coffee | 24 +-- .../coffee/SettingADocumentTests.coffee | 45 ++--- .../coffee/helpers/MockWebApi.coffee | 11 +- .../getDocFromWebTests.coffee | 87 --------- .../PersistenceManager/getDocTests.coffee | 88 ++++++--- .../getDocVersionInMongoTests.coffee | 47 ----- .../setDocInWebTests.coffee | 88 --------- .../PersistenceManager/setDocTests.coffee | 88 +++++++-- .../setDocVersionInMongo.coffee | 44 ----- 19 files changed, 287 insertions(+), 615 deletions(-) delete mode 100644 services/document-updater/app/coffee/MongoHealthCheck.coffee delete mode 100644 services/document-updater/app/coffee/mongojs.coffee delete mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index af0ad242c2..004b9f77bc 100644 --- 
a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -8,7 +8,6 @@ DispatchManager = require('./app/js/DispatchManager') Keys = require('./app/js/RedisKeyBuilder') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" -MongoHealthCheck = require('./app/js/MongoHealthCheck') redis = require("redis-sharelatex") rclient = redis.createClient(Settings.redis.web) @@ -59,13 +58,6 @@ app.get '/status', (req, res)-> else res.send('document updater is alive') -app.get '/health_check/mongo', (req, res, next) -> - MongoHealthCheck.isAlive (error) -> - if error? - res.send 500, error.message - else - res.send 200 - redisCheck = require("redis-sharelatex").activeHealthCheckRedis(Settings.redis.web) app.get "/health_check/redis", (req, res, next)-> if redisCheck.isAlive() diff --git a/services/document-updater/app/coffee/MongoHealthCheck.coffee b/services/document-updater/app/coffee/MongoHealthCheck.coffee deleted file mode 100644 index 3872c051c4..0000000000 --- a/services/document-updater/app/coffee/MongoHealthCheck.coffee +++ /dev/null @@ -1,26 +0,0 @@ -Settings = require "settings-sharelatex" -PersistenceManager = require "./PersistenceManager" - -module.exports = MongoHealthCheck = - isAlive: (_callback = (error) ->) -> - # We've seen very occasionally the doc-updater losing its connection to Mongo. - # E.g. https://sharelatex.hackpad.com/29th-Aug-2015-0650-0740-fHlw8RL8zuN - # It seems that the mongo callbacks never returned. - # Mongo is only called in the persistence manager, so we do a read-only - # test call, check that it's working, and returns in a reasonable time. - callback = (args...) -> - _callback(args...) - _callback = () -> - - doc_id = Settings.smokeTest?.doc_id - if !doc_id? - return callback(new Error("No test doc_id configured")) - - PersistenceManager.getDocVersionInMongo doc_id, (error, version) -> - return callback(error) if error? - callback(null) - - timeout = Settings.smokeTest?.timeout or 10000 - setTimeout () -> - callback(new Error("Mongo did not return in #{timeout}ms")) - , timeout \ No newline at end of file diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 605425eb5e..27585c8290 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -2,7 +2,6 @@ request = require "request" Settings = require "settings-sharelatex" Errors = require "./Errors" Metrics = require "./Metrics" -{db, ObjectId} = require("./mongojs") logger = require "logger-sharelatex" # We have to be quick with HTTP calls because we're holding a lock that @@ -11,21 +10,7 @@ logger = require "logger-sharelatex" MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds module.exports = PersistenceManager = - getDoc: (project_id, doc_id, callback = (error, lines, version) ->) -> - PersistenceManager.getDocFromWeb project_id, doc_id, (error, lines) -> - return callback(error) if error? - PersistenceManager.getDocVersionInMongo doc_id, (error, version) -> - return callback(error) if error? - callback null, lines, version - - setDoc: (project_id, doc_id, lines, version, callback = (error) ->) -> - PersistenceManager.setDocInWeb project_id, doc_id, lines, (error) -> - return callback(error) if error? - PersistenceManager.setDocVersionInMongo doc_id, version, (error) -> - return callback(error) if error? 
- callback() - - getDocFromWeb: (project_id, doc_id, _callback = (error, lines) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -50,13 +35,13 @@ module.exports = PersistenceManager = body = JSON.parse body catch e return callback(e) - return callback null, body.lines + return callback null, body.lines, body.version else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - setDocInWeb: (project_id, doc_id, lines, _callback = (error) ->) -> + setDoc: (project_id, doc_id, lines, version, _callback = (error) ->) -> timer = new Metrics.Timer("persistenceManager.setDoc") callback = (args...) -> timer.done() @@ -68,6 +53,7 @@ module.exports = PersistenceManager = method: "POST" body: JSON.stringify lines: lines + version: version headers: "content-type": "application/json" auth: @@ -84,27 +70,4 @@ module.exports = PersistenceManager = return callback(new Errors.NotFoundError("doc not not found: #{url}")) else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - - getDocVersionInMongo: (doc_id, callback = (error, version) ->) -> - db.docOps.find { - doc_id: ObjectId(doc_id) - }, { - version: 1 - }, (error, docs) -> - return callback(error) if error? - if docs.length < 1 or !docs[0].version? - return callback null, 0 - else - return callback null, docs[0].version - setDocVersionInMongo: (doc_id, version, callback = (error) ->) -> - db.docOps.update { - doc_id: ObjectId(doc_id) - }, { - $set: version: version - }, { - upsert: true - }, callback - - - diff --git a/services/document-updater/app/coffee/mongojs.coffee b/services/document-updater/app/coffee/mongojs.coffee deleted file mode 100644 index cf9f5fec86..0000000000 --- a/services/document-updater/app/coffee/mongojs.coffee +++ /dev/null @@ -1,6 +0,0 @@ -Settings = require "settings-sharelatex" -mongojs = require "mongojs" -db = mongojs.connect(Settings.mongo.url, ["docOps"]) -module.exports = - db: db - ObjectId: mongojs.ObjectId diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 18a517526b..eaa8c726ec 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -14,7 +14,6 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.1", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", - "mongojs": "0.9.11", "redis-sharelatex": "0.0.9", "request": "2.25.0", "sandboxed-module": "~0.2.0", diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 053ef2a464..4166f8499e 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -5,7 +5,6 @@ expect = chai.expect async = require "async" Settings = require('settings-sharelatex') rclient = require("redis-sharelatex").createClient(Settings.redis.web) -{db, ObjectId} = require "../../../app/js/mongojs" MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" @@ -29,15 +28,10 @@ describe "Applying updates to a doc", -> [@project_id, @doc_id] = 
[DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] sinon.spy MockWebApi, "getDocument" - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? - setTimeout done, 200 + setTimeout done, 200 after -> MockWebApi.getDocument.restore() @@ -66,15 +60,13 @@ describe "Applying updates to a doc", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert doc_id: ObjectId(@doc_id), version: @version, (error) => + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? - setTimeout done, 200 + setTimeout done, 200 after -> MockWebApi.getDocument.restore() @@ -99,24 +91,22 @@ describe "Applying updates to a doc", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] lines = ["", "", ""] - MockWebApi.insertDoc @project_id, @doc_id, lines: lines - db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) => - throw error if error? - @updates = [ - { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } - { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } - { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } - { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } - { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } - { doc_id: @doc_id, v: 5, op: [i: " ", p: 5 ] } - { doc_id: @doc_id, v: 6, op: [i: "w", p: 6 ] } - { doc_id: @doc_id, v: 7, op: [i: "o", p: 7 ] } - { doc_id: @doc_id, v: 8, op: [i: "r", p: 8 ] } - { doc_id: @doc_id, v: 9, op: [i: "l", p: 9 ] } - { doc_id: @doc_id, v: 10, op: [i: "d", p: 10] } - ] - @my_result = ["hello world", "", ""] - done() + MockWebApi.insertDoc @project_id, @doc_id, {lines: lines, version: 0} + @updates = [ + { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } + { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } + { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } + { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } + { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } + { doc_id: @doc_id, v: 5, op: [i: " ", p: 5 ] } + { doc_id: @doc_id, v: 6, op: [i: "w", p: 6 ] } + { doc_id: @doc_id, v: 7, op: [i: "o", p: 7 ] } + { doc_id: @doc_id, v: 8, op: [i: "r", p: 8 ] } + { doc_id: @doc_id, v: 9, op: [i: "l", p: 9 ] } + { doc_id: @doc_id, v: 10, op: [i: "d", p: 10] } + ] + @my_result = ["hello world", "", ""] + done() it "should be able to continue applying updates when the project has been deleted", (done) -> actions = [] @@ -155,21 +145,17 @@ describe "Applying updates to a doc", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] lines = ["", "", ""] - MockWebApi.insertDoc @project_id, @doc_id, lines: lines - db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) => - throw error if error? 
- - @updates = [ - { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } - { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } - { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } - { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } - { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } - { doc_id: @doc_id, v: 0, op: [i: "world", p: 1 ] } - ] - @my_result = ["hello", "world", ""] - - done() + MockWebApi.insertDoc @project_id, @doc_id, {lines: lines, version: 0} + @updates = [ + { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } + { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } + { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } + { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } + { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } + { doc_id: @doc_id, v: 0, op: [i: "world", p: 1 ] } + ] + @my_result = ["hello", "world", ""] + done() it "should be able to continue applying updates when the project has been deleted", (done) -> actions = [] @@ -190,12 +176,10 @@ describe "Applying updates to a doc", -> describe "with a broken update", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert doc_id: ObjectId(@doc_id), version: @version, (error) => + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) -> throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) -> - throw error if error? - setTimeout done, 200 + setTimeout done, 200 it "should not update the doc", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => @@ -214,19 +198,17 @@ describe "Applying updates to a doc", -> sinon.spy MockTrackChangesApi, "flushDoc" - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert doc_id: ObjectId(@doc_id), version: 0, (error) => + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: 0} + + # Send updates in chunks to causes multiple flushes + actions = [] + for i in [0..9] + do (i) => + actions.push (cb) => + DocUpdaterClient.sendUpdates @project_id, @doc_id, updates.slice(i*10, (i+1)*10), cb + async.series actions, (error) => throw error if error? - - # Send updates in chunks to causes multiple flushes - actions = [] - for i in [0..9] - do (i) => - actions.push (cb) => - DocUpdaterClient.sendUpdates @project_id, @doc_id, updates.slice(i*10, (i+1)*10), cb - async.series actions, (error) => - throw error if error? - setTimeout done, 2000 + setTimeout done, 2000 after -> MockTrackChangesApi.flushDoc.restore() @@ -257,41 +239,37 @@ describe "Applying updates to a doc", -> describe "when the sending duplicate ops", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + + DocUpdaterClient.subscribeToAppliedOps @messageCallback = sinon.stub() + + # One user delete 'one', the next turns it into 'once'. The second becomes a NOP. + DocUpdaterClient.sendUpdate @project_id, @doc_id, { + doc: @doc_id + op: [{ + i: "one and a half\n" + p: 4 + }] + v: @version + meta: + source: "ikHceq3yfAdQYzBo4-xZ" }, (error) => throw error if error? - # One user delete 'one', the next turns it into 'once'. The second becomes a NOP. 
- DocUpdaterClient.sendUpdate @project_id, @doc_id, { - doc: @doc_id - op: [{ - i: "one and a half\n" - p: 4 - }] - v: @version - meta: - source: "ikHceq3yfAdQYzBo4-xZ" - }, (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.sendUpdate @project_id, @doc_id, { - doc: @doc_id - op: [{ - i: "one and a half\n" - p: 4 - }] - v: @version - dupIfSource: ["ikHceq3yfAdQYzBo4-xZ"] - meta: - source: "ikHceq3yfAdQYzBo4-xZ" - }, (error) => - throw error if error? - setTimeout done, 200 - , 200 - - DocUpdaterClient.subscribeToAppliedOps @messageCallback = sinon.stub() + setTimeout () => + DocUpdaterClient.sendUpdate @project_id, @doc_id, { + doc: @doc_id + op: [{ + i: "one and a half\n" + p: 4 + }] + v: @version + dupIfSource: ["ikHceq3yfAdQYzBo4-xZ"] + meta: + source: "ikHceq3yfAdQYzBo4-xZ" + }, (error) => + throw error if error? + setTimeout done, 200 + , 200 it "should update the doc", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee index e08b7fc12f..291b627a3e 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee @@ -1,7 +1,6 @@ sinon = require "sinon" chai = require("chai") chai.should() -{db, ObjectId} = require "../../../app/js/mongojs" MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" @@ -28,47 +27,32 @@ describe "Deleting a document", -> describe "when the updated doc exists in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocument" sinon.spy MockWebApi, "getDocument" - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => - throw error if error? 
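# The delete tests below exercise a flush-then-forget flow (a sketch, not from this patch; the shape
# matches the DocumentManager stubs asserted in the unit tests later in this series):
flushAndDeleteDoc = (project_id, doc_id, callback) ->
  DocumentManager.flushDocIfLoaded project_id, doc_id, (error) ->
    return callback(error) if error?
    # the track-changes flush also asserted in those tests is elided here
    RedisManager.removeDocFromMemory project_id, doc_id, callback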
- setTimeout () => - DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => - @statusCode = res.statusCode - setTimeout done, 200 - , 200 + setTimeout () => + DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => + @statusCode = res.statusCode + setTimeout done, 200 + , 200 after -> - MockWebApi.setDocumentLines.restore() + MockWebApi.setDocument.restore() MockWebApi.getDocument.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 - it "should send the updated document to the web api", -> - MockWebApi.setDocumentLines - .calledWith(@project_id, @doc_id, @result) + it "should send the updated document and version to the web api", -> + MockWebApi.setDocument + .calledWith(@project_id, @doc_id, @result, @version + 1) .should.equal true - it "should write the version to mongo", (done) -> - db.docOps.find { - doc_id: ObjectId(@doc_id) - }, { - version: 1 - }, (error, docs) => - doc = docs[0] - doc.version.should.equal @version + 1 - done() - it "should need to reload the doc if read again", (done) -> MockWebApi.getDocument.called.should.equal.false DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => @@ -86,21 +70,21 @@ describe "Deleting a document", -> MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines } - sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocument" sinon.spy MockWebApi, "getDocument" DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => @statusCode = res.statusCode setTimeout done, 200 after -> - MockWebApi.setDocumentLines.restore() + MockWebApi.setDocument.restore() MockWebApi.getDocument.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 it "should not need to send the updated document to the web api", -> - MockWebApi.setDocumentLines.called.should.equal false + MockWebApi.setDocument.called.should.equal false it "should need to reload the doc if read again", (done) -> MockWebApi.getDocument.called.should.equal.false diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 2f7a47ff8b..27d241d97d 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -46,7 +46,7 @@ describe "Deleting a project", -> describe "with documents which have been updated", -> before (done) -> - sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocument" async.series @docs.map((doc) => (callback) => DocUpdaterClient.preloadDoc @project_id, doc.id, (error) => @@ -62,14 +62,14 @@ describe "Deleting a project", -> , 200 after -> - MockWebApi.setDocumentLines.restore() + MockWebApi.setDocument.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 it "should send each document to the web api", -> for doc in @docs - MockWebApi.setDocumentLines + MockWebApi.setDocument .calledWith(@project_id, doc.id, doc.updatedLines) .should.equal true diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee index 61837fa2b6..f6f7818990 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee @@ -40,7 +40,7 @@ describe "Flushing a project", -> describe "with 
documents which have been updated", -> before (done) -> - sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocument" async.series @docs.map((doc) => (callback) => @@ -57,14 +57,14 @@ describe "Flushing a project", -> , 200 after -> - MockWebApi.setDocumentLines.restore() + MockWebApi.setDocument.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 it "should send each document to the web api", -> for doc in @docs - MockWebApi.setDocumentLines + MockWebApi.setDocument .calledWith(@project_id, doc.id, doc.updatedLines) .should.equal true diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index 8fe89de7be..47dddcd19a 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -3,7 +3,6 @@ chai = require("chai") chai.should() expect = chai.expect async = require "async" -{db, ObjectId} = require "../../../app/js/mongojs" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -24,53 +23,37 @@ describe "Flushing a doc to Mongo", -> describe "when the updated doc exists in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocument" - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) => throw error if error? - DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) => - throw error if error? 
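# "Flushing to the web api" in these tests ends in a single authenticated POST carrying both lines and
# version (a sketch assembled from the PersistenceManager.setDoc unit tests later in this series; the
# timeout and auth values are taken from those tests, not guaranteed elsewhere):
request {
  url: "#{url}/project/#{project_id}/doc/#{doc_id}"
  method: "POST"
  body: JSON.stringify({lines: lines, version: version})
  headers: {"content-type": "application/json"}
  auth: {user: user, pass: pass, sendImmediately: true}
  jar: false
  timeout: 5000
}, callback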
- setTimeout () => - DocUpdaterClient.flushDoc @project_id, @doc_id, done - , 200 + setTimeout () => + DocUpdaterClient.flushDoc @project_id, @doc_id, done + , 200 after -> - MockWebApi.setDocumentLines.restore() + MockWebApi.setDocument.restore() - it "should flush the updated doc lines to the web api", -> - MockWebApi.setDocumentLines - .calledWith(@project_id, @doc_id, @result) + it "should flush the updated doc lines and version to the web api", -> + MockWebApi.setDocument + .calledWith(@project_id, @doc_id, @result, @version + 1) .should.equal true - it "should store the updated doc version into mongo", (done) -> - db.docOps.find { - doc_id: ObjectId(@doc_id) - }, { - version: 1 - }, (error, docs) => - doc = docs[0] - doc.version.should.equal @version + 1 - done() - - describe "when the doc does not exist in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines } - sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocument" DocUpdaterClient.flushDoc @project_id, @doc_id, done after -> - MockWebApi.setDocumentLines.restore() + MockWebApi.setDocument.restore() it "should not flush the doc to the web api", -> - MockWebApi.setDocumentLines.called.should.equal false + MockWebApi.setDocument.called.should.equal false describe "when the web api http request takes a long time", -> before (done) -> @@ -78,19 +61,14 @@ describe "Flushing a doc to Mongo", -> @timeout = 10000 MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines - } - sinon.stub MockWebApi, "setDocumentLines", (project_id, doc_id, lines, callback = (error) ->) -> - setTimeout callback, 30000 - - db.docOps.insert { - doc_id: ObjectId(@doc_id) version: @version - }, (error) => - throw error if error? - DocUpdaterClient.preloadDoc @project_id, @doc_id, done + } + sinon.stub MockWebApi, "setDocument", (project_id, doc_id, lines, version, callback = (error) ->) -> + setTimeout callback, 30000 + DocUpdaterClient.preloadDoc @project_id, @doc_id, done after -> - MockWebApi.setDocumentLines.restore() + MockWebApi.setDocument.restore() it "should return quickly(ish)", (done) -> start = Date.now() diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee index 0823b8483a..67bbd6ea80 100644 --- a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee @@ -2,7 +2,6 @@ sinon = require "sinon" chai = require("chai") chai.should() expect = chai.expect -{db, ObjectId} = require "../../../app/js/mongojs" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -18,13 +17,9 @@ describe "Getting a document", -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] sinon.spy MockWebApi, "getDocument" - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => - throw error if error? 
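# The read path these tests cover, simplified to lines and version (a sketch, not from this patch; it
# mirrors the DocumentManager.getDoc unit-test stubs later in this series):
getDoc = (project_id, doc_id, callback) ->
  RedisManager.getDoc project_id, doc_id, (error, lines, version) ->
    return callback(error) if error?
    if lines? and version?
      callback null, lines, version, true   # served from Redis
    else
      PersistenceManager.getDoc project_id, doc_id, (error, lines, version) ->
        return callback(error) if error?
        RedisManager.putDocInMemory project_id, doc_id, lines, version, ->
          callback null, lines, version, false   # fetched from the web API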
- DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() after -> MockWebApi.getDocument.restore() @@ -44,16 +39,11 @@ describe "Getting a document", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() after -> MockWebApi.getDocument.restore() diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 243b3db234..3232c6e219 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -2,7 +2,6 @@ sinon = require "sinon" chai = require("chai") chai.should() expect = require("chai").expect -{db, ObjectId} = require "../../../app/js/mongojs" Settings = require('settings-sharelatex') rclient = require("redis-sharelatex").createClient(Settings.redis.web) @@ -27,36 +26,31 @@ describe "Setting a document", -> @user_id = "user-id-123" sinon.spy MockTrackChangesApi, "flushDoc" - sinon.spy MockWebApi, "setDocumentLines" + sinon.spy MockWebApi, "setDocument" after -> - MockWebApi.setDocumentLines.restore() + MockWebApi.setDocument.restore() MockTrackChangesApi.flushDoc.restore() describe "when the updated doc exists in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => + MockWebApi.insertDoc @project_id, @doc_id, lines: @lines, version: @version + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => throw error if error? - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => - throw error if error? 
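# Setting a doc is implemented as a diff pushed through the normal OT pipeline rather than a blind
# overwrite (a sketch, not from this patch; the shape matches the DocumentManagerTests.setDoc
# expectations later in this series):
setDoc = (project_id, doc_id, newLines, source, user_id, callback) ->
  DocumentManager.getDoc project_id, doc_id, (error, oldLines, version) ->
    return callback(error) if error?
    DiffCodec.diffAsShareJsOp oldLines, newLines, (error, op) ->
      return callback(error) if error?
      update =
        doc: doc_id
        v: version
        op: op
        meta: {type: "external", source: source, user_id: user_id}
      UpdateManager.applyUpdate project_id, doc_id, update, callback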
- setTimeout () => - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => - @statusCode = res.statusCode - done() - , 200 + setTimeout () => + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => + @statusCode = res.statusCode + done() + , 200 it "should return a 204 status code", -> @statusCode.should.equal 204 - it "should send the updated doc lines to the web api", -> - MockWebApi.setDocumentLines + it "should send the updated doc lines and version to the web api", -> + MockWebApi.setDocument .calledWith(@project_id, @doc_id, @newLines) .should.equal true @@ -79,21 +73,16 @@ describe "Setting a document", -> describe "when the updated doc does not exist in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines - db.docOps.insert { - doc_id: ObjectId(@doc_id) - version: @version - }, (error) => - throw error if error? - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => - @statusCode = res.statusCode - setTimeout done, 200 + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => + @statusCode = res.statusCode + setTimeout done, 200 it "should return a 204 status code", -> @statusCode.should.equal 204 it "should send the updated doc lines to the web api", -> - MockWebApi.setDocumentLines + MockWebApi.setDocument .calledWith(@project_id, @doc_id, @newLines) .should.equal true diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index 7bab5b9b9f..e77a18c0ea 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -7,11 +7,14 @@ module.exports = MockWebApi = clearDocs: () -> @docs = {} insertDoc: (project_id, doc_id, doc) -> + doc.version ?= 0 + doc.lines ?= [] @docs["#{project_id}:#{doc_id}"] = doc - setDocumentLines: (project_id, doc_id, lines, callback = (error) ->) -> - @docs["#{project_id}:#{doc_id}"] ||= {} - @docs["#{project_id}:#{doc_id}"].lines = lines + setDocument: (project_id, doc_id, lines, version, callback = (error) ->) -> + doc = @docs["#{project_id}:#{doc_id}"] ||= {} + doc.lines = lines + doc.version = version callback null getDocument: (project_id, doc_id, callback = (error, doc) ->) -> @@ -28,7 +31,7 @@ module.exports = MockWebApi = res.send 404 app.post "/project/:project_id/doc/:doc_id", express.bodyParser(), (req, res, next) => - MockWebApi.setDocumentLines req.params.project_id, req.params.doc_id, req.body.lines, (error) -> + MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, (error) -> if error? 
res.send 500 else diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee deleted file mode 100644 index e782c0065b..0000000000 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocFromWebTests.coffee +++ /dev/null @@ -1,87 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/PersistenceManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" - -describe "PersistenceManager.getDocFromWeb", -> - beforeEach -> - @PersistenceManager = SandboxedModule.require modulePath, requires: - "request": @request = sinon.stub() - "settings-sharelatex": @Settings = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @callback = sinon.stub() - @Settings.apis = - web: - url: @url = "www.example.com" - user: @user = "sharelatex" - pass: @pass = "password" - - describe "with a successful response from the web api", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) - @PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) - - it "should call the web api", -> - @request - .calledWith({ - url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" - method: "GET" - headers: - "accept": "application/json" - auth: - user: @user - pass: @pass - sendImmediately: true - jar: false - timeout: 5000 - }) - .should.equal true - - it "should call the callback with the doc lines", -> - @callback.calledWith(null, @lines).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when request returns an error", -> - beforeEach -> - @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) - - it "should return the error", -> - @callback.calledWith(@error).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns 404", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) - - it "should return a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns an error status code", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.getDocFromWeb(@project_id, @doc_id, @callback) - - it "should return an error", -> - @callback.calledWith(new Error("web api error")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee index ae3e476ec4..03ea59cd90 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee @@ -3,7 +3,7 @@ chai = require('chai') should = chai.should() 
modulePath = "../../../../app/js/PersistenceManager.js" SandboxedModule = require('sandboxed-module') -{ObjectId} = require("mongojs") +Errors = require "../../../../app/js/Errors" describe "PersistenceManager.getDoc", -> beforeEach -> @@ -13,34 +13,76 @@ describe "PersistenceManager.getDoc", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() - "logger-sharelatex": @logger = {warn: sinon.stub()} - "./mongojs": - db: @db = { docOps: {} } - ObjectId: ObjectId - - @project_id = ObjectId().toString() - @doc_id = ObjectId().toString() - @callback = sinon.stub() - @lines = ["mock", "doc", "lines"] + "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] @version = 42 + @callback = sinon.stub() + @Settings.apis = + web: + url: @url = "www.example.com" + user: @user = "sharelatex" + pass: @pass = "password" - describe "successfully", -> + describe "with a successful response from the web api", -> beforeEach -> - @PersistenceManager.getDocFromWeb = sinon.stub().callsArgWith(2, null, @lines) - @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArgWith(1, null, @version) - @PersistenceManager.getDoc @project_id, @doc_id, @callback + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - it "should look up the doc in the web api", -> - @PersistenceManager.getDocFromWeb - .calledWith(@project_id, @doc_id) + it "should call the web api", -> + @request + .calledWith({ + url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" + method: "GET" + headers: + "accept": "application/json" + auth: + user: @user + pass: @pass + sendImmediately: true + jar: false + timeout: 5000 + }) .should.equal true - it "should look up the version in Mongo", -> - @PersistenceManager.getDocVersionInMongo - .calledWith(@doc_id) - .should.equal true - - it "should call the callback with the lines and version", -> + it "should call the callback with the doc lines and version", -> @callback.calledWith(null, @lines, @version).should.equal true + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + describe "when request returns an error", -> + beforeEach -> + @request.callsArgWith(1, @error = new Error("oops"), null, null) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return the error", -> + @callback.calledWith(@error).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns 404", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 404}, "") + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error status code", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 500}, "") + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web api error")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee 
b/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee deleted file mode 100644 index 2ab89f6795..0000000000 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocVersionInMongoTests.coffee +++ /dev/null @@ -1,47 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/PersistenceManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" -{ObjectId} = require("mongojs") - -describe "PersistenceManager.getDocVersionInMongo", -> - beforeEach -> - @PersistenceManager = SandboxedModule.require modulePath, requires: - "request": @request = sinon.stub() - "settings-sharelatex": @Settings = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "./mongojs": - db: @db = { docOps: {} } - ObjectId: ObjectId - "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} - - @doc_id = ObjectId().toString() - @callback = sinon.stub() - - describe "getDocVersionInMongo", -> - describe "when the doc exists", -> - beforeEach -> - @doc = - version: @version = 42 - @db.docOps.find = sinon.stub().callsArgWith(2, null, [@doc]) - @PersistenceManager.getDocVersionInMongo @doc_id, @callback - - it "should look for the doc in the database", -> - @db.docOps.find - .calledWith({ doc_id: ObjectId(@doc_id) }, {version: 1}) - .should.equal true - - it "should call the callback with the version", -> - @callback.calledWith(null, @version).should.equal true - - describe "when the doc doesn't exist", -> - beforeEach -> - @db.docOps.find = sinon.stub().callsArgWith(2, null, []) - @PersistenceManager.getDocVersionInMongo @doc_id, @callback - - it "should call the callback with 0", -> - @callback.calledWith(null, 0).should.equal true \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee deleted file mode 100644 index b70c7dbe1b..0000000000 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocInWebTests.coffee +++ /dev/null @@ -1,88 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/PersistenceManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" - -describe "PersistenceManager.setDocInWeb", -> - beforeEach -> - @PersistenceManager = SandboxedModule.require modulePath, requires: - "request": @request = sinon.stub() - "settings-sharelatex": @Settings = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @callback = sinon.stub() - @Settings.apis = - web: - url: @url = "www.example.com" - user: @user = "sharelatex" - pass: @pass = "password" - - describe "with a successful response from the web api", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) - @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @callback) - - it "should call the web api", -> - @request - .calledWith({ - url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" - body: JSON.stringify - lines: @lines - method: "POST" - headers: - "content-type": "application/json" - auth: - user: @user - pass: @pass - 
sendImmediately: true - jar: false - timeout: 5000 - }) - .should.equal true - - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when request returns an error", -> - beforeEach -> - @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @callback) - - it "should return the error", -> - @callback.calledWith(@error).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns 404", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @callback) - - it "should return a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns an error status code", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDocInWeb(@project_id, @doc_id, @lines, @callback) - - it "should return an error", -> - @callback.calledWith(new Error("web api error")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee index 80c0a5e18f..98f252a35d 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee @@ -3,6 +3,7 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/PersistenceManager.js" SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" describe "PersistenceManager.setDoc", -> beforeEach -> @@ -12,27 +13,78 @@ describe "PersistenceManager.setDoc", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() - "logger-sharelatex": @logger = {warn: sinon.stub()} - - @project_id = "mock-project-id" - @doc_id = "mock-doc-id" + "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @version = 42 @callback = sinon.stub() - @lines = ["mock", "doc", "lines"] + @Settings.apis = + web: + url: @url = "www.example.com" + user: @user = "sharelatex" + pass: @pass = "password" - @PersistenceManager.setDocInWeb = sinon.stub().callsArg(3) - @PersistenceManager.setDocVersionInMongo = sinon.stub().callsArg(2) + describe "with a successful response from the web api", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) - @PersistenceManager.setDoc @project_id, @doc_id, @lines, @version, @callback + it "should call the web api", -> + @request + .calledWith({ + url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" + body: JSON.stringify + lines: @lines + version: @version + method: "POST" + headers: + "content-type": "application/json" + auth: + user: @user + pass: @pass + sendImmediately: true + jar: false + timeout: 5000 + }) + .should.equal true - it "should set the doc in 
the web api", -> - @PersistenceManager.setDocInWeb - .calledWith(@project_id, @doc_id, @lines) - .should.equal true + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true - it "should set the doc version in mongo", -> - @PersistenceManager.setDocVersionInMongo - .calledWith(@doc_id, @version) - .should.equal true + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when request returns an error", -> + beforeEach -> + @request.callsArgWith(1, @error = new Error("oops"), null, null) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) + + it "should return the error", -> + @callback.calledWith(@error).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns 404", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 404}, "") + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) + + it "should return a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error status code", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 500}, "") + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web api error")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true - it "should call the callback", -> - @callback.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee deleted file mode 100644 index d642aba0d8..0000000000 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocVersionInMongo.coffee +++ /dev/null @@ -1,44 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/PersistenceManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" -{ObjectId} = require("mongojs") - -describe "PersistenceManager.getDocVersionInMongo", -> - beforeEach -> - @PersistenceManager = SandboxedModule.require modulePath, requires: - "request": @request = sinon.stub() - "settings-sharelatex": @Settings = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "./mongojs": - db: @db = { docOps: {} } - ObjectId: ObjectId - "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} - - @doc_id = ObjectId().toString() - @callback = sinon.stub() - - describe "setDocVersionInMongo", -> - beforeEach -> - @version = 42 - @db.docOps.update = sinon.stub().callsArg(3) - @PersistenceManager.setDocVersionInMongo @doc_id, @version, @callback - - it "should update the doc version", -> - @db.docOps.update - .calledWith({ - doc_id: ObjectId(@doc_id) - }, { - $set: - version: @version - }, { - upsert: true - }) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true From 546d9e9cee5e11de0033213e88d3de86e99dd224 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 29 Nov 2016 17:13:07 +0000 Subject: [PATCH 170/769] Remove left over test --- .../MongoHealthCheckTests.coffee | 52 ------------------- 1 file changed, 52 
deletions(-) delete mode 100644 services/document-updater/test/unit/coffee/MongoHealthCheckTests/MongoHealthCheckTests.coffee diff --git a/services/document-updater/test/unit/coffee/MongoHealthCheckTests/MongoHealthCheckTests.coffee b/services/document-updater/test/unit/coffee/MongoHealthCheckTests/MongoHealthCheckTests.coffee deleted file mode 100644 index b8da766a4c..0000000000 --- a/services/document-updater/test/unit/coffee/MongoHealthCheckTests/MongoHealthCheckTests.coffee +++ /dev/null @@ -1,52 +0,0 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../../app/js/MongoHealthCheck' - -describe "MongoHealthCheck", -> - beforeEach -> - @MongoHealthCheck = SandboxedModule.require modulePath, requires: - "settings-sharelatex": @Settings = {} - "./PersistenceManager": @PersistenceManager = {} - @doc_id = "mock-doc-id" - @callback = sinon.stub() - - describe "isAlive", -> - describe "with no configured doc_id", -> - beforeEach -> - @MongoHealthCheck.isAlive @callback - - it "should call the call the callback with an error", -> - @callback.calledOnce.should.equal true - error = @callback.args[0][0] - error.message.should.equal "No test doc_id configured" - - describe "when mongo returns within the timeout", -> - beforeEach -> - @Settings.smokeTest = - doc_id: @doc_id - @PersistenceManager.getDocVersionInMongo = sinon.stub().callsArg(1) - @MongoHealthCheck.isAlive @callback - - it "should call PersistenceManager.getDocVersionInMongo", -> - @PersistenceManager.getDocVersionInMongo - .calledWith(@doc_id) - .should.equal true - - it "should call the call the callback without an error", -> - @callback.calledOnce.should.equal true - @callback.calledWith(null).should.equal true - - describe "when mongo does not return within the timeout", -> - beforeEach (done) -> - @Settings.smokeTest = - doc_id: @doc_id - timeout: 50 - @PersistenceManager.getDocVersionInMongo = (doc_id, callback) -> - setTimeout callback, 100 - @MongoHealthCheck.isAlive (@error) => - done() - - it "should call the call the callback with an error", -> - @error.message.should.equal "Mongo did not return in 50ms" - \ No newline at end of file From efab68e6b285b7b95ec2e411b8f0d7d1c0a21b9a Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 29 Nov 2016 17:13:16 +0000 Subject: [PATCH 171/769] Add in guards on bad data from web API --- .../app/coffee/PersistenceManager.coffee | 4 ++++ .../PersistenceManager/getDocTests.coffee | 18 +++++++++++++++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 27585c8290..8be268a6e8 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -35,6 +35,10 @@ module.exports = PersistenceManager = body = JSON.parse body catch e return callback(e) + if !body.lines? + return callback(new Error("web API response had no doc lines")) + if !body.version? 
or typeof body.version isnt "number" + return callback(new Error("web API response had no valid doc version")) return callback null, body.lines, body.version else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not found: #{url}")) diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee index 03ea59cd90..d4f44afa46 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee @@ -84,5 +84,21 @@ describe "PersistenceManager.getDoc", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - + + describe "when request returns a doc without lines", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(version: @version)) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web API response had no doc lines")).should.equal true + + describe "when request returns a doc without a version", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web API response had no valid doc version")).should.equal true + From fb39e37fe0ebf446a61043c113e592668e90d182 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 1 Dec 2016 16:27:40 +0000 Subject: [PATCH 172/769] Update DocumentManager tests --- .../DocumentManagerTests.coffee | 263 ++++++++++++++++++ .../flushAndDeleteDocTests.coffee | 48 ---- .../flushDocIfLoadedTests.coffee | 68 ----- .../getDocAndRecentOpsTests.coffee | 67 ----- .../coffee/DocumentManager/getDocTests.coffee | 70 ----- .../coffee/DocumentManager/setDocTests.coffee | 107 ------- 6 files changed, 263 insertions(+), 360 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee new file mode 100644 index 0000000000..2b0dce169b --- /dev/null +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -0,0 +1,263 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/DocumentManager.js" +SandboxedModule = require('sandboxed-module') + +describe "DocumentManager", -> + beforeEach -> + @DocumentManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./PersistenceManager": @PersistenceManager = {} + "./HistoryManager": @HistoryManager = {} + "logger-sharelatex": @logger = {log: sinon.stub()} + "./DocOpsManager": @DocOpsManager =
{} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + "./WebRedisManager": @WebRedisManager = {} + "./DiffCodec": @DiffCodec = {} + "./UpdateManager": @UpdateManager = {} + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @callback = sinon.stub() + @lines = ["one", "two", "three"] + @version = 42 + @track_changes_entries = { comments: "mock", entries: "mock" } + @track_changes_on = true + + describe "flushAndDeleteDoc", -> + describe "successfully", -> + beforeEach -> + @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) + @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2) + @HistoryManager.flushDocChanges = sinon.stub().callsArg(2) + @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback + + it "should flush the doc", -> + @DocumentManager.flushDocIfLoaded + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should remove the doc from redis", -> + @RedisManager.removeDocFromMemory + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + it "should flush to track changes", -> + @HistoryManager.flushDocChanges + .calledWith(@project_id, @doc_id) + .should.equal true + + describe "flushDocIfLoaded", -> + describe "when the doc is in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @PersistenceManager.setDoc = sinon.stub().yields() + @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback + + it "should get the doc from redis", -> + @RedisManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should write the doc lines to the persistence layer", -> + @PersistenceManager.setDoc + .calledWith(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the document is not in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null) + @PersistenceManager.setDoc = sinon.stub().yields() + @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) + @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback + + it "should get the doc from redis", -> + @RedisManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should not write anything to the persistence layer", -> + @PersistenceManager.setDoc.called.should.equal false + @DocOpsManager.flushDocOpsToMongo.called.should.equal false + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "getDocAndRecentOps", -> + describe "with a previous version specified", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) + @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should 
get the doc ops", -> + @RedisManager.getPreviousDocOps + .calledWith(@doc_id, @fromVersion, @version) + .should.equal true + + it "should call the callback with the doc info", -> + @callback.calledWith(null, @lines, @version, @ops).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "with no previous version specified", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) + @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should not need to get the doc ops", -> + @RedisManager.getPreviousDocOps.called.should.equal false + + it "should call the callback with the doc info", -> + @callback.calledWith(null, @lines, @version, []).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "getDoc", -> + describe "when the doc exists in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @DocumentManager.getDoc @project_id, @doc_id, @callback + + it "should get the doc from Redis", -> + @RedisManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback with the doc info", -> + @callback.calledWith(null, @lines, @version, @track_changes_on, @track_changes_entries, true).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the doc does not exist in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @RedisManager.putDocInMemory = sinon.stub().yields() + @DocumentManager.getDoc @project_id, @doc_id, @callback + + it "should try to get the doc from Redis", -> + @RedisManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should get the doc from the PersistenceManager", -> + @PersistenceManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should set the doc in Redis", -> + @RedisManager.putDocInMemory + .calledWith(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries) + .should.equal true + + it "should call the callback with the doc info", -> + @callback.calledWith(null, @lines, @version, @track_changes_on, @track_changes_entries, false).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "setDoc", -> + describe "with plain tex lines", -> + beforeEach -> + @beforeLines = ["before", "lines"] + @afterLines = ["after", "lines"] + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @track_changes_on, @track_changes_entries, true) + @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) + @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) + @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) + @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(2) + + describe "when already loaded", -> + beforeEach -> + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, 
@source, @user_id, @callback + + it "should get the current doc lines", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return a diff of the old and new lines", -> + @DiffCodec.diffAsShareJsOp + .calledWith(@beforeLines, @afterLines) + .should.equal true + + it "should apply the diff as a ShareJS op", -> + @UpdateManager.applyUpdate + .calledWith( + @project_id, + @doc_id, + { + doc: @doc_id, + v: @version, + op: @ops, + meta: { + type: "external" + source: @source + user_id: @user_id + } + } + ) + .should.equal true + + it "should flush the doc to Mongo", -> + @DocumentManager.flushDocIfLoaded + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when not already loaded", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, false) + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, @callback + + it "should flush and delete the doc from the doc updater", -> + @DocumentManager.flushAndDeleteDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + describe "without new lines", -> + beforeEach -> + @DocumentManager.setDoc @project_id, @doc_id, null, @source, @user_id, @callback + + it "should return the callback with an error", -> + @callback.calledWith(new Error("No lines were passed to setDoc")) + + it "should not try to get the doc lines", -> + @DocumentManager.getDoc.called.should.equal false \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee deleted file mode 100644 index 911efce1ba..0000000000 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushAndDeleteDocTests.coffee +++ /dev/null @@ -1,48 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/DocumentManager.js" -SandboxedModule = require('sandboxed-module') - -describe "DocumentUpdater.flushAndDeleteDoc", -> - beforeEach -> - @DocumentManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./PersistenceManager": @PersistenceManager = {} - "./TrackChangesManager": @TrackChangesManager = {} - "logger-sharelatex": @logger = {log: sinon.stub()} - "./DocOpsManager" :{} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @callback = sinon.stub() - - describe "successfully", -> - beforeEach -> - @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) - @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2) - @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) - @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback - - it "should flush the doc", -> - @DocumentManager.flushDocIfLoaded - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should remove the doc from redis", -> - @RedisManager.removeDocFromMemory - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - it "should flush to track changes", -> - 
@TrackChangesManager.flushDocChanges - .calledWith(@project_id, @doc_id) - .should.equal true diff --git a/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee deleted file mode 100644 index 4a17e2b84c..0000000000 --- a/services/document-updater/test/unit/coffee/DocumentManager/flushDocIfLoadedTests.coffee +++ /dev/null @@ -1,68 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/DocumentManager.js" -SandboxedModule = require('sandboxed-module') - -describe "DocumentManager.flushDocIfLoaded", -> - beforeEach -> - @DocumentManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./PersistenceManager": @PersistenceManager = {} - "./DocOpsManager": @DocOpsManager = {} - "logger-sharelatex": @logger = {log: sinon.stub()} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "./TrackChangesManager": {} - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 - @callback = sinon.stub() - - describe "when the doc is in Redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) - @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback - - it "should get the doc from redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should write the doc lines to the persistence layer", -> - @PersistenceManager.setDoc - .calledWith(@project_id, @doc_id, @lines, @version) - .should.equal true - - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the document is not in Redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) - @PersistenceManager.setDoc = sinon.stub().callsArgWith(4) - @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) - @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback - - it "should get the doc from redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should not write anything to the persistence layer", -> - @PersistenceManager.setDoc.called.should.equal false - @DocOpsManager.flushDocOpsToMongo.called.should.equal false - - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee deleted file mode 100644 index c77af9a77c..0000000000 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocAndRecentOpsTests.coffee +++ /dev/null @@ -1,67 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/DocumentManager.js" -SandboxedModule = require('sandboxed-module') - -describe "DocumentManager.getDocAndRecentOps", -> - beforeEach -> - @DocumentManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./PersistenceManager": @PersistenceManager = {} - 
"logger-sharelatex": @logger = {log: sinon.stub()} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "./TrackChangesManager": {} - - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 - @fromVersion = 40 - @ops = ["mock-op-1", "mock-op-2"] - @callback = sinon.stub() - - describe "with a previous version specified", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should get the doc ops", -> - @RedisManager.getPreviousDocOps - .calledWith(@doc_id, @fromVersion, @version) - .should.equal true - - it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ops).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "with no previous version specified", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should not need to get the doc ops", -> - @RedisManager.getPreviousDocOps.called.should.equal false - - it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, []).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - diff --git a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee deleted file mode 100644 index 3edf4cb67d..0000000000 --- a/services/document-updater/test/unit/coffee/DocumentManager/getDocTests.coffee +++ /dev/null @@ -1,70 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/DocumentManager.js" -SandboxedModule = require('sandboxed-module') - -describe "DocumentUpdater.getDoc", -> - beforeEach -> - @DocumentManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./PersistenceManager": @PersistenceManager = {} - "./DocOpsManager": @DocOpsManager = {} - "logger-sharelatex": @logger = {log: sinon.stub()} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "./TrackChangesManager": {} - - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 - @callback = sinon.stub() - - describe "when the doc exists in Redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @DocumentManager.getDoc @project_id, @doc_id, @callback - - it "should get the doc from Redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, true).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the doc does not exist in Redis", -> - beforeEach -> - @RedisManager.getDoc = 
sinon.stub().callsArgWith(2, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @RedisManager.putDocInMemory = sinon.stub().callsArg(4) - @DocumentManager.getDoc @project_id, @doc_id, @callback - - it "should try to get the doc from Redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should get the doc from the PersistenceManager", -> - @PersistenceManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should set the doc in Redis", -> - @RedisManager.putDocInMemory - .calledWith(@project_id, @doc_id, @lines, @version) - .should.equal true - - it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, false).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - - diff --git a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee deleted file mode 100644 index 360d939b9f..0000000000 --- a/services/document-updater/test/unit/coffee/DocumentManager/setDocTests.coffee +++ /dev/null @@ -1,107 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/DocumentManager.js" -SandboxedModule = require('sandboxed-module') - -describe "DocumentManager.setDoc", -> - beforeEach -> - @DocumentManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./PersistenceManager": @PersistenceManager = {} - "./DiffCodec": @DiffCodec = {} - "./DocOpsManager":{} - "./UpdateManager": @UpdateManager = {} - "logger-sharelatex": @logger = {log: sinon.stub()} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "./TrackChangesManager": {} - - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @version = 42 - @ops = ["mock-ops"] - @callback = sinon.stub() - @source = "dropbox" - @user_id = "mock-user-id" - - describe "with plain tex lines", -> - beforeEach -> - @beforeLines = ["before", "lines"] - @afterLines = ["after", "lines"] - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, true) - @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) - @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) - @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) - @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(2) - - describe "when already loaded", -> - beforeEach -> - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, @callback - - it "should get the current doc lines", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should return a diff of the old and new lines", -> - @DiffCodec.diffAsShareJsOp - .calledWith(@beforeLines, @afterLines) - .should.equal true - - it "should apply the diff as a ShareJS op", -> - @UpdateManager.applyUpdate - .calledWith( - @project_id, - @doc_id, - { - doc: @doc_id, - v: @version, - op: @ops, - meta: { - type: "external" - source: @source - user_id: @user_id - } - } - ) - .should.equal true - - it "should flush the doc to Mongo", -> - @DocumentManager.flushDocIfLoaded - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the callback", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when not 
already loaded", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, false) - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, @callback - - it "should flush and delete the doc from the doc updater", -> - @DocumentManager.flushAndDeleteDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - describe "without new lines", -> - beforeEach -> - @DocumentManager.setDoc @project_id, @doc_id, null, @source, @user_id, @callback - - it "should return the callback with an error", -> - @callback.calledWith(new Error("No lines were passed to setDoc")) - - it "should not try to get the doc lines", -> - @DocumentManager.getDoc.called.should.equal false - - - - - - - From 9ee913be39a5ccac287187c23167a337a715ee8b Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 1 Dec 2016 16:40:15 +0000 Subject: [PATCH 173/769] Update PersistenceManagerTests --- .../app/coffee/PersistenceManager.coffee | 6 +- .../PersistenceManagerTests.coffee | 176 ++++++++++++++++++ .../PersistenceManager/getDocTests.coffee | 104 ----------- .../PersistenceManager/setDocTests.coffee | 90 --------- 4 files changed, 178 insertions(+), 198 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index fff037f6fc..25eb1f32a2 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -39,7 +39,7 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no doc lines")) if !body.version? 
or not body.version instanceof Number return callback(new Error("web API response had no valid doc version")) - return callback null, body.lines, body.track_changes, body.track_changes_entries + return callback null, body.lines, body.version, body.track_changes, body.track_changes_entries else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) else @@ -55,13 +55,11 @@ module.exports = PersistenceManager = request { url: url method: "POST" - body: JSON.stringify + json: lines: lines track_changes: track_changes track_changes_entries: track_changes_entries version: version - headers: - "content-type": "application/json" auth: user: Settings.apis.web.user pass: Settings.apis.web.pass diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee new file mode 100644 index 0000000000..e9f2cf212e --- /dev/null +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -0,0 +1,176 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/PersistenceManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "PersistenceManager", -> + beforeEach -> + @PersistenceManager = SandboxedModule.require modulePath, requires: + "request": @request = sinon.stub() + "settings-sharelatex": @Settings = {} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @lines = ["one", "two", "three"] + @version = 42 + @callback = sinon.stub() + @track_changes_on = true + @track_changes_entries = { comments: "mock", entries: "mock" } + @Settings.apis = + web: + url: @url = "www.example.com" + user: @user = "sharelatex" + pass: @pass = "password" + + describe "getDoc", -> + + describe "with a successful response from the web api", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify({ + lines: @lines, + version: @version, + track_changes: @track_changes_on, + track_changes_entries: @track_changes_entries + })) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should call the web api", -> + @request + .calledWith({ + url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" + method: "GET" + headers: + "accept": "application/json" + auth: + user: @user + pass: @pass + sendImmediately: true + jar: false + timeout: 5000 + }) + .should.equal true + + it "should call the callback with the doc lines, version and track changes state", -> + @callback.calledWith(null, @lines, @version, @track_changes_on, @track_changes_entries).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when request returns an error", -> + beforeEach -> + @request.callsArgWith(1, @error = new Error("oops"), null, null) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return the error", -> + @callback.calledWith(@error).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns 404", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 404}, "") + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return a NotFoundError", 
-> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error status code", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 500}, "") + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web api error")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when request returns a doc without lines", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(version: @version)) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web API response had no doc lines")).should.equal true + + describe "when request returns a doc without a version", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web API response had no valid doc version")).should.equal true + + describe "setDoc", -> + describe "with a successful response from the web api", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 200}) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, @callback) + + it "should call the web api", -> + @request + .calledWith({ + url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" + json: + lines: @lines + version: @version + track_changes: @track_changes_on + track_changes_entries: @track_changes_entries + method: "POST" + auth: + user: @user + pass: @pass + sendImmediately: true + jar: false + timeout: 5000 + }) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when request returns an error", -> + beforeEach -> + @request.callsArgWith(1, @error = new Error("oops"), null, null) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, @callback) + + it "should return the error", -> + @callback.calledWith(@error).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns 404", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 404}, "") + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, @callback) + + it "should return a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the request returns an error status code", -> + beforeEach -> + @request.callsArgWith(1, null, {statusCode: 500}, "") + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web api error")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee
b/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee deleted file mode 100644 index d4f44afa46..0000000000 --- a/services/document-updater/test/unit/coffee/PersistenceManager/getDocTests.coffee +++ /dev/null @@ -1,104 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/PersistenceManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" - -describe "PersistenceManager.getDoc", -> - beforeEach -> - @PersistenceManager = SandboxedModule.require modulePath, requires: - "request": @request = sinon.stub() - "settings-sharelatex": @Settings = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 - @callback = sinon.stub() - @Settings.apis = - web: - url: @url = "www.example.com" - user: @user = "sharelatex" - pass: @pass = "password" - - describe "with a successful response from the web api", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - - it "should call the web api", -> - @request - .calledWith({ - url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" - method: "GET" - headers: - "accept": "application/json" - auth: - user: @user - pass: @pass - sendImmediately: true - jar: false - timeout: 5000 - }) - .should.equal true - - it "should call the callback with the doc lines and version", -> - @callback.calledWith(null, @lines, @version).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when request returns an error", -> - beforeEach -> - @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - - it "should return the error", -> - @callback.calledWith(@error).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns 404", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - - it "should return a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns an error status code", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - - it "should return an error", -> - @callback.calledWith(new Error("web api error")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when request returns an doc without lines", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(version: @version)) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - - it "should return and error", -> - @callback.calledWith(new Error("web API response had no doc lines")).should.equal true - - describe "when request returns an doc without a version", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) - @PersistenceManager.getDoc(@project_id, @doc_id, 
@callback) - - it "should return and error", -> - @callback.calledWith(new Error("web API response had no valid doc version")).should.equal true - - diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee deleted file mode 100644 index 98f252a35d..0000000000 --- a/services/document-updater/test/unit/coffee/PersistenceManager/setDocTests.coffee +++ /dev/null @@ -1,90 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/PersistenceManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" - -describe "PersistenceManager.setDoc", -> - beforeEach -> - @PersistenceManager = SandboxedModule.require modulePath, requires: - "request": @request = sinon.stub() - "settings-sharelatex": @Settings = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 - @callback = sinon.stub() - @Settings.apis = - web: - url: @url = "www.example.com" - user: @user = "sharelatex" - pass: @pass = "password" - - describe "with a successful response from the web api", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines, version: @version)) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) - - it "should call the web api", -> - @request - .calledWith({ - url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" - body: JSON.stringify - lines: @lines - version: @version - method: "POST" - headers: - "content-type": "application/json" - auth: - user: @user - pass: @pass - sendImmediately: true - jar: false - timeout: 5000 - }) - .should.equal true - - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when request returns an error", -> - beforeEach -> - @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) - - it "should return the error", -> - @callback.calledWith(@error).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns 404", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) - - it "should return a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the request returns an error status code", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @callback) - - it "should return an error", -> - @callback.calledWith(new Error("web api error")).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - From d878dd575826bc7d01746268f22ccc5d640dcf55 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 1 Dec 2016 16:49:53 +0000 Subject: [PATCH 174/769] Fix RedisManagerTests --- .../RedisManager/RedisManagerTests.coffee | 99 
++++++++++++------- 1 file changed, 66 insertions(+), 33 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 205692d634..6f9afc29d3 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -22,6 +22,8 @@ describe "RedisManager", -> projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + trackChangesEnabled: ({doc_id}) -> "TrackChangesEnabled:#{doc_id}" + trackChangesEntries: ({doc_id}) -> "TrackChangesEntries:#{doc_id}" "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } "./Metrics": @metrics = inc: sinon.stub() @@ -37,39 +39,52 @@ describe "RedisManager", -> @lines = ["one", "two", "three"] @jsonlines = JSON.stringify @lines @version = 42 + @track_changes_on = true + @redis_track_changes_on = "1" + @track_changes_entries = { comments: "mock", entries: "mock" } + @json_track_changes_entries = JSON.stringify @track_changes_entries @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @project_id]) - @RedisManager.getDoc @project_id, @doc_id, @callback - - it "should get the lines from redis", -> - @rclient.get - .calledWith("doclines:#{@doc_id}") - .should.equal true - - it "should get the version from", -> - @rclient.get - .calledWith("DocVersion:#{@doc_id}") - .should.equal true + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @project_id, @redis_track_changes_on, @json_track_changes_entries]) - it 'should return the document', -> - @callback - .calledWith(null, @lines, @version) - .should.equal true + describe "successfully", -> + beforeEach -> + @RedisManager.getDoc @project_id, @doc_id, @callback - describe "getDoc with an invalid project id", -> - beforeEach -> - @lines = ["one", "two", "three"] - @jsonlines = JSON.stringify @lines - @version = 42 - @another_project_id = "project-id-456" - @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id]) - @RedisManager.getDoc @project_id, @doc_id, @callback + it "should get the lines from redis", -> + @rclient.get + .calledWith("doclines:#{@doc_id}") + .should.equal true + + it "should get the version from", -> + @rclient.get + .calledWith("DocVersion:#{@doc_id}") + .should.equal true + + it "should get the track changes state", -> + @rclient.get + .calledWith("TrackChangesEnabled:#{@doc_id}") + .should.equal true + + it "should get the track changes entries", -> + @rclient.get + .calledWith("TrackChangesEntries:#{@doc_id}") + .should.equal true - it 'should return an error', -> - @callback - .calledWith(new Errors.NotFoundError("not found")) - .should.equal true + it 'should return the document', -> + @callback + .calledWith(null, @lines, @version, @track_changes_on, @track_changes_entries) + .should.equal true + + describe "getDoc with an invalid project id", -> + beforeEach -> + @another_project_id = "project-id-456" + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id, @redis_track_changes_on, @json_track_changes_entries]) + @RedisManager.getDoc @project_id, @doc_id, @callback + + it 'should return an error', -> + @callback + .calledWith(new 
Errors.NotFoundError("not found")) + .should.equal true describe "getPreviousDocOpsTests", -> describe "with a start and an end value", -> @@ -166,13 +181,14 @@ describe "RedisManager", -> @lines = ["one", "two", "three"] @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] @version = 42 + @track_changes_entries = { comments: "mock", entries: "mock" } @rclient.exec = sinon.stub().callsArg(0) describe "with a consistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @callback + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @track_changes_entries, @callback it "should get the current doc version to check for consistency", -> @RedisManager.getDocVersion @@ -188,6 +204,11 @@ describe "RedisManager", -> @rclient.set .calledWith("DocVersion:#{@doc_id}", @version) .should.equal true + + it "should set the track changes entries", -> + @rclient.set + .calledWith("TrackChangesEntries:#{@doc_id}", JSON.stringify(@track_changes_entries)) + .should.equal true it "should push the doc op into the doc ops list", -> @rclient.rpush @@ -210,7 +231,7 @@ describe "RedisManager", -> describe "with an inconsistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length - 1) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @callback + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @track_changes_entries, @callback it "should not call multi.exec", -> @rclient.exec.called.should.equal false @@ -223,7 +244,7 @@ describe "RedisManager", -> describe "with no updates", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version) - @RedisManager.updateDocument @doc_id, @lines, @version, [], @callback + @RedisManager.updateDocument @doc_id, @lines, @version, [], @track_changes_entries, @callback it "should not do an rpush", -> @rclient.rpush @@ -242,7 +263,9 @@ describe "RedisManager", -> @rclient.exec.yields() @lines = ["one", "two", "three"] @version = 42 - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, done + @track_changes_on = true + @track_changes_entries = { comments: "mock", entries: "mock" } + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, done it "should set the lines", -> @rclient.set @@ -253,6 +276,16 @@ describe "RedisManager", -> @rclient.set .calledWith("DocVersion:#{@doc_id}", @version) .should.equal true + + it "should set the track changes entries", -> + @rclient.set + .calledWith("TrackChangesEntries:#{@doc_id}", JSON.stringify(@track_changes_entries)) + .should.equal true + + it "should set the track changes state", -> + @rclient.set + .calledWith("TrackChangesEnabled:#{@doc_id}", "1") + .should.equal true it "should set the project_id for the doc", -> @rclient.set From b6c93c718d77e0cba2ad96b6e4445afe8a772bdd Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 1 Dec 2016 16:50:55 +0000 Subject: [PATCH 175/769] Update TrackChangesManagerTests -> HistoryManagerTests --- .../HistoryManagerTests.coffee} | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) rename services/document-updater/test/unit/coffee/{TrackChangesManager/TrackChangesManagerTests.coffee => HistoryManager/HistoryManagerTests.coffee} (68%) diff --git a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee 
b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee similarity index 68% rename from services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee rename to services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 03106e2c2e..c33a18d4e6 100644 --- a/services/document-updater/test/unit/coffee/TrackChangesManager/TrackChangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -1,11 +1,11 @@ SandboxedModule = require('sandboxed-module') sinon = require('sinon') require('chai').should() -modulePath = require('path').join __dirname, '../../../../app/js/TrackChangesManager' +modulePath = require('path').join __dirname, '../../../../app/js/HistoryManager' -describe "TrackChangesManager", -> +describe "HistoryManager", -> beforeEach -> - @TrackChangesManager = SandboxedModule.require modulePath, requires: + @HistoryManager = SandboxedModule.require modulePath, requires: "request": @request = {} "settings-sharelatex": @Settings = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } @@ -22,7 +22,7 @@ describe "TrackChangesManager", -> describe "successfully", -> beforeEach -> @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) - @TrackChangesManager.flushDocChanges @project_id, @doc_id, @callback + @HistoryManager.flushDocChanges @project_id, @doc_id, @callback it "should send a request to the track changes api", -> @request.post @@ -35,7 +35,7 @@ describe "TrackChangesManager", -> describe "when the track changes api returns an error", -> beforeEach -> @request.post = sinon.stub().callsArgWith(1, null, statusCode: 500) - @TrackChangesManager.flushDocChanges @project_id, @doc_id, @callback + @HistoryManager.flushDocChanges @project_id, @doc_id, @callback it "should return the callback with an error", -> @callback.calledWith(new Error("track changes api return non-success code: 500")).should.equal true @@ -43,12 +43,12 @@ describe "TrackChangesManager", -> describe "pushUncompressedHistoryOps", -> beforeEach -> @ops = ["mock-ops"] - @TrackChangesManager.flushDocChanges = sinon.stub().callsArg(2) + @HistoryManager.flushDocChanges = sinon.stub().callsArg(2) describe "pushing the op", -> beforeEach -> @WebRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) - @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback it "should push the ops into redis", -> @WebRedisManager.pushUncompressedHistoryOps @@ -59,16 +59,16 @@ describe "TrackChangesManager", -> @callback.called.should.equal true it "should not try to flush the op", -> - @TrackChangesManager.flushDocChanges.called.should.equal false + @HistoryManager.flushDocChanges.called.should.equal false describe "when we hit a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> @WebRedisManager.pushUncompressedHistoryOps = - sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) - @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS) + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback it "should tell the track changes api to flush", -> - @TrackChangesManager.flushDocChanges + @HistoryManager.flushDocChanges .calledWith(@project_id, @doc_id) .should.equal true @@ 
-76,20 +76,20 @@ describe "TrackChangesManager", -> beforeEach -> @ops = ["op1", "op2", "op3"] @WebRedisManager.pushUncompressedHistoryOps = - sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS + 1) - @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS + 1) + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback it "should tell the track changes api to flush", -> - @TrackChangesManager.flushDocChanges + @HistoryManager.flushDocChanges .calledWith(@project_id, @doc_id) .should.equal true - describe "when TrackChangesManager errors", -> + describe "when HistoryManager errors", -> beforeEach -> @WebRedisManager.pushUncompressedHistoryOps = - sinon.stub().callsArgWith(3, null, 2 * @TrackChangesManager.FLUSH_EVERY_N_OPS) - @TrackChangesManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) - @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS) + @HistoryManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback it "should log out the error", -> @logger.error @@ -104,7 +104,7 @@ describe "TrackChangesManager", -> describe "with no ops", -> beforeEach -> @WebRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) - @TrackChangesManager.pushUncompressedHistoryOps @project_id, @doc_id, [], @callback + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, [], @callback it "should not call WebRedisManager.pushUncompressedHistoryOps", -> @WebRedisManager.pushUncompressedHistoryOps.called.should.equal false From 889f5fdf9f0a95a6ba1f03e51c0f29ecfacb1c5e Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 1 Dec 2016 17:14:40 +0000 Subject: [PATCH 176/769] Fix ShareJsDB tests --- .../app/coffee/ShareJsDB.coffee | 2 - .../unit/coffee/ShareJsDB/GetOpsTests.coffee | 55 ----------- .../coffee/ShareJsDB/GetSnapshotTests.coffee | 87 ----------------- .../coffee/ShareJsDB/ShareJsDBTests.coffee | 93 +++++++++++++++++++ .../coffee/ShareJsDB/WriteOpsTests.coffee | 39 -------- 5 files changed, 93 insertions(+), 183 deletions(-) delete mode 100644 services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee create mode 100644 services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee diff --git a/services/document-updater/app/coffee/ShareJsDB.coffee b/services/document-updater/app/coffee/ShareJsDB.coffee index a21c8aea7f..3e5dfe303f 100644 --- a/services/document-updater/app/coffee/ShareJsDB.coffee +++ b/services/document-updater/app/coffee/ShareJsDB.coffee @@ -1,8 +1,6 @@ Keys = require('./UpdateKeys') -Settings = require('settings-sharelatex') RedisManager = require "./RedisManager" Errors = require "./Errors" -logger = require "logger-sharelatex" module.exports = class ShareJsDB constructor: (@project_id, @doc_id, @lines, @version) -> diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee deleted file mode 100644 index 5621f39a85..0000000000 --- 
a/services/document-updater/test/unit/coffee/ShareJsDB/GetOpsTests.coffee +++ /dev/null @@ -1,55 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/ShareJsDB.js" -SandboxedModule = require('sandboxed-module') - -describe "ShareJsDB.getOps", -> - beforeEach -> - @doc_id = "document-id" - @project_id = "project-id" - @doc_key = "#{@project_id}:#{@doc_id}" - @callback = sinon.stub() - @ops = [{p: 20, t: "foo"}] - @redis_ops = (JSON.stringify(op) for op in @ops) - @ShareJsDB = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./DocumentManager":{} - "logger-sharelatex": {} - @db = new @ShareJsDB() - - describe "with start == end", -> - beforeEach -> - @start = @end = 42 - @db.getOps @doc_key, @start, @end, @callback - - it "should return an empty array", -> - @callback.calledWith(null, []).should.equal true - - describe "with a non empty range", -> - beforeEach -> - @start = 35 - @end = 42 - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @db.getOps @doc_key, @start, @end, @callback - - it "should get the range from redis", -> - @RedisManager.getPreviousDocOps - .calledWith(@doc_id, @start, @end-1) - .should.equal true - - it "should return the ops", -> - @callback.calledWith(null, @ops).should.equal true - - describe "with no specified end", -> - beforeEach -> - @start = 35 - @end = null - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @db.getOps @doc_key, @start, @end, @callback - - it "should get until the end of the list", -> - @RedisManager.getPreviousDocOps - .calledWith(@doc_id, @start, -1) - .should.equal true - diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee deleted file mode 100644 index f2527b01a2..0000000000 --- a/services/document-updater/test/unit/coffee/ShareJsDB/GetSnapshotTests.coffee +++ /dev/null @@ -1,87 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../../app/js/ShareJsDB.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" - -describe "ShareJsDB.getSnapshot", -> - beforeEach -> - @doc_id = "document-id" - @project_id = "project-id" - @doc_key = "#{@project_id}:#{@doc_id}" - @callback = sinon.stub() - @ShareJsDB = SandboxedModule.require modulePath, requires: - "./DocumentManager": @DocumentManager = {} - "./RedisManager": {} - "./DocOpsManager": {} - "logger-sharelatex": {} - @db = new @ShareJsDB() - - @version = 42 - - describe "with a text document", -> - beforeEach -> - @lines = ["one", "two", "three"] - - describe "successfully", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @db.getSnapshot @doc_key, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should return the doc lines", -> - @callback.args[0][1].snapshot.should.equal @lines.join("\n") - - it "should return the doc version", -> - @callback.args[0][1].v.should.equal @version - - it "should return the type as text", -> - @callback.args[0][1].type.should.equal "text" - - describe "when the doclines do not exist", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) - @db.getSnapshot @doc_key, @callback - - it "should return the callback 
with a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true - - describe "when getDoc returns an error", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, @error = new Error("oops"), null, null) - @db.getSnapshot @doc_key, @callback - - it "should return the callback with an error", -> - @callback.calledWith(@error).should.equal true - - describe "with a JSON document", -> - beforeEach -> - @lines = [{text: "one"}, {text:"two"}, {text:"three"}] - - describe "successfully", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version) - @db.getSnapshot @doc_key, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should return the doc lines", -> - expect(@callback.args[0][1].snapshot).to.deep.equal lines: @lines - - it "should return the doc version", -> - @callback.args[0][1].v.should.equal @version - - it "should return the type as text", -> - @callback.args[0][1].type.should.equal "json" - - - diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.coffee new file mode 100644 index 0000000000..aa03d9fb1e --- /dev/null +++ b/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.coffee @@ -0,0 +1,93 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../../app/js/ShareJsDB.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "ShareJsDB", -> + beforeEach -> + @doc_id = "document-id" + @project_id = "project-id" + @doc_key = "#{@project_id}:#{@doc_id}" + @callback = sinon.stub() + @ShareJsDB = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + + @version = 42 + @lines = ["one", "two", "three"] + @db = new @ShareJsDB(@project_id, @doc_id, @lines, @version) + + describe "getSnapshot", -> + describe "successfully", -> + beforeEach -> + @db.getSnapshot @doc_key, @callback + + it "should return the doc lines", -> + @callback.args[0][1].snapshot.should.equal @lines.join("\n") + + it "should return the doc version", -> + @callback.args[0][1].v.should.equal @version + + it "should return the type as text", -> + @callback.args[0][1].type.should.equal "text" + + describe "when the key does not match", -> + beforeEach -> + @db.getSnapshot "bad:key", @callback + + it "should return the callback with a NotFoundError", -> + @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + + describe "getOps", -> + describe "with start == end", -> + beforeEach -> + @start = @end = 42 + @db.getOps @doc_key, @start, @end, @callback + + it "should return an empty array", -> + @callback.calledWith(null, []).should.equal true + + describe "with a non empty range", -> + beforeEach -> + @start = 35 + @end = 42 + @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) + @db.getOps @doc_key, @start, @end, @callback + + it "should get the range from redis", -> + @RedisManager.getPreviousDocOps + .calledWith(@doc_id, @start, @end-1) + .should.equal true + + it "should return the ops", -> + @callback.calledWith(null, @ops).should.equal true + + describe "with no specified end", -> + beforeEach -> + @start = 35 + @end = null + @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) + @db.getOps @doc_key, 
@start, @end, @callback + + it "should get until the end of the list", -> + @RedisManager.getPreviousDocOps + .calledWith(@doc_id, @start, -1) + .should.equal true + + describe "writeOps", -> + describe "writing an op", -> + beforeEach -> + @opData = + op: {p: 20, t: "foo"} + meta: {source: "bar"} + v: @version + @db.writeOp @doc_key, @opData, @callback + + it "should write into appliedOps", -> + expect(@db.appliedOps[@doc_key]).to.deep.equal [@opData] + + it "should call the callback without an error", -> + @callback.called.should.equal true + (@callback.args[0][0]?).should.equal false diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee deleted file mode 100644 index 838f63034e..0000000000 --- a/services/document-updater/test/unit/coffee/ShareJsDB/WriteOpsTests.coffee +++ /dev/null @@ -1,39 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -expect = chai.expect -should = chai.should() -modulePath = "../../../../app/js/ShareJsDB.js" -SandboxedModule = require('sandboxed-module') - -describe "ShareJsDB.writeOps", -> - beforeEach -> - @project_id = "project-id" - @doc_id = "document-id" - @doc_key = "#{@project_id}:#{@doc_id}" - @callback = sinon.stub() - @opData = - op: {p: 20, t: "foo"} - meta: {source: "bar"} - @ShareJsDB = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./DocOpsManager": @DocOpsManager = {} - "./DocumentManager": {} - "logger-sharelatex": @logger = {error: sinon.stub()} - @db = new @ShareJsDB() - - describe "writing an op", -> - beforeEach -> - @version = 42 - @opData.v = @version - @db.writeOp @doc_key, @opData, @callback - - it "should write into appliedOps", -> - expect(@db.appliedOps[@doc_key]).to.deep.equal [@opData] - - it "should call the callback without an error", -> - @callback.called.should.equal true - (@callback.args[0][0]?).should.equal false - - - - From e7ff05e79220b6ace4d2119a3ec870f1553f0b1c Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 1 Dec 2016 18:06:33 +0000 Subject: [PATCH 177/769] Fix UpdateManager tests --- .../UpdateManager/ApplyingUpdates.coffee | 79 +++++++++++++++- .../lockUpdatesAndDoTests.coffee | 90 ------------------- 2 files changed, 76 insertions(+), 93 deletions(-) delete mode 100644 services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee index 43786f4b98..d1898378d2 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee @@ -14,12 +14,14 @@ describe "UpdateManager", -> "./RedisManager" : @RedisManager = {} "./WebRedisManager" : @WebRedisManager = {} "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} - "./TrackChangesManager" : @TrackChangesManager = {} + "./HistoryManager" : @HistoryManager = {} "logger-sharelatex": @logger = { log: sinon.stub() } "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() "settings-sharelatex": Settings = {} + "./DocumentManager": @DocumentManager = {} + "./TrackChangesManager": @TrackChangesManager = {} describe "processOutstandingUpdates", -> beforeEach -> @@ -158,7 +160,7 @@ describe "UpdateManager", -> @appliedOps = ["mock-applied-ops"] @ShareJsUpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null, 
@updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().callsArg(4) - @TrackChangesManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) + @HistoryManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) describe "normally", -> beforeEach -> @@ -175,7 +177,7 @@ describe "UpdateManager", -> .should.equal true it "should push the applied ops into the track changes queue", -> - @TrackChangesManager.pushUncompressedHistoryOps + @HistoryManager.pushUncompressedHistoryOps .calledWith(@project_id, @doc_id, @appliedOps) .should.equal true @@ -195,3 +197,74 @@ describe "UpdateManager", -> # \uFFFD is 'replacement character' @update.op[0].i.should.equal "\uFFFD\uFFFD" + describe "lockUpdatesAndDo", -> + beforeEach -> + @method = sinon.stub().callsArgWith(3, null, @response_arg1) + @callback = sinon.stub() + @arg1 = "argument 1" + @response_arg1 = "response argument 1" + @lockValue = "mock-lock-value" + @LockManager.getLock = sinon.stub().callsArgWith(1, null, @lockValue) + @LockManager.releaseLock = sinon.stub().callsArg(2) + + describe "successfully", -> + beforeEach -> + @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub() + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + + it "should lock the doc", -> + @LockManager.getLock + .calledWith(@doc_id) + .should.equal true + + it "should process any outstanding updates", -> + @UpdateManager.processOutstandingUpdates + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the method", -> + @method + .calledWith(@project_id, @doc_id, @arg1) + .should.equal true + + it "should return the method response to the callback", -> + @callback + .calledWith(null, @response_arg1) + .should.equal true + + it "should release the lock", -> + @LockManager.releaseLock + .calledWith(@doc_id, @lockValue) + .should.equal true + + it "should continue processing updates", -> + @UpdateManager.continueProcessingUpdatesWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + describe "when processOutstandingUpdates returns an error", -> + beforeEach -> + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, @error = new Error("Something went wrong")) + @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + + it "should free the lock", -> + @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true + + it "should return the error in the callback", -> + @callback.calledWith(@error).should.equal true + + describe "when the method returns an error", -> + beforeEach -> + @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + @method = sinon.stub().callsArgWith(3, @error = new Error("something went wrong"), @response_arg1) + @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + + it "should free the lock", -> + @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true + + it "should return the error in the callback", -> + @callback.calledWith(@error).should.equal true + + + diff --git a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee deleted file mode 100644 index a4b455d219..0000000000 --- a/services/document-updater/test/unit/coffee/UpdateManager/lockUpdatesAndDoTests.coffee +++ /dev/null @@ -1,90 +0,0 @@ -sinon = require('sinon') -chai = 
require('chai') -should = chai.should() -modulePath = "../../../../app/js/UpdateManager.js" -SandboxedModule = require('sandboxed-module') - -describe 'UpdateManager - lockUpdatesAndDo', -> - beforeEach -> - @UpdateManager = SandboxedModule.require modulePath, requires: - "./LockManager" : @LockManager = {} - "./RedisManager" : @RedisManager = {} - "./WebRedisManager" : @WebRedisManager = {} - "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} - "./TrackChangesManager" : @TrackChangesManager = {} - "logger-sharelatex": @logger = { log: sinon.stub() } - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "settings-sharelatex": Settings = {} - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @method = sinon.stub().callsArgWith(3, null, @response_arg1) - @callback = sinon.stub() - @arg1 = "argument 1" - @response_arg1 = "response argument 1" - @lockValue = "mock-lock-value" - @LockManager.getLock = sinon.stub().callsArgWith(1, null, @lockValue) - @LockManager.releaseLock = sinon.stub().callsArg(2) - - describe "successfully", -> - beforeEach -> - @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub() - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) - @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback - - it "should lock the doc", -> - @LockManager.getLock - .calledWith(@doc_id) - .should.equal true - - it "should process any outstanding updates", -> - @UpdateManager.processOutstandingUpdates - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the method", -> - @method - .calledWith(@project_id, @doc_id, @arg1) - .should.equal true - - it "should return the method response to the callback", -> - @callback - .calledWith(null, @response_arg1) - .should.equal true - - it "should release the lock", -> - @LockManager.releaseLock - .calledWith(@doc_id, @lockValue) - .should.equal true - - it "should continue processing updates", -> - @UpdateManager.continueProcessingUpdatesWithLock - .calledWith(@project_id, @doc_id) - .should.equal true - - describe "when processOutstandingUpdates returns an error", -> - beforeEach -> - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, @error = new Error("Something went wrong")) - @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback - - it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true - - it "should return the error in the callback", -> - @callback.calledWith(@error).should.equal true - - describe "when the method returns an error", -> - beforeEach -> - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) - @method = sinon.stub().callsArgWith(3, @error = new Error("something went wrong"), @response_arg1) - @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback - - it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true - - it "should return the error in the callback", -> - @callback.calledWith(@error).should.equal true - - - From ce93a76e7f3f653d95a5b77a754462366b5387e8 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 1 Dec 2016 18:11:03 +0000 Subject: [PATCH 178/769] Fix ShareJsUpdateManager tests --- .../ShareJsUpdateManagerTests.coffee | 27 ++++++++++--------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee 
b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index 94806a1a9d..f3b871149d 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -17,12 +17,16 @@ describe "ShareJsUpdateManager", -> "./ShareJsDB" : @ShareJsDB = { mockDB: true } "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": @logger = { log: sinon.stub() } + "./WebRedisManager": @WebRedisManager = {} globals: clearTimeout: @clearTimeout = sinon.stub() describe "applyUpdate", -> beforeEach -> + @lines = ["one", "two"] @version = 34 + @update = {p: 4, t: "foo"} + @updatedDocLines = ["onefoo", "two"] @model = applyOp: sinon.stub().callsArg(2) getSnapshot: sinon.stub() @@ -31,20 +35,19 @@ describe "ShareJsUpdateManager", -> @ShareJsUpdateManager.getNewShareJsModel = sinon.stub().returns(@model) @ShareJsUpdateManager._listenForOps = sinon.stub() @ShareJsUpdateManager.removeDocFromCache = sinon.stub().callsArg(1) - @update = {p: 4, t: "foo"} - @updatedDocLines = ["one", "two"] describe "successfully", -> beforeEach (done) -> @model.getSnapshot.callsArgWith(1, null, {snapshot: @updatedDocLines.join("\n"), v: @version}) @model.db.appliedOps["#{@project_id}:#{@doc_id}"] = @appliedOps = ["mock-ops"] - @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, (err, docLines, version, appliedOps) => + @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version, appliedOps) => @callback(err, docLines, version, appliedOps) done() it "should create a new ShareJs model", -> @ShareJsUpdateManager.getNewShareJsModel - .called.should.equal true + .calledWith(@project_id, @doc_id, @lines, @version) + .should.equal true it "should listen for ops on the model", -> @ShareJsUpdateManager._listenForOps @@ -69,7 +72,7 @@ describe "ShareJsUpdateManager", -> @error = new Error("Something went wrong") @ShareJsUpdateManager._sendError = sinon.stub() @model.applyOp = sinon.stub().callsArgWith(2, @error) - @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, (err, docLines, version) => + @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version) => @callback(err, docLines, version) done() @@ -86,7 +89,7 @@ describe "ShareJsUpdateManager", -> @error = new Error("Something went wrong") @ShareJsUpdateManager._sendError = sinon.stub() @model.getSnapshot.callsArgWith(1, @error) - @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, (err, docLines, version) => + @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version) => @callback(err, docLines, version) done() @@ -114,22 +117,22 @@ describe "ShareJsUpdateManager", -> @opData = op: {t: "foo", p: 1} meta: source: "bar" - @rclient.publish = sinon.stub() + @WebRedisManager.sendData = sinon.stub() @callback("#{@project_id}:#{@doc_id}", @opData) it "should publish the op to redis", -> - @rclient.publish - .calledWith("applied-ops", JSON.stringify(project_id: @project_id, doc_id: @doc_id, op: @opData)) + @WebRedisManager.sendData + .calledWith({project_id: @project_id, doc_id: @doc_id, op: @opData}) .should.equal true describe "_sendError", -> beforeEach -> @error_text = "Something went wrong" - @rclient.publish = sinon.stub() + @WebRedisManager.sendData = sinon.stub() @ShareJsUpdateManager._sendError(@project_id, 
@doc_id, new Error(@error_text)) it "should publish the error to the redis stream", -> - @rclient.publish - .calledWith("applied-ops", JSON.stringify(project_id: @project_id, doc_id: @doc_id, error: @error_text)) + @WebRedisManager.sendData + .calledWith({project_id: @project_id, doc_id: @doc_id, error: @error_text}) .should.equal true From f43355b74dce812328584f689f982eed1b472567 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 1 Dec 2016 18:19:47 +0000 Subject: [PATCH 179/769] Fix UpdateManager tests --- ...dates.coffee => UpdateManagerTests.coffee} | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) rename services/document-updater/test/unit/coffee/UpdateManager/{ApplyingUpdates.coffee => UpdateManagerTests.coffee} (91%) diff --git a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee similarity index 91% rename from services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee rename to services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index d1898378d2..9969e42d61 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/ApplyingUpdates.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -157,9 +157,15 @@ describe "UpdateManager", -> @update = {op: [{p: 42, i: "foo"}]} @updatedDocLines = ["updated", "lines"] @version = 34 + @lines = ["original", "lines"] + @track_changes_on = true + @track_changes_entries = { entries: "mock", comments: "mock" } + @updated_track_changes_entries = { entries: "updated", comments: "updated" } @appliedOps = ["mock-applied-ops"] - @ShareJsUpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version, @appliedOps) - @RedisManager.updateDocument = sinon.stub().callsArg(4) + @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @track_changes_on, @track_changes_entries) + @TrackChangesManager.applyUpdate = sinon.stub().yields(null, @updated_track_changes_entries) + @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) + @RedisManager.updateDocument = sinon.stub().yields() @HistoryManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) describe "normally", -> @@ -168,12 +174,17 @@ describe "UpdateManager", -> it "should apply the updates via ShareJS", -> @ShareJsUpdateManager.applyUpdate - .calledWith(@project_id, @doc_id, @update) + .calledWith(@project_id, @doc_id, @update, @lines, @version) + .should.equal true + + it "should update the track changes entries", -> + @TrackChangesManager.applyUpdate + .calledWith(@project_id, @doc_id, @track_changes_entries, @appliedOps, @track_changes_on) .should.equal true it "should save the document", -> @RedisManager.updateDocument - .calledWith(@doc_id, @updatedDocLines, @version, @appliedOps) + .calledWith(@doc_id, @updatedDocLines, @version, @appliedOps, @updated_track_changes_entries) .should.equal true it "should push the applied ops into the track changes queue", -> From 4fadd75ef3da82bfb2fffd8eb413a9923bb91bfa Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 2 Dec 2016 11:04:21 +0000 Subject: [PATCH 180/769] Track changes based on flag on op, not global setting --- services/document-updater/app.coffee | 1 - .../app/coffee/DocumentManager.coffee | 28 ++---- .../app/coffee/HttpController.coffee | 11 --- .../app/coffee/PersistenceManager.coffee | 7 +- 
.../app/coffee/RedisManager.coffee | 23 ++--- .../app/coffee/TrackChangesManager.coffee | 6 +- .../app/coffee/UpdateManager.coffee | 4 +- .../coffee/TrackChangesTests.coffee | 88 ++++++------------- .../DocumentManagerTests.coffee | 23 +++-- .../PersistenceManagerTests.coffee | 13 ++- .../RedisManager/RedisManagerTests.coffee | 21 +---- .../UpdateManager/UpdateManagerTests.coffee | 5 +- 12 files changed, 71 insertions(+), 159 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index f9099660c8..004b9f77bc 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -45,7 +45,6 @@ app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLo app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject app.post '/project/:project_id/flush', HttpController.flushProject -app.post '/project/:project_id/track_changes', HttpController.setTrackChanges app.get '/total', (req, res)-> timer = new Metrics.Timer("http.allDocList") diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 9c7277c469..258d55bb65 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -13,18 +13,18 @@ module.exports = DocumentManager = timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version, track_changes, track_changes_entries) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> return callback(error) if error? if !lines? or !version? - logger.log {project_id, doc_id, track_changes}, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, track_changes, track_changes_entries) -> + logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> return callback(error) if error? - logger.log {project_id, doc_id, lines, version, track_changes}, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, track_changes, track_changes_entries, (error) -> + logger.log {project_id, doc_id, lines, version}, "got doc from persistence API" + RedisManager.putDocInMemory project_id, doc_id, lines, version, track_changes_entries, (error) -> return callback(error) if error? - callback null, lines, version, track_changes, track_changes_entries, false + callback null, lines, version, track_changes_entries, false else - callback null, lines, version, track_changes, track_changes_entries, true + callback null, lines, version, track_changes_entries, true getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") @@ -90,14 +90,14 @@ module.exports = DocumentManager = callback = (args...) -> timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version, track_changes, track_changes_entries) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> return callback(error) if error? if !lines? or !version? 
logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing" callback null # TODO: return a flag to bail out, as we go on to remove doc from memory? else logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" - PersistenceManager.setDoc project_id, doc_id, lines, version, track_changes, track_changes_entries, (error) -> + PersistenceManager.setDoc project_id, doc_id, lines, version, track_changes_entries, (error) -> return callback(error) if error? callback null @@ -119,12 +119,6 @@ module.exports = DocumentManager = RedisManager.removeDocFromMemory project_id, doc_id, (error) -> return callback(error) if error? callback null - - setTrackChanges: (project_id, doc_id, track_changes_on, callback = (error) ->) -> - RedisManager.setTrackChanges project_id, doc_id, track_changes_on, (error) -> - return callback(error) if error? - WebRedisManager.sendData {project_id, doc_id, track_changes_on} - callback() getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" @@ -145,7 +139,3 @@ module.exports = DocumentManager = flushAndDeleteDocWithLock: (project_id, doc_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, callback - - setTrackChangesWithLock: (project_id, doc_id, track_changes_on, callback = (error) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.setTrackChanges, project_id, doc_id, track_changes_on, callback \ No newline at end of file diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 0366746d56..dc74833697 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -96,15 +96,4 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, "deleted project via http" res.send 204 # No Content - - setTrackChanges: (req, res, next = (error) ->) -> - project_id = req.params.project_id - track_changes_on = req.body.on - if !track_changes_on? - return res.send 400 - track_changes_on = !!track_changes_on # Make boolean - logger.log {project_id, track_changes_on}, "turning on track changes via http" - ProjectManager.setTrackChangesWithLocks project_id, track_changes_on, (error) -> - return next(error) if error? - res.send 204 diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 25eb1f32a2..ee7674d80a 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -10,7 +10,7 @@ logger = require "logger-sharelatex" MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, track_changes, track_changes_entries) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, track_changes_entries) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -39,13 +39,13 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no doc lines")) if !body.version? 
or typeof body.version isnt "number" return callback(new Error("web API response had no valid doc version")) - return callback null, body.lines, body.version, body.track_changes, body.track_changes_entries + return callback null, body.lines, body.version, body.track_changes_entries else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not found: #{url}")) else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - setDoc: (project_id, doc_id, lines, version, track_changes, track_changes_entries, _callback = (error) ->) -> + setDoc: (project_id, doc_id, lines, version, track_changes_entries, _callback = (error) ->) -> timer = new Metrics.Timer("persistenceManager.setDoc") callback = (args...) -> timer.done() @@ -57,7 +57,6 @@ module.exports = PersistenceManager = method: "POST" json: lines: lines - track_changes: track_changes track_changes_entries: track_changes_entries version: version auth: diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 6ee764cb7e..cad5bd9f04 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -13,7 +13,7 @@ minutes = 60 # seconds for Redis expire module.exports = RedisManager = rclient: rclient - putDocInMemory : (project_id, doc_id, docLines, version, track_changes, track_changes_entries, _callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, track_changes_entries, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> timer.done() _callback(error) @@ -23,7 +23,6 @@ module.exports = RedisManager = multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) multi.set keys.projectKey({doc_id:doc_id}), project_id multi.set keys.docVersion(doc_id:doc_id), version - multi.set keys.trackChangesEnabled(doc_id:doc_id), if track_changes then "1" else "0" multi.set keys.trackChangesEntries(doc_id:doc_id), JSON.stringify(track_changes_entries) multi.exec (error) -> return callback(error) if error? @@ -43,36 +42,32 @@ module.exports = RedisManager = multi.del keys.docLines(doc_id:doc_id) multi.del keys.projectKey(doc_id:doc_id) multi.del keys.docVersion(doc_id:doc_id) - multi.del keys.trackChangesEnabled(doc_id:doc_id) multi.del keys.trackChangesEntries(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? rclient.srem keys.docsInProject(project_id:project_id), doc_id, callback - getDoc : (project_id, doc_id, callback = (error, lines, version, track_changes, track_changes_entries) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, track_changes_entries) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) multi.get keys.docVersion(doc_id:doc_id) multi.get keys.projectKey(doc_id:doc_id) - multi.get keys.trackChangesEnabled(doc_id:doc_id) multi.get keys.trackChangesEntries(doc_id:doc_id) - multi.exec (error, result)-> + multi.exec (error, [docLines, version, doc_project_id, track_changes_entries])-> timer.done() return callback(error) if error?
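A note on the multi.exec destructuring adopted above: a node_redis-style multi() queues commands and exec() returns one reply per command, in queue order, which is what makes positional destructuring safe. A minimal sketch of the pattern (not part of the patch; assumes a local redis and the node_redis client, with key names following the key schema used by this service's tests):

redis = require "redis"
rclient = redis.createClient()

doc_id = "doc-id-123"  # hypothetical id for illustration
multi = rclient.multi()
multi.get "doclines:#{doc_id}"             # reply 0
multi.get "DocVersion:#{doc_id}"           # reply 1
multi.get "ProjectId:#{doc_id}"            # reply 2
multi.get "TrackChangesEntries:#{doc_id}"  # reply 3
multi.exec (error, [docLines, version, doc_project_id, track_changes_entries]) ->
  throw error if error?
  # Replies are raw strings (or null), so JSON fields still need parsing
  console.log JSON.parse(docLines), parseInt(version or 0, 10)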
try - docLines = JSON.parse result[0] - track_changes_entries = JSON.parse result[4] + docLines = JSON.parse docLines + track_changes_entries = JSON.parse track_changes_entries catch e return callback(e) - version = parseInt(result[1] or 0, 10) - doc_project_id = result[2] - track_changes = (result[3] == "1") + version = parseInt(version or 0, 10) # check doc is in requested project if doc_project_id? and doc_project_id isnt project_id logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc not in project" return callback(new Errors.NotFoundError("document not found")) - callback null, docLines, version, track_changes, track_changes_entries + callback null, docLines, version, track_changes_entries getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -134,7 +129,3 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback - - setTrackChanges: (project_id, doc_id, track_changes_on, callback = (error) ->) -> - value = (if track_changes_on then "1" else "0") - rclient.set keys.trackChangesEnabled({doc_id}), value, callback diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 94f8a11ca1..65f1931bb4 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -1,12 +1,12 @@ ChangesTracker = require "./ChangesTracker" module.exports = TrackChangesManager = - applyUpdate: (project_id, doc_id, entries = {}, updates = [], track_changes, callback = (error, new_entries) ->) -> + applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> {changes, comments} = entries changesTracker = new ChangesTracker(changes, comments) - changesTracker.track_changes = track_changes for update in updates + changesTracker.track_changes = !!update.meta.tc for op in update.op - changesTracker.applyOp(op, { user_id: update.meta?.user_id, }) + changesTracker.applyOp(op, { user_id: update.meta?.user_id }) {changes, comments} = changesTracker callback null, {changes, comments} \ No newline at end of file diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index d08d9a62f3..7c98b97eee 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -48,13 +48,13 @@ module.exports = UpdateManager = applyUpdate: (project_id, doc_id, update, callback = (error) ->) -> UpdateManager._sanitizeUpdate update - DocumentManager.getDoc project_id, doc_id, (error, lines, version, track_changes, track_changes_entries) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> return callback(error) if error? if !lines? or !version? return callback(new Errors.NotFoundError("document not found: #{doc_id}")) ShareJsUpdateManager.applyUpdate project_id, doc_id, update, lines, version, (error, updatedDocLines, version, appliedOps) -> return callback(error) if error? 
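For context on the per-update flag above: only updates with meta.tc set are recorded as tracked changes, but every applied op still shifts the positions of existing tracked ranges. A toy sketch of that bookkeeping (simplified; the real ChangesTracker also handles overlapping ranges, merges and comments), showing why the acceptance test below stores a tracked insert made at p: 5 as p: 3 once a two-character delete lands at p: 1:

# Shift tracked changes that start at or after a delete back by its length.
shiftAfterDelete = (changes, deleteOp) ->
  for change in changes when change.op.p >= deleteOp.p
    change.op.p -= deleteOp.d.length
  changes

changes = [ { op: { i: "456", p: 5 }, metadata: { user_id: "user-1" } } ]
console.log shiftAfterDelete(changes, { d: "12", p: 1 })
# => [ { op: { i: "456", p: 3 }, metadata: { user_id: "user-1" } } ]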
- TrackChangesManager.applyUpdate project_id, doc_id, track_changes_entries, appliedOps, track_changes, (error, new_track_changes_entries) -> + TrackChangesManager.applyUpdate project_id, doc_id, track_changes_entries, appliedOps, (error, new_track_changes_entries) -> return callback(error) if error? logger.log doc_id: doc_id, version: version, "updating doc in redis" RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_track_changes_entries, (error) -> diff --git a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee b/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee index 406f46b430..43401cef6d 100644 --- a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee @@ -8,89 +8,51 @@ MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" describe "Track changes", -> - describe "turning on track changes", -> - before (done) -> - DocUpdaterClient.subscribeToAppliedOps @appliedOpsListener = sinon.stub() - @project_id = DocUpdaterClient.randomId() - @docs = [{ - id: doc_id0 = DocUpdaterClient.randomId() - lines: ["one", "two", "three"] - updatedLines: ["one", "one and a half", "two", "three"] - }, { - id: doc_id1 = DocUpdaterClient.randomId() - lines: ["four", "five", "six"] - updatedLines: ["four", "four and a half", "five", "six"] - }] - for doc in @docs - MockWebApi.insertDoc @project_id, doc.id, { - lines: doc.lines - version: 0 - } - async.series @docs.map((doc) => - (callback) => - DocUpdaterClient.preloadDoc @project_id, doc.id, callback - ), (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.setTrackChangesOn @project_id, (error, res, body) => - @statusCode = res.statusCode - done() - , 200 - - it "should return a 204 status code", -> - @statusCode.should.equal 204 - - it "should send a track changes message to real-time for each doc", -> - @appliedOpsListener.calledWith("applied-ops", JSON.stringify({ - project_id: @project_id, doc_id: @docs[0].id, track_changes_on: true - })).should.equal true - @appliedOpsListener.calledWith("applied-ops", JSON.stringify({ - project_id: @project_id, doc_id: @docs[1].id, track_changes_on: true - })).should.equal true - - it "should set the track changes key in redis", (done) -> - rclient.get "TrackChangesEnabled:#{@docs[0].id}", (error, value) => - throw error if error? - value.should.equal "1" - rclient.get "TrackChangesEnabled:#{@docs[1].id}", (error, value) -> - throw error if error? 
- value.should.equal "1" - done() - describe "tracking changes", -> before (done) -> @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() @doc = { - id: doc_id0 = DocUpdaterClient.randomId() - lines: ["one", "two", "three"] + id: DocUpdaterClient.randomId() + lines: ["aaa"] } - @update = + @updates = [{ doc: @doc.id - op: [{ - i: "one and a half\n" - p: 4 - }] + op: [{ i: "123", p: 1 }] v: 0 - meta: - user_id: @user_id = DocUpdaterClient.randomId() + meta: { user_id: @user_id } + }, { + doc: @doc.id + op: [{ i: "456", p: 5 }] + v: 1 + meta: { user_id: @user_id, tc: 1 } + }, { + doc: @doc.id + op: [{ d: "12", p: 1 }] + v: 2 + meta: { user_id: @user_id } + }] MockWebApi.insertDoc @project_id, @doc.id, { lines: @doc.lines version: 0 } + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => throw error if error? - DocUpdaterClient.setTrackChangesOn @project_id, (error, res, body) => + async.series jobs, (error) -> throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc.id, @update, (error) -> - throw error if error? - setTimeout done, 200 + setTimeout done, 200 it "should set the updated track changes entries in redis", (done) -> + console.log "TODO: GET ME FROM HTTP REQUEST" rclient.get "TrackChangesEntries:#{@doc.id}", (error, value) => throw error if error? entries = JSON.parse(value) change = entries.changes[0] - change.op.should.deep.equal @update.op[0] + change.op.should.deep.equal { i: "456", p: 3 } change.metadata.user_id.should.equal @user_id done() diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 2b0dce169b..d29a569d46 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -24,7 +24,6 @@ describe "DocumentManager", -> @lines = ["one", "two", "three"] @version = 42 @track_changes_entries = { comments: "mock", entries: "mock" } - @track_changes_on = true describe "flushAndDeleteDoc", -> describe "successfully", -> @@ -58,7 +57,7 @@ describe "DocumentManager", -> describe "flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) @PersistenceManager.setDoc = sinon.stub().yields() @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback @@ -69,7 +68,7 @@ describe "DocumentManager", -> it "should write the doc lines to the persistence layer", -> @PersistenceManager.setDoc - .calledWith(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries) + .calledWith(@project_id, @doc_id, @lines, @version, @track_changes_entries) .should.equal true it "should call the callback without error", -> @@ -80,7 +79,7 @@ describe "DocumentManager", -> describe "when the document is not in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null) @PersistenceManager.setDoc = sinon.stub().yields() @DocOpsManager.flushDocOpsToMongo = 
sinon.stub().callsArgWith(2) @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback @@ -103,7 +102,7 @@ describe "DocumentManager", -> describe "getDocAndRecentOps", -> describe "with a previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback @@ -125,7 +124,7 @@ describe "DocumentManager", -> describe "with no previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback @@ -146,7 +145,7 @@ describe "DocumentManager", -> describe "getDoc", -> describe "when the doc exists in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) @DocumentManager.getDoc @project_id, @doc_id, @callback it "should get the doc from Redis", -> @@ -155,7 +154,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @track_changes_on, @track_changes_entries, true).should.equal true + @callback.calledWith(null, @lines, @version, @track_changes_entries, true).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -163,7 +162,7 @@ describe "DocumentManager", -> describe "when the doc does not exist in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_on, @track_changes_entries) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) @RedisManager.putDocInMemory = sinon.stub().yields() @DocumentManager.getDoc @project_id, @doc_id, @callback @@ -179,11 +178,11 @@ describe "DocumentManager", -> it "should set the doc in Redis", -> @RedisManager.putDocInMemory - .calledWith(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries) + .calledWith(@project_id, @doc_id, @lines, @version, @track_changes_entries) .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @track_changes_on, @track_changes_entries, false).should.equal true + @callback.calledWith(null, @lines, @version, @track_changes_entries, false).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -193,7 +192,7 @@ describe "DocumentManager", -> beforeEach -> @beforeLines = ["before", "lines"] @afterLines = ["after", "lines"] - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @track_changes_on, @track_changes_entries, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, 
@track_changes_entries, true) @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index e9f2cf212e..35c276a4f2 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -19,7 +19,6 @@ describe "PersistenceManager", -> @lines = ["one", "two", "three"] @version = 42 @callback = sinon.stub() - @track_changes_on = true @track_changes_entries = { comments: "mock", entries: "mock" } @Settings.apis = web: @@ -34,7 +33,6 @@ describe "PersistenceManager", -> @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify({ lines: @lines, version: @version, - track_changes: @track_changes_on, track_changes_entries: @track_changes_entries })) @PersistenceManager.getDoc(@project_id, @doc_id, @callback) @@ -56,7 +54,7 @@ describe "PersistenceManager", -> .should.equal true it "should call the callback with the doc lines, version and track changes state", -> - @callback.calledWith(null, @lines, @version, @track_changes_on, @track_changes_entries).should.equal true + @callback.calledWith(null, @lines, @version, @track_changes_entries).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -114,7 +112,7 @@ describe "PersistenceManager", -> describe "with a successful response from the web api", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 200}) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_entries, @callback) it "should call the web api", -> @request @@ -123,7 +121,6 @@ describe "PersistenceManager", -> json: lines: @lines version: @version - track_changes: @track_changes_on track_changes_entries: @track_changes_entries method: "POST" auth: @@ -144,7 +141,7 @@ describe "PersistenceManager", -> describe "when request returns an error", -> beforeEach -> @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_entries, @callback) it "should return the error", -> @callback.calledWith(@error).should.equal true @@ -155,7 +152,7 @@ describe "PersistenceManager", -> describe "when the request returns 404", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_entries, @callback) it "should return a NotFoundError", -> @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true @@ -166,7 +163,7 @@ describe "PersistenceManager", -> describe "when the request returns an error status code", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_on, 
@track_changes_entries, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_entries, @callback) it "should return an error", -> @callback.calledWith(new Error("web api error")).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 6f9afc29d3..901af153c1 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -22,7 +22,6 @@ describe "RedisManager", -> projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - trackChangesEnabled: ({doc_id}) -> "TrackChangesEnabled:#{doc_id}" trackChangesEntries: ({doc_id}) -> "TrackChangesEntries:#{doc_id}" "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } "./Metrics": @metrics = @@ -40,11 +39,10 @@ describe "RedisManager", -> @jsonlines = JSON.stringify @lines @version = 42 @track_changes_on = true - @redis_track_changes_on = "1" @track_changes_entries = { comments: "mock", entries: "mock" } @json_track_changes_entries = JSON.stringify @track_changes_entries @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @project_id, @redis_track_changes_on, @json_track_changes_entries]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @project_id, @json_track_changes_entries]) describe "successfully", -> beforeEach -> @@ -60,11 +58,6 @@ describe "RedisManager", -> .calledWith("DocVersion:#{@doc_id}") .should.equal true - it "should get the track changes state", -> - @rclient.get - .calledWith("TrackChangesEnabled:#{@doc_id}") - .should.equal true - it "should get the track changes entries", -> @rclient.get .calledWith("TrackChangesEntries:#{@doc_id}") @@ -72,13 +65,13 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWith(null, @lines, @version, @track_changes_on, @track_changes_entries) + .calledWith(null, @lines, @version, @track_changes_entries) .should.equal true describe "getDoc with an invalid project id", -> beforeEach -> @another_project_id = "project-id-456" - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id, @redis_track_changes_on, @json_track_changes_entries]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id, @json_track_changes_entries]) @RedisManager.getDoc @project_id, @doc_id, @callback it 'should return an error', -> @@ -263,9 +256,8 @@ describe "RedisManager", -> @rclient.exec.yields() @lines = ["one", "two", "three"] @version = 42 - @track_changes_on = true @track_changes_entries = { comments: "mock", entries: "mock" } - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @track_changes_on, @track_changes_entries, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @track_changes_entries, done it "should set the lines", -> @rclient.set @@ -282,11 +274,6 @@ describe "RedisManager", -> .calledWith("TrackChangesEntries:#{@doc_id}", JSON.stringify(@track_changes_entries)) .should.equal true - it "should set the track changes state", -> - @rclient.set - .calledWith("TrackChangesEnabled:#{@doc_id}", "1") - .should.equal true - it "should set the 
project_id for the doc", -> @rclient.set .calledWith("ProjectId:#{@doc_id}", @project_id) diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 9969e42d61..fb9bc18eb1 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -158,11 +158,10 @@ describe "UpdateManager", -> @updatedDocLines = ["updated", "lines"] @version = 34 @lines = ["original", "lines"] - @track_changes_on = true @track_changes_entries = { entries: "mock", comments: "mock" } @updated_track_changes_entries = { entries: "updated", comments: "updated" } @appliedOps = ["mock-applied-ops"] - @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @track_changes_on, @track_changes_entries) + @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @track_changes_entries) @TrackChangesManager.applyUpdate = sinon.stub().yields(null, @updated_track_changes_entries) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields() @@ -179,7 +178,7 @@ describe "UpdateManager", -> it "should update the track changes entries", -> @TrackChangesManager.applyUpdate - .calledWith(@project_id, @doc_id, @track_changes_entries, @appliedOps, @track_changes_on) + .calledWith(@project_id, @doc_id, @track_changes_entries, @appliedOps) .should.equal true it "should save the document", -> From 418405e8b954003e848180831ce56203318bfbcb Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 2 Dec 2016 11:37:27 +0000 Subject: [PATCH 181/769] Return track changes entries in HTTP request --- .../document-updater/app/coffee/DocumentManager.coffee | 8 ++++---- .../document-updater/app/coffee/HttpController.coffee | 3 ++- .../test/acceptance/coffee/TrackChangesTests.coffee | 5 ++--- .../coffee/DocumentManager/DocumentManagerTests.coffee | 4 ++-- .../test/unit/coffee/HttpController/getDocTests.coffee | 4 +++- 5 files changed, 13 insertions(+), 11 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 258d55bb65..85a7a4263c 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -26,20 +26,20 @@ module.exports = DocumentManager = else callback null, lines, version, track_changes_entries, true - getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps) ->) -> + getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, track_changes_entries) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") callback = (args...) -> timer.done() _callback(args...) - DocumentManager.getDoc project_id, doc_id, (error, lines, version) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> return callback(error) if error? if fromVersion == -1 - callback null, lines, version, [] + callback null, lines, version, [], track_changes_entries else RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) -> return callback(error) if error? 
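An aside on the stubbing style these test patches keep moving towards: sinon's yields() invokes the first function argument a stub receives, whereas callsArgWith(n, ...) hard-codes the callback's position, so yields survives callback-arity changes like the ones in this series. A small sketch of the difference:

sinon = require "sinon"

byIndex = sinon.stub().callsArgWith(3, null, "result")  # callback must be argument 3
byIndex "project_id", "doc_id", "extra", (error, result) ->
  console.log result  # => "result"

byType = sinon.stub().yields(null, "result")  # first function argument wins
byType "project_id", "doc_id", (error, result) ->
  console.log result  # => "result"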
- callback null, lines, version, ops + callback null, lines, version, ops, track_changes_entries setDoc: (project_id, doc_id, newLines, source, user_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.setDoc") diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index dc74833697..fb916da045 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -18,7 +18,7 @@ module.exports = HttpController = else fromVersion = -1 - DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops) -> + DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, track_changes_entries) -> timer.done() return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "got doc via http" @@ -29,6 +29,7 @@ module.exports = HttpController = lines: lines version: version ops: ops + track_changes_entries: track_changes_entries _getTotalSizeOfLines: (lines) -> size = 0 diff --git a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee b/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee index 43401cef6d..e3577fd6d7 100644 --- a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee @@ -47,10 +47,9 @@ describe "Track changes", -> setTimeout done, 200 it "should set the updated track changes entries in redis", (done) -> - console.log "TODO: GET ME FROM HTTP REQUEST" - rclient.get "TrackChangesEntries:#{@doc.id}", (error, value) => + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => throw error if error? 
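Putting the controller change above together with the tests that follow, the GET doc endpoint's JSON body now carries the track changes entries alongside the lines, version and ops. A hedged sketch of a consumer (placeholder ids; the base URL matches the DocUpdaterClient test helper):

request = require "request"

project_id = "project-id-123"  # hypothetical ids for illustration
doc_id = "doc-id-456"
url = "http://localhost:3003/project/#{project_id}/doc/#{doc_id}"
request.get url, (error, res, body) ->
  throw error if error?
  {lines, version, ops, track_changes_entries} = JSON.parse(body)
  console.log version, track_changes_entries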
- entries = JSON.parse(value) + entries = data.track_changes_entries change = entries.changes[0] change.op.should.deep.equal { i: "456", p: 3 } change.metadata.user_id.should.equal @user_id diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index d29a569d46..3a1db1961c 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -117,7 +117,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ops).should.equal true + @callback.calledWith(null, @lines, @version, @ops, @track_changes_entries).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -137,7 +137,7 @@ describe "DocumentManager", -> @RedisManager.getPreviousDocOps.called.should.equal false it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, []).should.equal true + @callback.calledWith(null, @lines, @version, [], @track_changes_entries).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee index 17e5ad8e08..8ad2966b23 100644 --- a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee @@ -22,6 +22,7 @@ describe "HttpController.getDoc", -> @ops = ["mock-op-1", "mock-op-2"] @version = 42 @fromVersion = 42 + @track_changes_entries = { changes: "mock", comments: "mock" } @res = send: sinon.stub() @req = @@ -32,7 +33,7 @@ describe "HttpController.getDoc", -> describe "when the document exists and no recent ops are requested", -> beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, []) + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, [], @track_changes_entries) @HttpController.getDoc(@req, @res, @next) it "should get the doc", -> @@ -47,6 +48,7 @@ describe "HttpController.getDoc", -> lines: @lines version: @version ops: [] + track_changes_entries: @track_changes_entries })) .should.equal true From 3ea2e079938424ac52c2b3ee7470f32422f7ac63 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 2 Dec 2016 12:01:23 +0000 Subject: [PATCH 182/769] Add tests for fetching and flushing track changes entries to persistence layer --- .../app/coffee/TrackChangesManager.coffee | 13 ++++- .../coffee/TrackChangesTests.coffee | 50 ++++++++++++++++++- .../coffee/helpers/DocUpdaterClient.coffee | 15 ------ .../coffee/helpers/MockWebApi.coffee | 5 +- 4 files changed, 63 insertions(+), 20 deletions(-) diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee index 65f1931bb4..126b9ec7e0 100644 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ b/services/document-updater/app/coffee/TrackChangesManager.coffee @@ -8,5 +8,14 @@ module.exports = TrackChangesManager = changesTracker.track_changes = !!update.meta.tc for op in update.op changesTracker.applyOp(op, { user_id: 
update.meta?.user_id }) - {changes, comments} = changesTracker - callback null, {changes, comments} \ No newline at end of file + + # Return the minimal data structure needed, since most documents won't have any + # changes or comments + response = null + if changesTracker.changes?.length > 0 + response ?= {} + response.changes = changesTracker.changes + if changesTracker.comments?.length > 0 + response ?= {} + response.comments = changesTracker.comments + callback null, response \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee b/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee index e3577fd6d7..88250f82a7 100644 --- a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee @@ -46,7 +46,7 @@ describe "Track changes", -> throw error if error? setTimeout done, 200 - it "should set the updated track changes entries in redis", (done) -> + it "should update the tracked entries", (done) -> DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => throw error if error? entries = data.track_changes_entries @@ -55,3 +55,51 @@ describe "Track changes", -> change.metadata.user_id.should.equal @user_id done() + describe "Loading changes from persistence layer", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["a123aa"] + } + @update = { + doc: @doc.id + op: [{ i: "456", p: 5 }] + v: 0 + meta: { user_id: @user_id, tc: 1 } + } + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + track_changes_entries: { + changes: [{ + op: { i: "123", p: 1 } + metadata: + user_id: @user_id + ts: new Date() + }] + } + } + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc.id, @update, (error) -> + throw error if error? + setTimeout done, 200 + + it "should have preloaded the existing changes", (done) -> + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? + {changes} = data.track_changes_entries + changes[0].op.should.deep.equal { i: "123", p: 1 } + changes[1].op.should.deep.equal { i: "456", p: 5 } + done() + + it "should flush the changes to the persistence layer again", (done) -> + DocUpdaterClient.flushDoc @project_id, @doc.id, (error) => + throw error if error? 
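The "minimal data structure" logic introduced above means a document with no tracked changes or comments yields null rather than an empty object, so nothing gets persisted for the common case. The rule in isolation (standalone sketch):

minimalEntries = (changes = [], comments = []) ->
  response = null
  if changes.length > 0
    response ?= {}
    response.changes = changes
  if comments.length > 0
    response ?= {}
    response.comments = comments
  response

console.log minimalEntries()                    # => null, nothing to store
console.log minimalEntries([{ i: "x", p: 0 }])  # => { changes: [ { i: "x", p: 0 } ] }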
+ MockWebApi.getDocument @project_id, @doc.id, (error, doc) => + {changes} = doc.track_changes_entries + changes[0].op.should.deep.equal { i: "123", p: 1 } + changes[1].op.should.deep.equal { i: "456", p: 5 } + done() diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index b90e7ea82e..d704daefd1 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -72,18 +72,3 @@ module.exports = DocUpdaterClient = deleteProject: (project_id, callback = () ->) -> request.del "http://localhost:3003/project/#{project_id}", callback - - setTrackChangesOn: (project_id, callback = () ->) -> - request.post { - url: "http://localhost:3003/project/#{project_id}/track_changes" - json: - on: true - }, callback - - setTrackChangesOff: (project_id, callback = () ->) -> - request.post { - url: "http://localhost:3003/project/#{project_id}/track_changes" - json: - on: false - }, callback - diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index e77a18c0ea..4e9d073cc4 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -11,10 +11,11 @@ module.exports = MockWebApi = doc.lines ?= [] @docs["#{project_id}:#{doc_id}"] = doc - setDocument: (project_id, doc_id, lines, version, callback = (error) ->) -> + setDocument: (project_id, doc_id, lines, version, track_changes_entries, callback = (error) ->) -> doc = @docs["#{project_id}:#{doc_id}"] ||= {} doc.lines = lines doc.version = version + doc.track_changes_entries = track_changes_entries callback null getDocument: (project_id, doc_id, callback = (error, doc) ->) -> @@ -31,7 +32,7 @@ module.exports = MockWebApi = res.send 404 app.post "/project/:project_id/doc/:doc_id", express.bodyParser(), (req, res, next) => - MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, (error) -> + MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.track_changes_entries, (error) -> if error? 
res.send 500 else From e3fee1a1d13c82675884e9c452e8a32141a6ba57 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 8 Dec 2016 12:31:43 +0000 Subject: [PATCH 183/769] Rename 'track changes entries' -> 'ranges' --- .../app/coffee/DocumentManager.coffee | 24 ++-- .../app/coffee/HttpController.coffee | 4 +- .../app/coffee/PersistenceManager.coffee | 8 +- .../app/coffee/ProjectManager.coffee | 28 ----- .../app/coffee/RangesManager.coffee | 24 ++++ ...gesTracker.coffee => RangesTracker.coffee} | 22 +++- .../app/coffee/RedisKeyBuilder.coffee | 6 +- .../app/coffee/RedisManager.coffee | 42 +++++-- .../app/coffee/TrackChangesManager.coffee | 21 ---- .../app/coffee/UpdateManager.coffee | 8 +- .../config/settings.defaults.coffee | 6 +- ...ChangesTests.coffee => RangesTests.coffee} | 22 ++-- .../coffee/helpers/MockWebApi.coffee | 6 +- .../DocumentManagerTests.coffee | 28 ++--- .../coffee/HttpController/getDocTests.coffee | 6 +- .../PersistenceManagerTests.coffee | 18 +-- .../RedisManager/RedisManagerTests.coffee | 115 +++++++++++------- .../UpdateManager/UpdateManagerTests.coffee | 20 +-- 18 files changed, 225 insertions(+), 183 deletions(-) create mode 100644 services/document-updater/app/coffee/RangesManager.coffee rename services/document-updater/app/coffee/{ChangesTracker.coffee => RangesTracker.coffee} (95%) delete mode 100644 services/document-updater/app/coffee/TrackChangesManager.coffee rename services/document-updater/test/acceptance/coffee/{TrackChangesTests.coffee => RangesTests.coffee} (83%) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 85a7a4263c..c6d4773036 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -13,33 +13,33 @@ module.exports = DocumentManager = timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> return callback(error) if error? if !lines? or !version? logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> return callback(error) if error? logger.log {project_id, doc_id, lines, version}, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, track_changes_entries, (error) -> + RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, (error) -> return callback(error) if error? - callback null, lines, version, track_changes_entries, false + callback null, lines, version, ranges, false else - callback null, lines, version, track_changes_entries, true + callback null, lines, version, ranges, true - getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, track_changes_entries) ->) -> + getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, ranges) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") callback = (args...) -> timer.done() _callback(args...) - DocumentManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> return callback(error) if error? 
if fromVersion == -1 - callback null, lines, version, [], track_changes_entries + callback null, lines, version, [], ranges else RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) -> return callback(error) if error? - callback null, lines, version, ops, track_changes_entries + callback null, lines, version, ops, ranges setDoc: (project_id, doc_id, newLines, source, user_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.setDoc") @@ -51,7 +51,7 @@ module.exports = DocumentManager = return callback(new Error("No lines were provided to setDoc")) UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, track_changes, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, alreadyLoaded) -> return callback(error) if error? if oldLines? and oldLines.length > 0 and oldLines[0].text? @@ -90,14 +90,14 @@ module.exports = DocumentManager = callback = (args...) -> timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> return callback(error) if error? if !lines? or !version? logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing" callback null # TODO: return a flag to bail out, as we go on to remove doc from memory? else logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" - PersistenceManager.setDoc project_id, doc_id, lines, version, track_changes_entries, (error) -> + PersistenceManager.setDoc project_id, doc_id, lines, version, ranges, (error) -> return callback(error) if error? callback null diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index fb916da045..e138fe8bc4 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -18,7 +18,7 @@ module.exports = HttpController = else fromVersion = -1 - DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, track_changes_entries) -> + DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges) -> timer.done() return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "got doc via http" @@ -29,7 +29,7 @@ module.exports = HttpController = lines: lines version: version ops: ops - track_changes_entries: track_changes_entries + ranges: ranges _getTotalSizeOfLines: (lines) -> size = 0 diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index ee7674d80a..457627982f 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -10,7 +10,7 @@ logger = require "logger-sharelatex" MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, track_changes_entries) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -39,13 +39,13 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no doc lines")) if !body.version? 
or typeof body.version isnt "number" return callback(new Error("web API response had no valid doc version")) - return callback null, body.lines, body.version, body.track_changes_entries + return callback null, body.lines, body.version, body.ranges else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not found: #{url}")) else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - setDoc: (project_id, doc_id, lines, version, track_changes_entries, _callback = (error) ->) -> + setDoc: (project_id, doc_id, lines, version, ranges, _callback = (error) ->) -> timer = new Metrics.Timer("persistenceManager.setDoc") callback = (args...) -> timer.done() @@ -57,7 +57,7 @@ module.exports = PersistenceManager = method: "POST" json: lines: lines - track_changes_entries: track_changes_entries + ranges: ranges version: version auth: user: Settings.apis.web.user diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index a38fe08397..cd4c66ae8d 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -56,31 +56,3 @@ module.exports = ProjectManager = callback new Error("Errors deleting docs. See log for details") else callback(null) - - setTrackChangesWithLocks: (project_id, track_changes_on, _callback = (error) ->) -> - timer = new Metrics.Timer("projectManager.toggleTrackChangesWithLocks") - callback = (args...) -> - timer.done() - _callback(args...) - - RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> - return callback(error) if error? - jobs = [] - errors = [] - for doc_id in (doc_ids or []) - do (doc_id) -> - jobs.push (callback) -> - DocumentManager.setTrackChangesWithLock project_id, doc_id, track_changes_on, (error) -> - if error? - logger.error {err: error, project_id, doc_ids, track_changes_on}, "error toggle track changes for doc" - errors.push(error) - callback() - # TODO: If no docs, turn on track changes in Mongo manually - - logger.log {project_id, doc_ids, track_changes_on}, "toggling track changes for docs" - async.series jobs, () -> - if errors.length > 0 - callback new Error("Errors toggling track changes for docs.
See log for details") - else - callback(null) - diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee new file mode 100644 index 0000000000..1e19a63b0d --- /dev/null +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -0,0 +1,24 @@ +RangesTracker = require "./RangesTracker" +logger = require "logger-sharelatex" + +module.exports = RangesManager = + applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> + {changes, comments} = entries + logger.log {changes, comments, updates}, "applying updates to ranges" + rangesTracker = new RangesTracker(changes, comments) + for update in updates + rangesTracker.track_changes = !!update.meta.tc + for op in update.op + rangesTracker.applyOp(op, { user_id: update.meta?.user_id }) + + # Return the minimal data structure needed, since most documents won't have any + # changes or comments + response = {} + if rangesTracker.changes?.length > 0 + response ?= {} + response.changes = rangesTracker.changes + if rangesTracker.comments?.length > 0 + response ?= {} + response.comments = rangesTracker.comments + logger.log {response}, "applied updates to ranges" + callback null, response \ No newline at end of file diff --git a/services/document-updater/app/coffee/ChangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee similarity index 95% rename from services/document-updater/app/coffee/ChangesTracker.coffee rename to services/document-updater/app/coffee/RangesTracker.coffee index 8bc4cf9380..6a3625fd09 100644 --- a/services/document-updater/app/coffee/ChangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -1,5 +1,5 @@ load = (EventEmitter) -> - class ChangesTracker extends EventEmitter + class RangesTracker extends EventEmitter # The purpose of this class is to track a set of inserts and deletes to a document, like # track changes in Word. We store these as a set of ShareJs style ranges: # {i: "foo", p: 42} # Insert 'foo' at offset 42 @@ -97,7 +97,7 @@ load = (EventEmitter) -> return if !change? @_removeChange(change) - applyOp: (op, metadata = {}) -> + applyOp: (op, metadata) -> metadata.ts ?= new Date() # Apply an op that has been applied to the document to our changes to keep them up to date if op.i?
@@ -371,7 +371,23 @@ load = (EventEmitter) -> @emit "changes:moved", moved_changes _newId: () -> - (@id++).toString() + # Generate a Mongo ObjectId + # Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js + @_pid ?= Math.floor(Math.random() * (32767)) + @_machine ?= Math.floor(Math.random() * (16777216)) + timestamp = Math.floor(new Date().valueOf() / 1000) + @_increment ?= 0 + @_increment++ + + timestamp = timestamp.toString(16) + machine = @_machine.toString(16) + pid = @_pid.toString(16) + increment = @_increment.toString(16) + + return '00000000'.substr(0, 8 - timestamp.length) + timestamp + + '000000'.substr(0, 6 - machine.length) + machine + + '0000'.substr(0, 4 - pid.length) + pid + + '000000'.substr(0, 6 - increment.length) + increment; _addOp: (op, metadata) -> change = { diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee index c09fb43f00..1b5e548809 100644 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -34,10 +34,8 @@ module.exports = RedisKeyBuilder = return (key_schema) -> key_schema.uncompressedHistoryOp({doc_id}) pendingUpdates: ({doc_id}) -> return (key_schema) -> key_schema.pendingUpdates({doc_id}) - trackChangesEnabled: ({doc_id}) -> - return (key_schema) -> key_schema.trackChangesEnabled({doc_id}) - trackChangesEntries: ({doc_id}) -> - return (key_schema) -> key_schema.trackChangesEntries({doc_id}) + ranges: ({doc_id}) -> + return (key_schema) -> key_schema.ranges({doc_id}) docsInProject: ({project_id}) -> return (key_schema) -> key_schema.docsInProject({project_id}) docsWithHistoryOps: ({project_id}) -> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index cad5bd9f04..be5166b94c 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -13,17 +13,21 @@ minutes = 60 # seconds for Redis expire module.exports = RedisManager = rclient: rclient - putDocInMemory : (project_id, doc_id, docLines, version, track_changes_entries, _callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, ranges, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> timer.done() _callback(error) logger.log project_id:project_id, doc_id:doc_id, version: version, "putting doc in redis" + ranges = RedisManager._serializeRanges(ranges) multi = rclient.multi() multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) multi.set keys.projectKey({doc_id:doc_id}), project_id multi.set keys.docVersion(doc_id:doc_id), version - multi.set keys.trackChangesEntries(doc_id:doc_id), JSON.stringify(track_changes_entries) + if ranges? + multi.set keys.ranges(doc_id:doc_id), ranges + else + multi.del keys.ranges(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback @@ -42,32 +46,33 @@ module.exports = RedisManager = multi.del keys.docLines(doc_id:doc_id) multi.del keys.projectKey(doc_id:doc_id) multi.del keys.docVersion(doc_id:doc_id) - multi.del keys.trackChangesEntries(doc_id:doc_id) + multi.del keys.ranges(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? 
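The _newId rewrite above builds a Mongo-style ObjectId by padding four hex fields to fixed widths: 8 chars of timestamp, 6 of machine id, 4 of pid and 6 of counter, 24 hex chars in total. A standalone sketch of that layout (illustrative helper names, not from the patch):

pad = (hex, width) -> "0".repeat(Math.max(0, width - hex.length)) + hex

objectIdLike = (timestamp, machine, pid, increment) ->
  pad(timestamp.toString(16), 8) +
    pad(machine.toString(16), 6) +
    pad(pid.toString(16), 4) +
    pad(increment.toString(16), 6)

id = objectIdLike(Math.floor(Date.now() / 1000), 42, 7, 1)
console.log id, /^[0-9a-f]{24}$/.test(id)  # => <24 hex chars> true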
rclient.srem keys.docsInProject(project_id:project_id), doc_id, callback - getDoc : (project_id, doc_id, callback = (error, lines, version, track_changes_entries) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, ranges) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) multi.get keys.docVersion(doc_id:doc_id) multi.get keys.projectKey(doc_id:doc_id) - multi.get keys.trackChangesEntries(doc_id:doc_id) - multi.exec (error, [docLines, version, doc_project_id, track_changes_entries])-> + multi.get keys.ranges(doc_id:doc_id) + multi.exec (error, [docLines, version, doc_project_id, ranges])-> timer.done() return callback(error) if error? try docLines = JSON.parse docLines - track_changes_entries = JSON.parse track_changes_entries + ranges = RedisManager._deserializeRanges(ranges) catch e return callback(e) + version = parseInt(version or 0, 10) # check doc is in requested project if doc_project_id? and doc_project_id isnt project_id logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc not in project" return callback(new Errors.NotFoundError("document not found")) - callback null, docLines, version, track_changes_entries + callback null, docLines, version, ranges getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -107,7 +112,7 @@ module.exports = RedisManager = DOC_OPS_TTL: 60 * minutes DOC_OPS_MAX_LENGTH: 100 - updateDocument : (doc_id, docLines, newVersion, appliedOps = [], track_changes_entries, callback = (error) ->)-> + updateDocument : (doc_id, docLines, newVersion, appliedOps = [], ranges, callback = (error) ->)-> RedisManager.getDocVersion doc_id, (error, currentVersion) -> return callback(error) if error? if currentVersion + appliedOps.length != newVersion @@ -122,10 +127,27 @@ module.exports = RedisManager = multi.rpush keys.docOps(doc_id: doc_id), jsonOps... multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 - multi.set keys.trackChangesEntries(doc_id:doc_id), JSON.stringify(track_changes_entries) + ranges = RedisManager._serializeRanges(ranges) + if ranges? + multi.set keys.ranges(doc_id:doc_id), ranges + else + multi.del keys.ranges(doc_id:doc_id) multi.exec (error, replys) -> return callback(error) if error? return callback() getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback + + _serializeRanges: (ranges) -> + jsonRanges = JSON.stringify(ranges) + if jsonRanges == '{}' + # Most docs will have empty ranges, so don't fill Redis with lots of '{}' keys + jsonRanges = null + return jsonRanges + + _deserializeRanges: (ranges) -> + if !ranges?
or ranges == "" + return {} + else + return JSON.parse(ranges) \ No newline at end of file diff --git a/services/document-updater/app/coffee/TrackChangesManager.coffee b/services/document-updater/app/coffee/TrackChangesManager.coffee deleted file mode 100644 index 126b9ec7e0..0000000000 --- a/services/document-updater/app/coffee/TrackChangesManager.coffee +++ /dev/null @@ -1,21 +0,0 @@ -ChangesTracker = require "./ChangesTracker" - -module.exports = TrackChangesManager = - applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> - {changes, comments} = entries - changesTracker = new ChangesTracker(changes, comments) - for update in updates - changesTracker.track_changes = !!update.meta.tc - for op in update.op - changesTracker.applyOp(op, { user_id: update.meta?.user_id }) - - # Return the minimal data structure needed, since most documents won't have any - # changes or comments - response = null - if changesTracker.changes?.length > 0 - response ?= {} - response.changes = changesTracker.changes - if changesTracker.comments?.length > 0 - response ?= {} - response.comments = changesTracker.comments - callback null, response \ No newline at end of file diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 7c98b97eee..1678c4d4c0 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -9,7 +9,7 @@ logger = require('logger-sharelatex') Metrics = require "./Metrics" Errors = require "./Errors" DocumentManager = require "./DocumentManager" -TrackChangesManager = require "./TrackChangesManager" +RangesManager = require "./RangesManager" module.exports = UpdateManager = processOutstandingUpdates: (project_id, doc_id, callback = (error) ->) -> @@ -48,16 +48,16 @@ module.exports = UpdateManager = applyUpdate: (project_id, doc_id, update, callback = (error) ->) -> UpdateManager._sanitizeUpdate update - DocumentManager.getDoc project_id, doc_id, (error, lines, version, track_changes_entries) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> return callback(error) if error? if !lines? or !version? return callback(new Errors.NotFoundError("document not found: #{doc_id}")) ShareJsUpdateManager.applyUpdate project_id, doc_id, update, lines, version, (error, updatedDocLines, version, appliedOps) -> return callback(error) if error? - TrackChangesManager.applyUpdate project_id, doc_id, track_changes_entries, appliedOps, (error, new_track_changes_entries) -> + RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, (error, new_ranges) -> return callback(error) if error? logger.log doc_id: doc_id, version: version, "updating doc in redis" - RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_track_changes_entries, (error) -> + RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_ranges, (error) -> return callback(error) if error? 
HistoryManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, callback diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index edb8c56ad3..42daf505d9 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -32,8 +32,7 @@ module.exports = docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - trackChangesEnabled: ({doc_id}) -> "TrackChangesEnabled:#{doc_id}" - trackChangesEntries: ({doc_id}) -> "TrackChangesEntries:#{doc_id}" + ranges: ({doc_id}) -> "Ranges:#{doc_id}" # }, { # cluster: [{ # port: "7000" @@ -46,8 +45,7 @@ module.exports = # docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" - # trackChangesEnabled: ({doc_id}) -> "TrackChangesEnabled:{#{doc_id}}" - # trackChangesEntries: ({doc_id}) -> "TrackChangesEntries:{#{doc_id}}" + # ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" }] max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee similarity index 83% rename from services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee rename to services/document-updater/test/acceptance/coffee/RangesTests.coffee index 88250f82a7..95a7fae9d4 100644 --- a/services/document-updater/test/acceptance/coffee/TrackChangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -7,8 +7,8 @@ rclient = require("redis").createClient() MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" -describe "Track changes", -> - describe "tracking changes", -> +describe "Ranges", -> + describe "tracking changes from ops", -> before (done) -> @project_id = DocUpdaterClient.randomId() @user_id = DocUpdaterClient.randomId() @@ -46,16 +46,16 @@ describe "Track changes", -> throw error if error? setTimeout done, 200 - it "should update the tracked entries", (done) -> + it "should update the ranges", (done) -> DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => throw error if error? - entries = data.track_changes_entries - change = entries.changes[0] + ranges = data.ranges + change = ranges.changes[0] change.op.should.deep.equal { i: "456", p: 3 } change.metadata.user_id.should.equal @user_id done() - describe "Loading changes from persistence layer", -> + describe "Loading ranges from persistence layer", -> before (done) -> @project_id = DocUpdaterClient.randomId() @user_id = DocUpdaterClient.randomId() @@ -72,7 +72,7 @@ describe "Track changes", -> MockWebApi.insertDoc @project_id, @doc.id, { lines: @doc.lines version: 0 - track_changes_entries: { + ranges: { changes: [{ op: { i: "123", p: 1 } metadata: @@ -87,19 +87,19 @@ describe "Track changes", -> throw error if error? setTimeout done, 200 - it "should have preloaded the existing changes", (done) -> + it "should have preloaded the existing ranges", (done) -> DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => throw error if error? 
- {changes} = data.track_changes_entries + {changes} = data.ranges changes[0].op.should.deep.equal { i: "123", p: 1 } changes[1].op.should.deep.equal { i: "456", p: 5 } done() - it "should flush the changes to the persistence layer again", (done) -> + it "should flush the ranges to the persistence layer again", (done) -> DocUpdaterClient.flushDoc @project_id, @doc.id, (error) => throw error if error? MockWebApi.getDocument @project_id, @doc.id, (error, doc) => - {changes} = doc.track_changes_entries + {changes} = doc.ranges changes[0].op.should.deep.equal { i: "123", p: 1 } changes[1].op.should.deep.equal { i: "456", p: 5 } done() diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index 4e9d073cc4..f2b8bce318 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -11,11 +11,11 @@ module.exports = MockWebApi = doc.lines ?= [] @docs["#{project_id}:#{doc_id}"] = doc - setDocument: (project_id, doc_id, lines, version, track_changes_entries, callback = (error) ->) -> + setDocument: (project_id, doc_id, lines, version, ranges, callback = (error) ->) -> doc = @docs["#{project_id}:#{doc_id}"] ||= {} doc.lines = lines doc.version = version - doc.track_changes_entries = track_changes_entries + doc.ranges = ranges callback null getDocument: (project_id, doc_id, callback = (error, doc) ->) -> @@ -32,7 +32,7 @@ module.exports = MockWebApi = res.send 404 app.post "/project/:project_id/doc/:doc_id", express.bodyParser(), (req, res, next) => - MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.track_changes_entries, (error) -> + MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, (error) -> if error? 
res.send 500 else diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 3a1db1961c..5966843f5a 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -23,7 +23,7 @@ describe "DocumentManager", -> @callback = sinon.stub() @lines = ["one", "two", "three"] @version = 42 - @track_changes_entries = { comments: "mock", entries: "mock" } + @ranges = { comments: "mock", entries: "mock" } describe "flushAndDeleteDoc", -> describe "successfully", -> @@ -49,7 +49,7 @@ describe "DocumentManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - it "should flush to track changes", -> + it "should flush to the history api", -> @HistoryManager.flushDocChanges .calledWith(@project_id, @doc_id) .should.equal true @@ -57,7 +57,7 @@ describe "DocumentManager", -> describe "flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) @PersistenceManager.setDoc = sinon.stub().yields() @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback @@ -68,7 +68,7 @@ describe "DocumentManager", -> it "should write the doc lines to the persistence layer", -> @PersistenceManager.setDoc - .calledWith(@project_id, @doc_id, @lines, @version, @track_changes_entries) + .calledWith(@project_id, @doc_id, @lines, @version, @ranges) .should.equal true it "should call the callback without error", -> @@ -102,7 +102,7 @@ describe "DocumentManager", -> describe "getDocAndRecentOps", -> describe "with a previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback @@ -117,14 +117,14 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ops, @track_changes_entries).should.equal true + @callback.calledWith(null, @lines, @version, @ops, @ranges).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true describe "with no previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback @@ -137,7 +137,7 @@ describe "DocumentManager", -> @RedisManager.getPreviousDocOps.called.should.equal false it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, [], @track_changes_entries).should.equal true + @callback.calledWith(null, @lines, @version, [], @ranges).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -145,7 +145,7 @@ describe 
"DocumentManager", -> describe "getDoc", -> describe "when the doc exists in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) @DocumentManager.getDoc @project_id, @doc_id, @callback it "should get the doc from Redis", -> @@ -154,7 +154,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @track_changes_entries, true).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, true).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -162,7 +162,7 @@ describe "DocumentManager", -> describe "when the doc does not exist in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @track_changes_entries) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) @RedisManager.putDocInMemory = sinon.stub().yields() @DocumentManager.getDoc @project_id, @doc_id, @callback @@ -178,11 +178,11 @@ describe "DocumentManager", -> it "should set the doc in Redis", -> @RedisManager.putDocInMemory - .calledWith(@project_id, @doc_id, @lines, @version, @track_changes_entries) + .calledWith(@project_id, @doc_id, @lines, @version, @ranges) .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @track_changes_entries, false).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, false).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -192,7 +192,7 @@ describe "DocumentManager", -> beforeEach -> @beforeLines = ["before", "lines"] @afterLines = ["after", "lines"] - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @track_changes_entries, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, true) @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) diff --git a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee index 8ad2966b23..8fa3931d65 100644 --- a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee @@ -22,7 +22,7 @@ describe "HttpController.getDoc", -> @ops = ["mock-op-1", "mock-op-2"] @version = 42 @fromVersion = 42 - @track_changes_entries = { changes: "mock", comments: "mock" } + @ranges = { changes: "mock", comments: "mock" } @res = send: sinon.stub() @req = @@ -33,7 +33,7 @@ describe "HttpController.getDoc", -> describe "when the document exists and no recent ops are requested", -> beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, [], @track_changes_entries) + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, [], @ranges) @HttpController.getDoc(@req, @res, @next) it "should get the doc", -> @@ -48,7 +48,7 @@ describe "HttpController.getDoc", 
-> lines: @lines version: @version ops: [] - track_changes_entries: @track_changes_entries + ranges: @ranges })) .should.equal true diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index 35c276a4f2..19a3d547a2 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -19,7 +19,7 @@ describe "PersistenceManager", -> @lines = ["one", "two", "three"] @version = 42 @callback = sinon.stub() - @track_changes_entries = { comments: "mock", entries: "mock" } + @ranges = { comments: "mock", entries: "mock" } @Settings.apis = web: url: @url = "www.example.com" @@ -33,7 +33,7 @@ describe "PersistenceManager", -> @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify({ lines: @lines, version: @version, - track_changes_entries: @track_changes_entries + ranges: @ranges })) @PersistenceManager.getDoc(@project_id, @doc_id, @callback) @@ -53,8 +53,8 @@ describe "PersistenceManager", -> }) .should.equal true - it "should call the callback with the doc lines, version and track changes state", -> - @callback.calledWith(null, @lines, @version, @track_changes_entries).should.equal true + it "should call the callback with the doc lines, version and ranges", -> + @callback.calledWith(null, @lines, @version, @ranges).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -112,7 +112,7 @@ describe "PersistenceManager", -> describe "with a successful response from the web api", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 200}) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_entries, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) it "should call the web api", -> @request @@ -121,7 +121,7 @@ describe "PersistenceManager", -> json: lines: @lines version: @version - track_changes_entries: @track_changes_entries + ranges: @ranges method: "POST" auth: user: @user @@ -141,7 +141,7 @@ describe "PersistenceManager", -> describe "when request returns an error", -> beforeEach -> @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_entries, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) it "should return the error", -> @callback.calledWith(@error).should.equal true @@ -152,7 +152,7 @@ describe "PersistenceManager", -> describe "when the request returns 404", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_entries, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) it "should return a NotFoundError", -> @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true @@ -163,7 +163,7 @@ describe "PersistenceManager", -> describe "when the request returns an error status code", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @track_changes_entries, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) it "should return an 
error", -> @callback.calledWith(new Error("web api error")).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 901af153c1..420d2039b4 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -22,7 +22,7 @@ describe "RedisManager", -> projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - trackChangesEntries: ({doc_id}) -> "TrackChangesEntries:#{doc_id}" + ranges: ({doc_id}) -> "Ranges:#{doc_id}" "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } "./Metrics": @metrics = inc: sinon.stub() @@ -38,11 +38,10 @@ describe "RedisManager", -> @lines = ["one", "two", "three"] @jsonlines = JSON.stringify @lines @version = 42 - @track_changes_on = true - @track_changes_entries = { comments: "mock", entries: "mock" } - @json_track_changes_entries = JSON.stringify @track_changes_entries + @ranges = { comments: "mock", entries: "mock" } + @json_ranges = JSON.stringify @ranges @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @project_id, @json_track_changes_entries]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @project_id, @json_ranges]) describe "successfully", -> beforeEach -> @@ -58,20 +57,20 @@ describe "RedisManager", -> .calledWith("DocVersion:#{@doc_id}") .should.equal true - it "should get the track changes entries", -> + it "should get the ranges", -> @rclient.get - .calledWith("TrackChangesEntries:#{@doc_id}") + .calledWith("Ranges:#{@doc_id}") .should.equal true it 'should return the document', -> @callback - .calledWith(null, @lines, @version, @track_changes_entries) + .calledWith(null, @lines, @version, @ranges) .should.equal true describe "getDoc with an invalid project id", -> beforeEach -> @another_project_id = "project-id-456" - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id, @json_track_changes_entries]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id, @json_ranges]) @RedisManager.getDoc @project_id, @doc_id, @callback it 'should return an error', -> @@ -169,19 +168,20 @@ describe "RedisManager", -> @rclient.rpush = sinon.stub() @rclient.expire = sinon.stub() @rclient.ltrim = sinon.stub() + @rclient.del = sinon.stub() @RedisManager.getDocVersion = sinon.stub() @lines = ["one", "two", "three"] @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] @version = 42 - @track_changes_entries = { comments: "mock", entries: "mock" } + @ranges = { comments: "mock", entries: "mock" } @rclient.exec = sinon.stub().callsArg(0) describe "with a consistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @track_changes_entries, @callback + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback it "should get the current doc version to check for consistency", -> @RedisManager.getDocVersion @@ -198,9 +198,9 @@ describe "RedisManager", -> .calledWith("DocVersion:#{@doc_id}", @version) .should.equal true - it "should set the track changes entries", 
-> + it "should set the ranges", -> @rclient.set - .calledWith("TrackChangesEntries:#{@doc_id}", JSON.stringify(@track_changes_entries)) + .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal true it "should push the doc op into the doc ops list", -> @@ -224,7 +224,7 @@ describe "RedisManager", -> describe "with an inconsistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length - 1) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @track_changes_entries, @callback + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback it "should not call multi.exec", -> @rclient.exec.called.should.equal false @@ -237,7 +237,7 @@ describe "RedisManager", -> describe "with no updates", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version) - @RedisManager.updateDocument @doc_id, @lines, @version, [], @track_changes_entries, @callback + @RedisManager.updateDocument @doc_id, @lines, @version, [], @ranges, @callback it "should not do an rpush", -> @rclient.rpush @@ -248,42 +248,75 @@ describe "RedisManager", -> @rclient.set .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true + + describe "with empty ranges", -> + beforeEach -> + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, {}, @callback + + it "should not set the ranges", -> + @rclient.set + .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) + .should.equal false + + it "should delete the ranges key", -> + @rclient.del + .calledWith("Ranges:#{@doc_id}") + .should.equal true describe "putDocInMemory", -> - beforeEach (done) -> + beforeEach -> @rclient.set = sinon.stub() @rclient.sadd = sinon.stub().yields() + @rclient.del = sinon.stub() @rclient.exec.yields() @lines = ["one", "two", "three"] @version = 42 - @track_changes_entries = { comments: "mock", entries: "mock" } - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @track_changes_entries, done + @ranges = { comments: "mock", entries: "mock" } - it "should set the lines", -> - @rclient.set - .calledWith("doclines:#{@doc_id}", JSON.stringify @lines) - .should.equal true - - it "should set the version", -> - @rclient.set - .calledWith("DocVersion:#{@doc_id}", @version) - .should.equal true + describe "with non-empty ranges", -> + beforeEach (done) -> + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, done - it "should set the track changes entries", -> - @rclient.set - .calledWith("TrackChangesEntries:#{@doc_id}", JSON.stringify(@track_changes_entries)) - .should.equal true + it "should set the lines", -> + @rclient.set + .calledWith("doclines:#{@doc_id}", JSON.stringify @lines) + .should.equal true - it "should set the project_id for the doc", -> - @rclient.set - .calledWith("ProjectId:#{@doc_id}", @project_id) - .should.equal true - - it "should add the doc_id to the project set", -> - @rclient.sadd - .calledWith("DocsIn:#{@project_id}", @doc_id) - .should.equal true - + it "should set the version", -> + @rclient.set + .calledWith("DocVersion:#{@doc_id}", @version) + .should.equal true + + it "should set the ranges", -> + @rclient.set + .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) + .should.equal true + + it "should set the project_id for the doc", -> + @rclient.set + .calledWith("ProjectId:#{@doc_id}", @project_id) + .should.equal true + + it "should add the doc_id 
to the project set", -> + @rclient.sadd + .calledWith("DocsIn:#{@project_id}", @doc_id) + .should.equal true + + describe "with empty ranges", -> + beforeEach (done) -> + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, done + + it "should delete the ranges key", -> + @rclient.del + .calledWith("Ranges:#{@doc_id}") + .should.equal true + + it "should not set the ranges", -> + @rclient.set + .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) + .should.equal false + describe "removeDocFromMemory", -> beforeEach (done) -> @rclient.del = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index fb9bc18eb1..e87391af44 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -21,7 +21,7 @@ describe "UpdateManager", -> done: sinon.stub() "settings-sharelatex": Settings = {} "./DocumentManager": @DocumentManager = {} - "./TrackChangesManager": @TrackChangesManager = {} + "./RangesManager": @RangesManager = {} describe "processOutstandingUpdates", -> beforeEach -> @@ -158,11 +158,11 @@ describe "UpdateManager", -> @updatedDocLines = ["updated", "lines"] @version = 34 @lines = ["original", "lines"] - @track_changes_entries = { entries: "mock", comments: "mock" } - @updated_track_changes_entries = { entries: "updated", comments: "updated" } + @ranges = { entries: "mock", comments: "mock" } + @updated_ranges = { entries: "updated", comments: "updated" } @appliedOps = ["mock-applied-ops"] - @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @track_changes_entries) - @TrackChangesManager.applyUpdate = sinon.stub().yields(null, @updated_track_changes_entries) + @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) + @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields() @HistoryManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) @@ -176,17 +176,17 @@ describe "UpdateManager", -> .calledWith(@project_id, @doc_id, @update, @lines, @version) .should.equal true - it "should update the track changes entries", -> - @TrackChangesManager.applyUpdate - .calledWith(@project_id, @doc_id, @track_changes_entries, @appliedOps) + it "should update the ranges", -> + @RangesManager.applyUpdate + .calledWith(@project_id, @doc_id, @ranges, @appliedOps) .should.equal true it "should save the document", -> @RedisManager.updateDocument - .calledWith(@doc_id, @updatedDocLines, @version, @appliedOps, @updated_track_changes_entries) + .calledWith(@doc_id, @updatedDocLines, @version, @appliedOps, @updated_ranges) .should.equal true - it "should push the applied ops into the track changes queue", -> + it "should push the applied ops into the history queue", -> @HistoryManager.pushUncompressedHistoryOps .calledWith(@project_id, @doc_id, @appliedOps) .should.equal true From 47b19818ff40b10fa181e6f11fa31b4dfb4e809f Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 12 Dec 2016 17:53:43 +0000 Subject: [PATCH 184/769] Add in new comment op type --- .../app/coffee/sharejs/types/text.coffee | 66 +++- .../coffee/ShareJS/TextTransformTests.coffee | 284 ++++++++++++++++++ 2 files changed, 344 insertions(+), 6 
deletions(-) create mode 100644 services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee diff --git a/services/document-updater/app/coffee/sharejs/types/text.coffee b/services/document-updater/app/coffee/sharejs/types/text.coffee index c64b4dfa68..dcf2ef4cfe 100644 --- a/services/document-updater/app/coffee/sharejs/types/text.coffee +++ b/services/document-updater/app/coffee/sharejs/types/text.coffee @@ -31,7 +31,8 @@ checkValidComponent = (c) -> i_type = typeof c.i d_type = typeof c.d - throw new Error 'component needs an i or d field' unless (i_type == 'string') ^ (d_type == 'string') + c_type = typeof c.c + throw new Error 'component needs an i, d or c field' unless (i_type == 'string') ^ (d_type == 'string') ^ (c_type == 'string') throw new Error 'position cannot be negative' unless c.p >= 0 @@ -44,11 +45,15 @@ text.apply = (snapshot, op) -> for component in op if component.i? snapshot = strInject snapshot, component.p, component.i - else + else if component.d? deleted = snapshot[component.p...(component.p + component.d.length)] throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..] - + else if component.c? + comment = snapshot[component.p...(component.p + component.c.length)] + throw new Error "Comment component '#{component.c}' does not match commented text '#{comment}'" unless component.c == comment + else + throw new Error "Unknown op type" snapshot @@ -112,7 +117,7 @@ transformPosition = (pos, c, insertAfter) -> pos + c.i.length else pos - else + else if c.d? # I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) # but I think its harder to read that way, and it compiles using ternary operators anyway # so its no slower written like this. @@ -122,6 +127,10 @@ transformPosition = (pos, c, insertAfter) -> c.p else pos - c.d.length + else if c.c? + pos + else + throw new Error("unknown op type") # Helper method to transform a cursor position as a result of an op. # @@ -143,7 +152,7 @@ text._tc = transformComponent = (dest, c, otherC, side) -> if c.i? append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')} - else # Delete + else if c.d? # Delete if otherC.i? # delete vs insert s = c.d if c.p < otherC.p @@ -152,7 +161,7 @@ text._tc = transformComponent = (dest, c, otherC, side) -> if s != '' append dest, {d:s, p:c.p + otherC.i.length} - else # Delete vs delete + else if otherC.d? # Delete vs delete if c.p >= otherC.p + otherC.d.length append dest, {d:c.d, p:c.p - otherC.d.length} else if c.p + c.d.length <= otherC.p @@ -177,6 +186,51 @@ text._tc = transformComponent = (dest, c, otherC, side) -> # This could be rewritten similarly to insert v delete, above. newC.p = transformPosition newC.p, otherC append dest, newC + + else if otherC.c? + append dest, c + + else + throw new Error("unknown op type") + + else if c.c? # Comment + if otherC.i? + if c.p < otherC.p < c.p + c.c.length + offset = otherC.p - c.p + new_c = (c.c[0..(offset-1)] + otherC.i + c.c[offset...]) + append dest, {c:new_c, p:c.p} + else + append dest, {c:c.c, p:transformPosition(c.p, otherC, true)} + + else if otherC.d? + if c.p >= otherC.p + otherC.d.length + append dest, {c:c.c, p:c.p - otherC.d.length} + else if c.p + c.c.length <= otherC.p + append dest, c + else # Delete overlaps comment + # They overlap somewhere. 
+ newC = {c:'', p:c.p} + if c.p < otherC.p + newC.c = c.c[...(otherC.p - c.p)] + if c.p + c.c.length > otherC.p + otherC.d.length + newC.c += c.c[(otherC.p + otherC.d.length - c.p)..] + + # This is entirely optional - just for a check that the deleted + # text in the two ops matches + intersectStart = Math.max c.p, otherC.p + intersectEnd = Math.min c.p + c.c.length, otherC.p + otherC.d.length + cIntersect = c.c[intersectStart - c.p...intersectEnd - c.p] + otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p] + throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect + + newC.p = transformPosition newC.p, otherC + append dest, newC + + else if otherC.c? + append dest, c + + else + throw new Error("unknown op type") dest diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee new file mode 100644 index 0000000000..e0f75d2756 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee @@ -0,0 +1,284 @@ +text = require "../../../../app/js/sharejs/types/text" +require("chai").should() +RangesTracker = require "../../../../app/js/RangesTracker" + +describe "ShareJS text type", -> + describe "transform", -> + describe "insert / insert", -> + it "with an insert before", -> + dest = [] + text._tc(dest, { i: "foo", p: 9 }, { i: "bar", p: 3 }) + dest.should.deep.equal [{ i: "foo", p: 12 }] + + it "with an insert after", -> + dest = [] + text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 9 }) + dest.should.deep.equal [{ i: "foo", p: 3 }] + + it "with an insert at the same place with side == 'right'", -> + dest = [] + text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'right') + dest.should.deep.equal [{ i: "foo", p: 6 }] + + it "with an insert at the same place with side == 'left'", -> + dest = [] + text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'left') + dest.should.deep.equal [{ i: "foo", p: 3 }] + + describe "insert / delete", -> + it "with a delete before", -> + dest = [] + text._tc(dest, { i: "foo", p: 9 }, { d: "bar", p: 3 }) + dest.should.deep.equal [{ i: "foo", p: 6 }] + + it "with a delete after", -> + dest = [] + text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 9 }) + dest.should.deep.equal [{ i: "foo", p: 3 }] + + it "with a delete at the same place with side == 'right'", -> + dest = [] + text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'right') + dest.should.deep.equal [{ i: "foo", p: 3 }] + + it "with a delete at the same place with side == 'left'", -> + dest = [] + + text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'left') + dest.should.deep.equal [{ i: "foo", p: 3 }] + + describe "delete / insert", -> + it "with an insert before", -> + dest = [] + text._tc(dest, { d: "foo", p: 9 }, { i: "bar", p: 3 }) + dest.should.deep.equal [{ d: "foo", p: 12 }] + + it "with an insert after", -> + dest = [] + text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 9 }) + dest.should.deep.equal [{ d: "foo", p: 3 }] + + it "with an insert at the same place with side == 'right'", -> + dest = [] + text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 3 }, 'right') + dest.should.deep.equal [{ d: "foo", p: 6 }] + + it "with an insert at the same place with side == 'left'", -> + dest = [] + text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 3 }, 'left') + dest.should.deep.equal [{ d: "foo", p: 6 }] + + it "with a delete that overlaps the insert location", -> 
+ dest = [] + text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 4 }) + dest.should.deep.equal [{ d: "f", p: 3 }, { d: "oo", p: 6 }] + + + describe "delete / delete", -> + it "with a delete before", -> + dest = [] + text._tc(dest, { d: "foo", p: 9 }, { d: "bar", p: 3 }) + dest.should.deep.equal [{ d: "foo", p: 6 }] + + it "with a delete after", -> + dest = [] + text._tc(dest, { d: "foo", p: 3 }, { d: "bar", p: 9 }) + dest.should.deep.equal [{ d: "foo", p: 3 }] + + it "with deleting the same content", -> + dest = [] + text._tc(dest, { d: "foo", p: 3 }, { d: "foo", p: 3 }, 'right') + dest.should.deep.equal [] + + it "with the delete overlapping before", -> + dest = [] + text._tc(dest, { d: "foobar", p: 3 }, { d: "abcfoo", p: 0 }, 'right') + dest.should.deep.equal [{ d: "bar", p: 0 }] + + it "with the delete overlapping after", -> + dest = [] + text._tc(dest, { d: "abcfoo", p: 3 }, { d: "foobar", p: 6 }) + dest.should.deep.equal [{ d: "abc", p: 3 }] + + it "with the delete overlapping the whole delete", -> + dest = [] + text._tc(dest, { d: "abcfoo123", p: 3 }, { d: "foo", p: 6 }) + dest.should.deep.equal [{ d: "abc123", p: 3 }] + + it "with the delete inside the whole delete", -> + dest = [] + text._tc(dest, { d: "foo", p: 6 }, { d: "abcfoo123", p: 3 }) + dest.should.deep.equal [] + + describe "comment / insert", -> + it "with an insert before", -> + dest = [] + text._tc(dest, { c: "foo", p: 9 }, { i: "bar", p: 3 }) + dest.should.deep.equal [{ c: "foo", p: 12 }] + + it "with an insert after", -> + dest = [] + text._tc(dest, { c: "foo", p: 3 }, { i: "bar", p: 9 }) + dest.should.deep.equal [{ c: "foo", p: 3 }] + + it "with an insert at the left edge", -> + dest = [] + text._tc(dest, { c: "foo", p: 3 }, { i: "bar", p: 3 }) + # RangesTracker doesn't inject inserts into comments on edges, so neither should we + dest.should.deep.equal [{ c: "foo", p: 6 }] + + it "with an insert at the right edge", -> + dest = [] + text._tc(dest, { c: "foo", p: 3 }, { i: "bar", p: 6 }) + # RangesTracker doesn't inject inserts into comments on edges, so neither should we + dest.should.deep.equal [{ c: "foo", p: 3 }] + + it "with an insert in the middle", -> + dest = [] + text._tc(dest, { c: "foo", p: 3 }, { i: "bar", p: 5 }) + dest.should.deep.equal [{ c: "fobaro", p: 3 }] + + describe "comment / delete", -> + it "with a delete before", -> + dest = [] + text._tc(dest, { c: "foo", p: 9 }, { d: "bar", p: 3 }) + dest.should.deep.equal [{ c: "foo", p: 6 }] + + it "with a delete after", -> + dest = [] + text._tc(dest, { c: "foo", p: 3 }, { d: "bar", p: 9 }) + dest.should.deep.equal [{ c: "foo", p: 3 }] + + it "with a delete overlapping the comment content before", -> + dest = [] + text._tc(dest, { c: "foobar", p: 6 }, { d: "123foo", p: 3 }) + dest.should.deep.equal [{ c: "bar", p: 3 }] + + it "with a delete overlapping the comment content after", -> + dest = [] + text._tc(dest, { c: "foobar", p: 6 }, { d: "bar123", p: 9 }) + dest.should.deep.equal [{ c: "foo", p: 6 }] + + it "with a delete overlapping the comment content in the middle", -> + dest = [] + text._tc(dest, { c: "foo123bar", p: 6 }, { d: "123", p: 9 }) + dest.should.deep.equal [{ c: "foobar", p: 6 }] + + it "with a delete overlapping the whole comment", -> + dest = [] + text._tc(dest, { c: "foo", p: 6 }, { d: "123foo456", p: 3 }) + dest.should.deep.equal [{ c: "", p: 3 }] + + describe "insert / comment", -> + it "should not do anything", -> + dest = [] + text._tc(dest, { i: "foo", p: 6 }, { c: "bar", p: 3 }) + dest.should.deep.equal [{ i: "foo", p: 6 }] + 
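# Note (sketch, not part of the test file): every case above is an instance of
# the OT convergence property that the generative block at the end of this file
# brute-forces. Transform each op against the other with opposite sides, and
# applying them in either order must produce the same snapshot:
#   left = []; right = []
#   text._tc(left, op1, op2, 'left')    # op1 rebased over op2
#   text._tc(right, op2, op1, 'right')  # op2 rebased over op1
#   text.apply(text.apply(snapshot, [op1]), right) == text.apply(text.apply(snapshot, [op2]), left)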
describe "comment / delete", -> + it "should not do anything", -> + dest = [] + text._tc(dest, { d: "foo", p: 6 }, { c: "bar", p: 3 }) + dest.should.deep.equal [{ d: "foo", p: 6 }] + + describe "comment / comment", -> + it "should not do anything", -> + dest = [] + text._tc(dest, { c: "foo", p: 6 }, { c: "bar", p: 3 }) + dest.should.deep.equal [{ c: "foo", p: 6 }] + + describe "apply", -> + it "should apply an insert", -> + text.apply("foo", [{ i: "bar", p: 2 }]).should.equal "fobaro" + + it "should apply a delete", -> + text.apply("foo123bar", [{ d: "123", p: 3 }]).should.equal "foobar" + + it "should do nothing with a comment", -> + text.apply("foo123bar", [{ c: "123", p: 3 }]).should.equal "foo123bar" + + it "should throw an error when deleted content does not match", -> + (() -> + text.apply("foo123bar", [{ d: "456", p: 3 }]) + ).should.throw(Error) + + it "should throw an error when comment content does not match", -> + (() -> + text.apply("foo123bar", [{ c: "456", p: 3 }]) + ).should.throw(Error) + + describe "applying ops and comments in different orders", -> + it "should not matter which op or comment is applied first", -> + transform = (op1, op2, side) -> + d = [] + text._tc(d, op1, op2, side) + return d + + applySnapshot = (snapshot, op) -> + return text.apply(snapshot, op) + + applyRanges = (rangesTracker, ops) -> + for op in ops + if op.c? + rangesTracker.addComment(op.p, op.c.length, {}) + else + rangesTracker.applyOp(op, {}) + return rangesTracker + + commentsEqual = (comments1, comments2) -> + return false if comments1.length != comments2.length + comments1.sort (a,b) -> + if a.offset - b.offset == 0 + return a.length - b.length + else + return a.offset - b.offset + comments2.sort (a,b) -> + if a.offset - b.offset == 0 + return a.length - b.length + else + return a.offset - b.offset + for comment1, i in comments1 + comment2 = comments2[i] + if comment1.offset != comment2.offset or comment1.length != comment2.length + return false + return true + + SNAPSHOT = "123" + + OPS = [] + # Insert ops + for p in [0..SNAPSHOT.length] + OPS.push {i: "a", p: p} + OPS.push {i: "bc", p: p} + for p in [0..(SNAPSHOT.length-1)] + for length in [1..(SNAPSHOT.length - p)] + OPS.push {d: SNAPSHOT.slice(p, p+length), p} + for p in [0..(SNAPSHOT.length-1)] + for length in [1..(SNAPSHOT.length - p)] + OPS.push {c: SNAPSHOT.slice(p, p+length), p} + + for op1 in OPS + for op2 in OPS + op1_t = transform(op1, op2, "left") + op2_t = transform(op2, op1, "right") + + rt12 = new RangesTracker() + snapshot12 = applySnapshot(applySnapshot(SNAPSHOT, [op1]), op2_t) + applyRanges(rt12, [op1]) + applyRanges(rt12, op2_t) + + rt21 = new RangesTracker() + snapshot21 = applySnapshot(applySnapshot(SNAPSHOT, [op2]), op1_t) + applyRanges(rt21, [op2]) + applyRanges(rt21, op1_t) + + if snapshot12 != snapshot21 + console.error {op1, op2, op1_t, op2_t, snapshot12, snapshot21}, "Ops are not consistent" + throw new Error("OT is inconsistent") + + if !commentsEqual(rt12.comments, rt21.comments) + console.log rt12.comments + console.log rt21.comments + console.error {op1, op2, op1_t, op2_t, rt12_comments: rt12.comments, rt21_comments: rt21.comments}, "Comments are not consistent" + throw new Error("OT is inconsistent") + \ No newline at end of file From 59a06cd798f44c6370b66a5523dd7d077a97a9ab Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 13 Dec 2016 15:51:47 +0000 Subject: [PATCH 185/769] Accept comments with thread id as an op type --- .../app/coffee/RangesTracker.coffee | 66 +++++++++++----- 
.../app/coffee/sharejs/types/text.coffee | 8 +- .../test/acceptance/coffee/RangesTests.coffee | 76 +++++++++++++++++++ .../coffee/ShareJS/TextTransformTests.coffee | 57 +++++++------- 4 files changed, 153 insertions(+), 54 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 6a3625fd09..1b865a600d 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -48,15 +48,6 @@ load = (EventEmitter) -> # sync with Ace ranges. @id = 0 - addComment: (offset, length, metadata) -> - # TODO: Don't allow overlapping comments? - @comments.push comment = { - id: @_newId() - offset, length, metadata - } - @emit "comment:added", comment - return comment - getComment: (comment_id) -> comment = null for c in @comments @@ -106,14 +97,32 @@ load = (EventEmitter) -> else if op.d? @applyDeleteToChanges(op, metadata) @applyDeleteToComments(op) + else if op.c? + @addComment(op, metadata) + else + throw new Error("unknown op type") + + addComment: (op, metadata) -> + # TODO: Don't allow overlapping comments? + @comments.push comment = { + id: @_newId() + op: # Copy because we'll modify in place + c: op.c + p: op.p + t: op.t + metadata + } + @emit "comment:added", comment + return comment applyInsertToComments: (op) -> for comment in @comments - if op.p <= comment.offset - comment.offset += op.i.length + if op.p <= comment.op.p + comment.op.p += op.i.length @emit "comment:moved", comment - else if op.p < comment.offset + comment.length - comment.length += op.i.length + else if op.p < comment.op.p + comment.op.c.length + offset = op.p - comment.op.p + comment.op.c = comment.op.c[0..(offset-1)] + op.i + comment.op.c[offset...] 
@emit "comment:moved", comment applyDeleteToComments: (op) -> @@ -121,20 +130,35 @@ load = (EventEmitter) -> op_length = op.d.length op_end = op.p + op_length for comment in @comments - comment_end = comment.offset + comment.length - if op_end <= comment.offset + comment_start = comment.op.p + comment_end = comment.op.p + comment.op.c.length + comment_length = comment_end - comment_start + if op_end <= comment_start # delete is fully before comment - comment.offset -= op_length + comment.op.p -= op_length @emit "comment:moved", comment else if op_start >= comment_end # delete is fully after comment, nothing to do else # delete and comment overlap - delete_length_before = Math.max(0, comment.offset - op_start) - delete_length_after = Math.max(0, op_end - comment_end) - delete_length_overlapping = op_length - delete_length_before - delete_length_after - comment.offset = Math.min(comment.offset, op_start) - comment.length -= delete_length_overlapping + if op_start <= comment_start + remaining_before = "" + else + remaining_before = comment.op.c.slice(0, op_start - comment_start) + if op_end >= comment_end + remaining_after = "" + else + remaining_after = comment.op.c.slice(op_end - comment_start) + + # Check deleted content matches delete op + deleted_comment = comment.op.c.slice(remaining_before.length, comment_length - remaining_after.length) + offset = Math.max(0, comment_start - op_start) + deleted_op_content = op.d.slice(offset).slice(0, deleted_comment.length) + if deleted_comment != deleted_op_content + throw new Error("deleted content does not match comment content") + + comment.op.p = Math.min(comment_start, op_start) + comment.op.c = remaining_before + remaining_after @emit "comment:moved", comment applyInsertToChanges: (op, metadata) -> diff --git a/services/document-updater/app/coffee/sharejs/types/text.coffee b/services/document-updater/app/coffee/sharejs/types/text.coffee index dcf2ef4cfe..2a3b79997d 100644 --- a/services/document-updater/app/coffee/sharejs/types/text.coffee +++ b/services/document-updater/app/coffee/sharejs/types/text.coffee @@ -198,18 +198,18 @@ text._tc = transformComponent = (dest, c, otherC, side) -> if c.p < otherC.p < c.p + c.c.length offset = otherC.p - c.p new_c = (c.c[0..(offset-1)] + otherC.i + c.c[offset...]) - append dest, {c:new_c, p:c.p} + append dest, {c:new_c, p:c.p, t: c.t} else - append dest, {c:c.c, p:transformPosition(c.p, otherC, true)} + append dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t} else if otherC.d? if c.p >= otherC.p + otherC.d.length - append dest, {c:c.c, p:c.p - otherC.d.length} + append dest, {c:c.c, p:c.p - otherC.d.length, t: c.t} else if c.p + c.c.length <= otherC.p append dest, c else # Delete overlaps comment # They overlap somewhere. 
- newC = {c:'', p:c.p} + newC = {c:'', p:c.p, t: c.t} if c.p < otherC.p newC.c = c.c[...(otherC.p - c.p)] if c.p + c.c.length > otherC.p + otherC.d.length diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index 95a7fae9d4..0cee1598aa 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -54,6 +54,82 @@ describe "Ranges", -> change.op.should.deep.equal { i: "456", p: 3 } change.metadata.user_id.should.equal @user_id done() + + describe "Adding comments", -> + describe "standalone", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["foo bar baz"] + } + @updates = [{ + doc: @doc.id + op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + v: 0 + }] + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + async.series jobs, (error) -> + throw error if error? + setTimeout done, 200 + + it "should update the ranges", (done) -> + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? + ranges = data.ranges + comment = ranges.comments[0] + comment.op.should.deep.equal { c: "bar", p: 4, t: @tid } + done() + + describe "with conflicting ops needing OT", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["foo bar baz"] + } + @updates = [{ + doc: @doc.id + op: [{ i: "ABC", p: 3 }] + v: 0 + meta: { user_id: @user_id } + }, { + doc: @doc.id + op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + v: 0 + }] + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + async.series jobs, (error) -> + throw error if error? + setTimeout done, 200 + + it "should update the comments with the OT shifted comment", (done) -> + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? 
+ ranges = data.ranges + comment = ranges.comments[0] + comment.op.should.deep.equal { c: "bar", p: 7, t: @tid } + done() describe "Loading ranges from persistence layer", -> before (done) -> diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee index e0f75d2756..81440bfe5b 100644 --- a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee @@ -3,6 +3,9 @@ text = require "../../../../app/js/sharejs/types/text" require("chai").should() RangesTracker = require "../../../../app/js/RangesTracker" describe "ShareJS text type", -> + beforeEach -> + @t = "mock-thread-id" + describe "transform", -> describe "insert / insert", -> it "with an insert before", -> @@ -113,61 +116,61 @@ describe "ShareJS text type", -> describe "comment / insert", -> it "with an insert before", -> dest = [] - text._tc(dest, { c: "foo", p: 9 }, { i: "bar", p: 3 }) - dest.should.deep.equal [{ c: "foo", p: 12 }] + text._tc(dest, { c: "foo", p: 9, @t }, { i: "bar", p: 3 }) + dest.should.deep.equal [{ c: "foo", p: 12, @t }] it "with an insert after", -> dest = [] - text._tc(dest, { c: "foo", p: 3 }, { i: "bar", p: 9 }) - dest.should.deep.equal [{ c: "foo", p: 3 }] + text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 9 }) + dest.should.deep.equal [{ c: "foo", p: 3, @t }] it "with an insert at the left edge", -> dest = [] - text._tc(dest, { c: "foo", p: 3 }, { i: "bar", p: 3 }) + text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 3 }) # RangesTracker doesn't inject inserts into comments on edges, so neither should we - dest.should.deep.equal [{ c: "foo", p: 6 }] + dest.should.deep.equal [{ c: "foo", p: 6, @t }] it "with an insert at the right edge", -> dest = [] - text._tc(dest, { c: "foo", p: 3 }, { i: "bar", p: 6 }) + text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 6 }) # RangesTracker doesn't inject inserts into comments on edges, so neither should we - dest.should.deep.equal [{ c: "foo", p: 3 }] + dest.should.deep.equal [{ c: "foo", p: 3, @t }] it "with an insert in the middle", -> dest = [] - text._tc(dest, { c: "foo", p: 3 }, { i: "bar", p: 5 }) - dest.should.deep.equal [{ c: "fobaro", p: 3 }] + text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 5 }) + dest.should.deep.equal [{ c: "fobaro", p: 3, @t }] describe "comment / delete", -> it "with a delete before", -> dest = [] - text._tc(dest, { c: "foo", p: 9 }, { d: "bar", p: 3 }) - dest.should.deep.equal [{ c: "foo", p: 6 }] + text._tc(dest, { c: "foo", p: 9, @t }, { d: "bar", p: 3 }) + dest.should.deep.equal [{ c: "foo", p: 6, @t }] it "with a delete after", -> dest = [] - text._tc(dest, { c: "foo", p: 3 }, { d: "bar", p: 9 }) - dest.should.deep.equal [{ c: "foo", p: 3 }] + text._tc(dest, { c: "foo", p: 3, @t }, { d: "bar", p: 9 }) + dest.should.deep.equal [{ c: "foo", p: 3, @t }] it "with a delete overlapping the comment content before", -> dest = [] - text._tc(dest, { c: "foobar", p: 6 }, { d: "123foo", p: 3 }) - dest.should.deep.equal [{ c: "bar", p: 3 }] + text._tc(dest, { c: "foobar", p: 6, @t }, { d: "123foo", p: 3 }) + dest.should.deep.equal [{ c: "bar", p: 3, @t }] it "with a delete overlapping the comment content after", -> dest = [] - text._tc(dest, { c: "foobar", p: 6 }, { d: "bar123", p: 9 }) - dest.should.deep.equal [{ c: "foo", p: 6 }] + text._tc(dest, { c: "foobar", p: 6, @t }, { d: "bar123", p: 9 }) + dest.should.deep.equal [{ c: "foo", p: 6, @t }] it "with a delete overlapping
the comment content in the middle", -> dest = [] - text._tc(dest, { c: "foo123bar", p: 6 }, { d: "123", p: 9 }) - dest.should.deep.equal [{ c: "foobar", p: 6 }] + text._tc(dest, { c: "foo123bar", p: 6, @t }, { d: "123", p: 9 }) + dest.should.deep.equal [{ c: "foobar", p: 6, @t }] it "with a delete overlapping the whole comment", -> dest = [] - text._tc(dest, { c: "foo", p: 6 }, { d: "123foo456", p: 3 }) - dest.should.deep.equal [{ c: "", p: 3 }] + text._tc(dest, { c: "foo", p: 6, @t }, { d: "123foo456", p: 3 }) + dest.should.deep.equal [{ c: "", p: 3, @t }] describe "comment / insert", -> it "should not do anything", -> @@ -219,10 +222,7 @@ describe "ShareJS text type", -> applyRanges = (rangesTracker, ops) -> for op in ops - if op.c? - rangesTracker.addComment(op.p, op.c.length, {}) - else - rangesTracker.applyOp(op, {}) + rangesTracker.applyOp(op, {}) return rangesTracker commentsEqual = (comments1, comments2) -> @@ -255,8 +255,8 @@ describe "ShareJS text type", -> OPS.push {d: SNAPSHOT.slice(p, p+length), p} for p in [0..(SNAPSHOT.length-1)] for length in [1..(SNAPSHOT.length - p)] - OPS.push {c: SNAPSHOT.slice(p, p+length), p} - + OPS.push {c: SNAPSHOT.slice(p, p+length), p, @t} + for op1 in OPS for op2 in OPS op1_t = transform(op1, op2, "left") @@ -281,4 +281,3 @@ describe "ShareJS text type", -> console.log rt21.comments console.error {op1, op2, op1_t, op2_t, rt12_comments: rt12.comments, rt21_comments: rt21.comments}, "Comments are not consistent" throw new Error("OT is inconsistent") - \ No newline at end of file From 0f13cb3aa7f91e6285393e44d5aeb5086cfe24af Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 6 Jan 2017 16:58:51 +0100 Subject: [PATCH 186/769] Support a {dr:...} op for deleting ranges --- .../app/coffee/RangesTracker.coffee | 80 ++++----- .../app/coffee/sharejs/types/text.coffee | 72 +++++--- .../test/acceptance/coffee/RangesTests.coffee | 168 ++++++++++-------- .../coffee/ShareJS/TextTransformTests.coffee | 92 ++++++++++ 4 files changed, 273 insertions(+), 139 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 1b865a600d..233f5ad989 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -35,18 +35,25 @@ load = (EventEmitter) -> # * Inserts by another user will not combine with inserts by the first user. If they are in the # middle of a previous insert by the first user, the original insert will be split into two. constructor: (@changes = [], @comments = []) -> - # Change objects have the following structure: - # { - # id: ... # Uniquely generated by us - # op: { # ShareJs style op tracking the offset (p) and content inserted (i) or deleted (d) - # i: "..." - # p: 42 - # } - # } - # - # Ids are used to uniquely identify a change, e.g. for updating it in the database, or keeping in - # sync with Ace ranges. 
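# A minimal sketch (values assumed for illustration) of the op shape this patch
# introduces: a {dr: ...} "delete range" component carries the text and position
# of an existing tracked change, and removes that change from @changes without
# touching the document text itself.
#
#   tracker = new RangesTracker(changes, comments)
#   tracker.applyOp { dr: "456", p: 3 }
#   # removes the tracked change whose op is { i: "456", p: 3 } (or d: "456");
#   # throws "no range to remove" if no change matches both text and position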
- @id = 0 + + @_increment: 0 + @newId: () -> + # Generate a Mongo ObjectId + # Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js + @_pid ?= Math.floor(Math.random() * (32767)) + @_machine ?= Math.floor(Math.random() * (16777216)) + timestamp = Math.floor(new Date().valueOf() / 1000) + @_increment++ + + timestamp = timestamp.toString(16) + machine = @_machine.toString(16) + pid = @_pid.toString(16) + increment = @_increment.toString(16) + + return '00000000'.substr(0, 8 - timestamp.length) + timestamp + + '000000'.substr(0, 6 - machine.length) + machine + + '0000'.substr(0, 4 - pid.length) + pid + + '000000'.substr(0, 6 - increment.length) + increment; getComment: (comment_id) -> comment = null @@ -56,19 +63,6 @@ load = (EventEmitter) -> break return comment - resolveCommentId: (comment_id, resolved_data) -> - comment = @getComment(comment_id) - return if !comment? - comment.metadata.resolved = true - comment.metadata.resolved_data = resolved_data - @emit "comment:resolved", comment - - unresolveCommentId: (comment_id) -> - comment = @getComment(comment_id) - return if !comment? - comment.metadata.resolved = false - @emit "comment:unresolved", comment - removeCommentId: (comment_id) -> comment = @getComment(comment_id) return if !comment? @@ -88,7 +82,7 @@ load = (EventEmitter) -> return if !change? @_removeChange(change) - applyOp: (op, metadata) -> + applyOp: (op, metadata = {}) -> metadata.ts ?= new Date() # Apply an op that has been applied to the document to our changes to keep them up to date if op.i? @@ -97,6 +91,8 @@ load = (EventEmitter) -> else if op.d? @applyDeleteToChanges(op, metadata) @applyDeleteToComments(op) + else if op.dr? + @applyDeleteRangeToChanges(op) else if op.c? @addComment(op, metadata) else @@ -105,7 +101,7 @@ load = (EventEmitter) -> addComment: (op, metadata) -> # TODO: Don't allow overlapping comments? 
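# For illustration, how newId() above composes an ObjectId-style id: four
# zero-padded hex fields, 8 + 6 + 4 + 6 = 24 characters. With assumed values
# timestamp = 0x588a9f00, machine = 0xabcdef, pid = 0x1a2b and the first
# increment:
#
#   RangesTracker.newId()
#   # -> "588a9f00" + "abcdef" + "1a2b" + "000001"
#   # -> "588a9f00abcdef1a2b000001"
#
# The '00000000'.substr(0, 8 - timestamp.length) pattern left-pads each field
# to a fixed width, so ids stay 24 characters and sort by creation time.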
@comments.push comment = { - id: @_newId() + id: RangesTracker.newId() op: # Copy because we'll modify in place c: op.c p: op.p @@ -394,28 +390,20 @@ load = (EventEmitter) -> if moved_changes.length > 0 @emit "changes:moved", moved_changes - _newId: () -> - # Generate a Mongo ObjectId - # Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js - @_pid ?= Math.floor(Math.random() * (32767)) - @_machine ?= Math.floor(Math.random() * (16777216)) - timestamp = Math.floor(new Date().valueOf() / 1000) - @_increment ?= 0 - @_increment++ - - timestamp = timestamp.toString(16) - machine = @_machine.toString(16) - pid = @_pid.toString(16) - increment = @_increment.toString(16) - - return '00000000'.substr(0, 8 - timestamp.length) + timestamp + - '000000'.substr(0, 6 - machine.length) + machine + - '0000'.substr(0, 4 - pid.length) + pid + - '000000'.substr(0, 6 - increment.length) + increment; + applyDeleteRangeToChanges: (op) -> + remove_changes = [] + for change in @changes + change_text = change.op.i or change.op.d + if op.p == change.op.p and op.dr == change_text + remove_changes.push change + if remove_changes.length == 0 + throw new Error("no range to remove") + for change in remove_changes + @_removeChange(change) _addOp: (op, metadata) -> change = { - id: @_newId() + id: RangesTracker.newId() op: op metadata: metadata } diff --git a/services/document-updater/app/coffee/sharejs/types/text.coffee b/services/document-updater/app/coffee/sharejs/types/text.coffee index 2a3b79997d..84303b3307 100644 --- a/services/document-updater/app/coffee/sharejs/types/text.coffee +++ b/services/document-updater/app/coffee/sharejs/types/text.coffee @@ -32,7 +32,8 @@ checkValidComponent = (c) -> i_type = typeof c.i d_type = typeof c.d c_type = typeof c.c - throw new Error 'component needs an i, d or c field' unless (i_type == 'string') ^ (d_type == 'string') ^ (c_type == 'string') + dr_type = typeof c.dr + throw new Error 'component needs an i, d, c or dr field' unless (i_type == 'string') ^ (d_type == 'string') ^ (c_type == 'string') ^ (dr_type == 'string') throw new Error 'position cannot be negative' unless c.p >= 0 @@ -40,6 +41,26 @@ checkValidOp = (op) -> checkValidComponent(c) for c in op true +componentText = (c) -> + if c.c? + text = c.c + if c.dr? + text = c.dr + throw new Error("invalid component") if !text? + return text + +duplicateComponent = (c) -> + newC = {} + for key, value of c + newC[key] = value + return newC + +setComponentText = (c, text) -> + if c.c? + c.c = text + if c.dr? + c.dr = text + text.apply = (snapshot, op) -> checkValidOp op for component in op @@ -49,9 +70,10 @@ text.apply = (snapshot, op) -> deleted = snapshot[component.p...(component.p + component.d.length)] throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..] - else if component.c? - comment = snapshot[component.p...(component.p + component.c.length)] - throw new Error "Comment component '#{component.c}' does not match commented text '#{comment}'" unless component.c == comment + else if component.c? or component.dr? 
+ c_text = componentText(component) + range = snapshot[component.p...(component.p + c_text.length)] + throw new Error "Range component '#{c_text}' does not match range text '#{range}'" unless c_text == range else throw new Error "Unknown op type" snapshot @@ -127,7 +149,7 @@ transformPosition = (pos, c, insertAfter) -> c.p else pos - c.d.length - else if c.c? + else if c.c? or c.dr? pos else throw new Error("unknown op type") @@ -187,46 +209,54 @@ text._tc = transformComponent = (dest, c, otherC, side) -> newC.p = transformPosition newC.p, otherC append dest, newC - else if otherC.c? + else if otherC.c? or otherC.dr? append dest, c else throw new Error("unknown op type") - else if c.c? # Comment + else if c.c? or c.dr? # Comment or delete range + c_text = componentText(c) if otherC.i? - if c.p < otherC.p < c.p + c.c.length + if c.p < otherC.p < c.p + c_text.length offset = otherC.p - c.p - new_c = (c.c[0..(offset-1)] + otherC.i + c.c[offset...]) - append dest, {c:new_c, p:c.p, t: c.t} + newText = (c_text[0..(offset-1)] + otherC.i + c_text[offset...]) + newC = duplicateComponent(c) + setComponentText(newC, newText) + append dest, newC else - append dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t} + newC = duplicateComponent(c) + newC.p = transformPosition(c.p, otherC, true) + append dest, newC else if otherC.d? if c.p >= otherC.p + otherC.d.length - append dest, {c:c.c, p:c.p - otherC.d.length, t: c.t} - else if c.p + c.c.length <= otherC.p + newC = duplicateComponent(c) + newC.p = c.p - otherC.d.length + append dest, newC + else if c.p + c_text.length <= otherC.p append dest, c else # Delete overlaps comment # They overlap somewhere. - newC = {c:'', p:c.p, t: c.t} + newC = duplicateComponent(c) + setComponentText(newC, '') if c.p < otherC.p - newC.c = c.c[...(otherC.p - c.p)] - if c.p + c.c.length > otherC.p + otherC.d.length - newC.c += c.c[(otherC.p + otherC.d.length - c.p)..] + setComponentText(newC, c_text[...(otherC.p - c.p)]) + if c.p + c_text.length > otherC.p + otherC.d.length + setComponentText(newC, componentText(newC) + c_text[(otherC.p + otherC.d.length - c.p)..]) # This is entirely optional - just for a check that the deleted # text in the two ops matches intersectStart = Math.max c.p, otherC.p - intersectEnd = Math.min c.p + c.c.length, otherC.p + otherC.d.length - cIntersect = c.c[intersectStart - c.p...intersectEnd - c.p] + intersectEnd = Math.min c.p + c_text.length, otherC.p + otherC.d.length + cIntersect = c_text[intersectStart - c.p...intersectEnd - c.p] otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p] - throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect + throw new Error 'Delete op text does not match range being modified' unless cIntersect == otherIntersect newC.p = transformPosition newC.p, otherC append dest, newC - else if otherC.c? + else if otherC.c? or otherC.dr? 
append dest, c else diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index 0cee1598aa..a5cbee7569 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -1,6 +1,7 @@ sinon = require "sinon" chai = require("chai") chai.should() +expect = chai.expect async = require "async" rclient = require("redis").createClient() @@ -54,82 +55,105 @@ describe "Ranges", -> change.op.should.deep.equal { i: "456", p: 3 } change.metadata.user_id.should.equal @user_id done() + + describe "removing ranges", -> + it "should delete the range (and perform OT)", (done) -> + @conflicting_update = { + doc: @doc.id + op: [{ i: "X", p: 1 }] + v: 3 + meta: { user_id: @user_id } + } + @delete_range = { + doc: @doc.id + op: [{ dr: "456", p: 3 }] + v: 3 + meta: { user_id: @user_id } + } + DocUpdaterClient.sendUpdate @project_id, @doc.id, @conflicting_update, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc.id, @delete_range, (error) => + throw error if error? + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? + expect(data.ranges.changes).to.be.undefined + done() - describe "Adding comments", -> - describe "standalone", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @doc = { - id: DocUpdaterClient.randomId() - lines: ["foo bar baz"] - } - @updates = [{ - doc: @doc.id - op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] - v: 0 - }] - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines - version: 0 - } - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + describe "Adding comments", -> + describe "standalone", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["foo bar baz"] + } + @updates = [{ + doc: @doc.id + op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + v: 0 + }] + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + async.series jobs, (error) -> throw error if error? - async.series jobs, (error) -> - throw error if error? - setTimeout done, 200 - - it "should update the ranges", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - ranges = data.ranges - comment = ranges.comments[0] - comment.op.should.deep.equal { c: "bar", p: 4, t: @tid } - done() + setTimeout done, 200 + + it "should update the ranges", (done) -> + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? 
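# Worked example for the "removing ranges" test above: both updates are sent
# at version 3, so the {dr} op is transformed against the concurrent insert
# { i: "X", p: 1 } before being applied. The insert sits before the range, so
# only the position shifts, by the insert's length (1):
#
#   transform [{ dr: "456", p: 3 }], [{ i: "X", p: 1 }]
#   # -> [{ dr: "456", p: 4 }]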
+ ranges = data.ranges + comment = ranges.comments[0] + comment.op.should.deep.equal { c: "bar", p: 4, t: @tid } + done() - describe "with conflicting ops needing OT", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @doc = { - id: DocUpdaterClient.randomId() - lines: ["foo bar baz"] - } - @updates = [{ - doc: @doc.id - op: [{ i: "ABC", p: 3 }] - v: 0 - meta: { user_id: @user_id } - }, { - doc: @doc.id - op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] - v: 0 - }] - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines - version: 0 - } - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + describe "with conflicting ops needing OT", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["foo bar baz"] + } + @updates = [{ + doc: @doc.id + op: [{ i: "ABC", p: 3 }] + v: 0 + meta: { user_id: @user_id } + }, { + doc: @doc.id + op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + v: 0 + }] + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + async.series jobs, (error) -> throw error if error? - async.series jobs, (error) -> - throw error if error? - setTimeout done, 200 - - it "should update the comments with the OT shifted comment", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - ranges = data.ranges - comment = ranges.comments[0] - comment.op.should.deep.equal { c: "bar", p: 7, t: @tid } - done() + setTimeout done, 200 + + it "should update the comments with the OT shifted comment", (done) -> + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? 
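# Why the expected position below is 7: the comment { c: "bar", p: 4, t: tid }
# is transformed against the concurrent insert { i: "ABC", p: 3 }. The insert
# begins at or before the comment rather than inside it, so the whole comment
# shifts right by the insert's length (3):
#
#   transform [{ c: "bar", p: 4, t: tid }], [{ i: "ABC", p: 3 }]
#   # -> [{ c: "bar", p: 7, t: tid }]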
+ ranges = data.ranges + comment = ranges.comments[0] + comment.op.should.deep.equal { c: "bar", p: 7, t: @tid } + done() describe "Loading ranges from persistence layer", -> before (done) -> diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee index 81440bfe5b..2d9dcf94a0 100644 --- a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee @@ -189,6 +189,95 @@ describe "ShareJS text type", -> dest = [] text._tc(dest, { c: "foo", p: 6 }, { c: "bar", p: 3 }) dest.should.deep.equal [{ c: "foo", p: 6 }] + + describe "comment / delete_range", -> + it "should not do anything", -> + dest = [] + text._tc(dest, { c: "foo", p: 6 }, { dr: "bar", p: 3 }) + dest.should.deep.equal [{ c: "foo", p: 6 }] + + describe "delete_range / insert", -> + it "with an insert before", -> + dest = [] + text._tc(dest, { dr: "foo", p: 9 }, { i: "bar", p: 3 }) + dest.should.deep.equal [{ dr: "foo", p: 12 }] + + it "with an insert after", -> + dest = [] + text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 9 }) + dest.should.deep.equal [{ dr: "foo", p: 3 }] + + it "with an insert at the left edge", -> + dest = [] + text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 3 }) + # RangesTracker doesn't inject inserts into comments on edges, so neither should we + dest.should.deep.equal [{ dr: "foo", p: 6 }] + + it "with an insert at the right edge", -> + dest = [] + text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 6 }) + # RangesTracker doesn't inject inserts into comments on edges, so neither should we + dest.should.deep.equal [{ dr: "foo", p: 3 }] + + it "with an insert in the middle", -> + dest = [] + text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 5 }) + dest.should.deep.equal [{ dr: "fobaro", p: 3 }] + + describe "delete_range / delete", -> + it "with a delete before", -> + dest = [] + text._tc(dest, { dr: "foo", p: 9 }, { d: "bar", p: 3 }) + dest.should.deep.equal [{ dr: "foo", p: 6 }] + + it "with a delete after", -> + dest = [] + text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 9 }) + dest.should.deep.equal [{ dr: "foo", p: 3 }] + + it "with a delete overlapping the comment content before", -> + dest = [] + text._tc(dest, { dr: "foobar", p: 6 }, { d: "123foo", p: 3 }) + dest.should.deep.equal [{ dr: "bar", p: 3 }] + + it "with a delete overlapping the comment content after", -> + dest = [] + text._tc(dest, { dr: "foobar", p: 6 }, { d: "bar123", p: 9 }) + dest.should.deep.equal [{ dr: "foo", p: 6 }] + + it "with a delete overlapping the comment content in the middle", -> + dest = [] + text._tc(dest, { dr: "foo123bar", p: 6 }, { d: "123", p: 9 }) + dest.should.deep.equal [{ dr: "foobar", p: 6 }] + + it "with a delete overlapping the whole comment", -> + dest = [] + text._tc(dest, { dr: "foo", p: 6 }, { d: "123foo456", p: 3 }) + dest.should.deep.equal [{ dr: "", p: 3 }] + + describe "delete_range / insert", -> + it "should not do anything", -> + dest = [] + text._tc(dest, { i: "foo", p: 6 }, { dr: "bar", p: 3 }) + dest.should.deep.equal [{ i: "foo", p: 6 }] + + describe "delete_range / delete", -> + it "should not do anything", -> + dest = [] + text._tc(dest, { d: "foo", p: 6 }, { dr: "bar", p: 3 }) + dest.should.deep.equal [{ d: "foo", p: 6 }] + + describe "delete_range / comment", -> + it "should not do anything", -> + dest = [] + text._tc(dest, { c: "foo", p: 6 }, { dr: "bar", p: 3 }) + 
dest.should.deep.equal [{ c: "foo", p: 6 }] + + describe "delete_range / delete_range", -> + it "should not do anything", -> + dest = [] + text._tc(dest, { dr: "foo", p: 6 }, { dr: "bar", p: 3 }) + dest.should.deep.equal [{ dr: "foo", p: 6 }] describe "apply", -> it "should apply an insert", -> @@ -199,6 +288,9 @@ describe "ShareJS text type", -> it "should do nothing with a comment", -> text.apply("foo123bar", [{ c: "123", p: 3 }]).should.equal "foo123bar" + + it "should do nothing with a delete_range", -> + text.apply("foo123bar", [{ dr: "123", p: 3 }]).should.equal "foo123bar" it "should throw an error when deleted content does not match", -> (() -> From 2c7029cc50612f00d21bbbe6633eda46f2c32031 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 9 Jan 2017 09:24:19 +0100 Subject: [PATCH 187/769] Revert "Support a {dr:...} op for deleting ranges" This reverts commit 24c58e5ad430e0240533cc1e5c21122859fe8dc9. --- .../app/coffee/RangesTracker.coffee | 80 +++++---- .../app/coffee/sharejs/types/text.coffee | 72 +++----- .../test/acceptance/coffee/RangesTests.coffee | 168 ++++++++---------- .../coffee/ShareJS/TextTransformTests.coffee | 92 ---------- 4 files changed, 139 insertions(+), 273 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 233f5ad989..1b865a600d 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -35,25 +35,18 @@ load = (EventEmitter) -> # * Inserts by another user will not combine with inserts by the first user. If they are in the # middle of a previous insert by the first user, the original insert will be split into two. constructor: (@changes = [], @comments = []) -> - - @_increment: 0 - @newId: () -> - # Generate a Mongo ObjectId - # Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js - @_pid ?= Math.floor(Math.random() * (32767)) - @_machine ?= Math.floor(Math.random() * (16777216)) - timestamp = Math.floor(new Date().valueOf() / 1000) - @_increment++ - - timestamp = timestamp.toString(16) - machine = @_machine.toString(16) - pid = @_pid.toString(16) - increment = @_increment.toString(16) - - return '00000000'.substr(0, 8 - timestamp.length) + timestamp + - '000000'.substr(0, 6 - machine.length) + machine + - '0000'.substr(0, 4 - pid.length) + pid + - '000000'.substr(0, 6 - increment.length) + increment; + # Change objects have the following structure: + # { + # id: ... # Uniquely generated by us + # op: { # ShareJs style op tracking the offset (p) and content inserted (i) or deleted (d) + # i: "..." + # p: 42 + # } + # } + # + # Ids are used to uniquely identify a change, e.g. for updating it in the database, or keeping in + # sync with Ace ranges. + @id = 0 getComment: (comment_id) -> comment = null @@ -63,6 +56,19 @@ load = (EventEmitter) -> break return comment + resolveCommentId: (comment_id, resolved_data) -> + comment = @getComment(comment_id) + return if !comment? + comment.metadata.resolved = true + comment.metadata.resolved_data = resolved_data + @emit "comment:resolved", comment + + unresolveCommentId: (comment_id) -> + comment = @getComment(comment_id) + return if !comment? + comment.metadata.resolved = false + @emit "comment:unresolved", comment + removeCommentId: (comment_id) -> comment = @getComment(comment_id) return if !comment? @@ -82,7 +88,7 @@ load = (EventEmitter) -> return if !change? 
@_removeChange(change) - applyOp: (op, metadata = {}) -> + applyOp: (op, metadata) -> metadata.ts ?= new Date() # Apply an op that has been applied to the document to our changes to keep them up to date if op.i? @@ -91,8 +97,6 @@ load = (EventEmitter) -> else if op.d? @applyDeleteToChanges(op, metadata) @applyDeleteToComments(op) - else if op.dr? - @applyDeleteRangeToChanges(op) else if op.c? @addComment(op, metadata) else @@ -101,7 +105,7 @@ load = (EventEmitter) -> addComment: (op, metadata) -> # TODO: Don't allow overlapping comments? @comments.push comment = { - id: RangesTracker.newId() + id: @_newId() op: # Copy because we'll modify in place c: op.c p: op.p @@ -390,20 +394,28 @@ load = (EventEmitter) -> if moved_changes.length > 0 @emit "changes:moved", moved_changes - applyDeleteRangeToChanges: (op) -> - remove_changes = [] - for change in @changes - change_text = change.op.i or change.op.d - if op.p == change.op.p and op.dr == change_text - remove_changes.push change - if remove_changes.length == 0 - throw new Error("no range to remove") - for change in remove_changes - @_removeChange(change) + _newId: () -> + # Generate a Mongo ObjectId + # Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js + @_pid ?= Math.floor(Math.random() * (32767)) + @_machine ?= Math.floor(Math.random() * (16777216)) + timestamp = Math.floor(new Date().valueOf() / 1000) + @_increment ?= 0 + @_increment++ + + timestamp = timestamp.toString(16) + machine = @_machine.toString(16) + pid = @_pid.toString(16) + increment = @_increment.toString(16) + + return '00000000'.substr(0, 8 - timestamp.length) + timestamp + + '000000'.substr(0, 6 - machine.length) + machine + + '0000'.substr(0, 4 - pid.length) + pid + + '000000'.substr(0, 6 - increment.length) + increment; _addOp: (op, metadata) -> change = { - id: RangesTracker.newId() + id: @_newId() op: op metadata: metadata } diff --git a/services/document-updater/app/coffee/sharejs/types/text.coffee b/services/document-updater/app/coffee/sharejs/types/text.coffee index 84303b3307..2a3b79997d 100644 --- a/services/document-updater/app/coffee/sharejs/types/text.coffee +++ b/services/document-updater/app/coffee/sharejs/types/text.coffee @@ -32,8 +32,7 @@ checkValidComponent = (c) -> i_type = typeof c.i d_type = typeof c.d c_type = typeof c.c - dr_type = typeof c.dr - throw new Error 'component needs an i, d, c or dr field' unless (i_type == 'string') ^ (d_type == 'string') ^ (c_type == 'string') ^ (dr_type == 'string') + throw new Error 'component needs an i, d or c field' unless (i_type == 'string') ^ (d_type == 'string') ^ (c_type == 'string') throw new Error 'position cannot be negative' unless c.p >= 0 @@ -41,26 +40,6 @@ checkValidOp = (op) -> checkValidComponent(c) for c in op true -componentText = (c) -> - if c.c? - text = c.c - if c.dr? - text = c.dr - throw new Error("invalid component") if !text? - return text - -duplicateComponent = (c) -> - newC = {} - for key, value of c - newC[key] = value - return newC - -setComponentText = (c, text) -> - if c.c? - c.c = text - if c.dr? - c.dr = text - text.apply = (snapshot, op) -> checkValidOp op for component in op @@ -70,10 +49,9 @@ text.apply = (snapshot, op) -> deleted = snapshot[component.p...(component.p + component.d.length)] throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..] - else if component.c? or component.dr? 
- c_text = componentText(component) - range = snapshot[component.p...(component.p + c_text.length)] - throw new Error "Range component '#{c_text}' does not match range text '#{range}'" unless c_text == range + else if component.c? + comment = snapshot[component.p...(component.p + component.c.length)] + throw new Error "Comment component '#{component.c}' does not match commented text '#{comment}'" unless component.c == comment else throw new Error "Unknown op type" snapshot @@ -149,7 +127,7 @@ transformPosition = (pos, c, insertAfter) -> c.p else pos - c.d.length - else if c.c? or c.dr? + else if c.c? pos else throw new Error("unknown op type") @@ -209,54 +187,46 @@ text._tc = transformComponent = (dest, c, otherC, side) -> newC.p = transformPosition newC.p, otherC append dest, newC - else if otherC.c? or otherC.dr? + else if otherC.c? append dest, c else throw new Error("unknown op type") - else if c.c? or c.dr? # Comment or delete range - c_text = componentText(c) + else if c.c? # Comment if otherC.i? - if c.p < otherC.p < c.p + c_text.length + if c.p < otherC.p < c.p + c.c.length offset = otherC.p - c.p - newText = (c_text[0..(offset-1)] + otherC.i + c_text[offset...]) - newC = duplicateComponent(c) - setComponentText(newC, newText) - append dest, newC + new_c = (c.c[0..(offset-1)] + otherC.i + c.c[offset...]) + append dest, {c:new_c, p:c.p, t: c.t} else - newC = duplicateComponent(c) - newC.p = transformPosition(c.p, otherC, true) - append dest, newC + append dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t} else if otherC.d? if c.p >= otherC.p + otherC.d.length - newC = duplicateComponent(c) - newC.p = c.p - otherC.d.length - append dest, newC - else if c.p + c_text.length <= otherC.p + append dest, {c:c.c, p:c.p - otherC.d.length, t: c.t} + else if c.p + c.c.length <= otherC.p append dest, c else # Delete overlaps comment # They overlap somewhere. - newC = duplicateComponent(c) - setComponentText(newC, '') + newC = {c:'', p:c.p, t: c.t} if c.p < otherC.p - setComponentText(newC, c_text[...(otherC.p - c.p)]) - if c.p + c_text.length > otherC.p + otherC.d.length - setComponentText(newC, componentText(newC) + c_text[(otherC.p + otherC.d.length - c.p)..]) + newC.c = c.c[...(otherC.p - c.p)] + if c.p + c.c.length > otherC.p + otherC.d.length + newC.c += c.c[(otherC.p + otherC.d.length - c.p)..] # This is entirely optional - just for a check that the deleted # text in the two ops matches intersectStart = Math.max c.p, otherC.p - intersectEnd = Math.min c.p + c_text.length, otherC.p + otherC.d.length - cIntersect = c_text[intersectStart - c.p...intersectEnd - c.p] + intersectEnd = Math.min c.p + c.c.length, otherC.p + otherC.d.length + cIntersect = c.c[intersectStart - c.p...intersectEnd - c.p] otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p] - throw new Error 'Delete op text does not match range being modified' unless cIntersect == otherIntersect + throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect newC.p = transformPosition newC.p, otherC append dest, newC - else if otherC.c? or otherC.dr? + else if otherC.c? 
append dest, c else diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index a5cbee7569..0cee1598aa 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -1,7 +1,6 @@ sinon = require "sinon" chai = require("chai") chai.should() -expect = chai.expect async = require "async" rclient = require("redis").createClient() @@ -55,105 +54,82 @@ describe "Ranges", -> change.op.should.deep.equal { i: "456", p: 3 } change.metadata.user_id.should.equal @user_id done() - - describe "removing ranges", -> - it "should delete the range (and perform OT)", (done) -> - @conflicting_update = { - doc: @doc.id - op: [{ i: "X", p: 1 }] - v: 3 - meta: { user_id: @user_id } - } - @delete_range = { - doc: @doc.id - op: [{ dr: "456", p: 3 }] - v: 3 - meta: { user_id: @user_id } - } - DocUpdaterClient.sendUpdate @project_id, @doc.id, @conflicting_update, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc.id, @delete_range, (error) => - throw error if error? - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - expect(data.ranges.changes).to.be.undefined - done() - describe "Adding comments", -> - describe "standalone", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @doc = { - id: DocUpdaterClient.randomId() - lines: ["foo bar baz"] - } - @updates = [{ - doc: @doc.id - op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] - v: 0 - }] - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines - version: 0 - } - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => - throw error if error? - async.series jobs, (error) -> + describe "Adding comments", -> + describe "standalone", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["foo bar baz"] + } + @updates = [{ + doc: @doc.id + op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + v: 0 + }] + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => throw error if error? - setTimeout done, 200 - - it "should update the ranges", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - ranges = data.ranges - comment = ranges.comments[0] - comment.op.should.deep.equal { c: "bar", p: 4, t: @tid } - done() + async.series jobs, (error) -> + throw error if error? + setTimeout done, 200 + + it "should update the ranges", (done) -> + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? 
+ ranges = data.ranges + comment = ranges.comments[0] + comment.op.should.deep.equal { c: "bar", p: 4, t: @tid } + done() - describe "with conflicting ops needing OT", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @doc = { - id: DocUpdaterClient.randomId() - lines: ["foo bar baz"] - } - @updates = [{ - doc: @doc.id - op: [{ i: "ABC", p: 3 }] - v: 0 - meta: { user_id: @user_id } - }, { - doc: @doc.id - op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] - v: 0 - }] - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines - version: 0 - } - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => - throw error if error? - async.series jobs, (error) -> + describe "with conflicting ops needing OT", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["foo bar baz"] + } + @updates = [{ + doc: @doc.id + op: [{ i: "ABC", p: 3 }] + v: 0 + meta: { user_id: @user_id } + }, { + doc: @doc.id + op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + v: 0 + }] + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => throw error if error? - setTimeout done, 200 - - it "should update the comments with the OT shifted comment", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - ranges = data.ranges - comment = ranges.comments[0] - comment.op.should.deep.equal { c: "bar", p: 7, t: @tid } - done() + async.series jobs, (error) -> + throw error if error? + setTimeout done, 200 + + it "should update the comments with the OT shifted comment", (done) -> + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? 
+ ranges = data.ranges + comment = ranges.comments[0] + comment.op.should.deep.equal { c: "bar", p: 7, t: @tid } + done() describe "Loading ranges from persistence layer", -> before (done) -> diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee index 2d9dcf94a0..81440bfe5b 100644 --- a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee @@ -189,95 +189,6 @@ describe "ShareJS text type", -> dest = [] text._tc(dest, { c: "foo", p: 6 }, { c: "bar", p: 3 }) dest.should.deep.equal [{ c: "foo", p: 6 }] - - describe "comment / delete_range", -> - it "should not do anything", -> - dest = [] - text._tc(dest, { c: "foo", p: 6 }, { dr: "bar", p: 3 }) - dest.should.deep.equal [{ c: "foo", p: 6 }] - - describe "delete_range / insert", -> - it "with an insert before", -> - dest = [] - text._tc(dest, { dr: "foo", p: 9 }, { i: "bar", p: 3 }) - dest.should.deep.equal [{ dr: "foo", p: 12 }] - - it "with an insert after", -> - dest = [] - text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 9 }) - dest.should.deep.equal [{ dr: "foo", p: 3 }] - - it "with an insert at the left edge", -> - dest = [] - text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 3 }) - # RangesTracker doesn't inject inserts into comments on edges, so neither should we - dest.should.deep.equal [{ dr: "foo", p: 6 }] - - it "with an insert at the right edge", -> - dest = [] - text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 6 }) - # RangesTracker doesn't inject inserts into comments on edges, so neither should we - dest.should.deep.equal [{ dr: "foo", p: 3 }] - - it "with an insert in the middle", -> - dest = [] - text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 5 }) - dest.should.deep.equal [{ dr: "fobaro", p: 3 }] - - describe "delete_range / delete", -> - it "with a delete before", -> - dest = [] - text._tc(dest, { dr: "foo", p: 9 }, { d: "bar", p: 3 }) - dest.should.deep.equal [{ dr: "foo", p: 6 }] - - it "with a delete after", -> - dest = [] - text._tc(dest, { dr: "foo", p: 3 }, { i: "bar", p: 9 }) - dest.should.deep.equal [{ dr: "foo", p: 3 }] - - it "with a delete overlapping the comment content before", -> - dest = [] - text._tc(dest, { dr: "foobar", p: 6 }, { d: "123foo", p: 3 }) - dest.should.deep.equal [{ dr: "bar", p: 3 }] - - it "with a delete overlapping the comment content after", -> - dest = [] - text._tc(dest, { dr: "foobar", p: 6 }, { d: "bar123", p: 9 }) - dest.should.deep.equal [{ dr: "foo", p: 6 }] - - it "with a delete overlapping the comment content in the middle", -> - dest = [] - text._tc(dest, { dr: "foo123bar", p: 6 }, { d: "123", p: 9 }) - dest.should.deep.equal [{ dr: "foobar", p: 6 }] - - it "with a delete overlapping the whole comment", -> - dest = [] - text._tc(dest, { dr: "foo", p: 6 }, { d: "123foo456", p: 3 }) - dest.should.deep.equal [{ dr: "", p: 3 }] - - describe "delete_range / insert", -> - it "should not do anything", -> - dest = [] - text._tc(dest, { i: "foo", p: 6 }, { dr: "bar", p: 3 }) - dest.should.deep.equal [{ i: "foo", p: 6 }] - - describe "delete_range / delete", -> - it "should not do anything", -> - dest = [] - text._tc(dest, { d: "foo", p: 6 }, { dr: "bar", p: 3 }) - dest.should.deep.equal [{ d: "foo", p: 6 }] - - describe "delete_range / comment", -> - it "should not do anything", -> - dest = [] - text._tc(dest, { c: "foo", p: 6 }, { dr: "bar", p: 3 }) - 
dest.should.deep.equal [{ c: "foo", p: 6 }] - - describe "delete_range / delete_range", -> - it "should not do anything", -> - dest = [] - text._tc(dest, { dr: "foo", p: 6 }, { dr: "bar", p: 3 }) - dest.should.deep.equal [{ dr: "foo", p: 6 }] describe "apply", -> it "should apply an insert", -> @@ -288,9 +199,6 @@ describe "ShareJS text type", -> it "should do nothing with a comment", -> text.apply("foo123bar", [{ c: "123", p: 3 }]).should.equal "foo123bar" - - it "should do nothing with a delete_range", -> - text.apply("foo123bar", [{ dr: "123", p: 3 }]).should.equal "foo123bar" it "should throw an error when deleted content does not match", -> (() -> From 7cac2f7d76079150e078a33a2c9e8c4c7c597bb0 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 9 Jan 2017 10:46:58 +0100 Subject: [PATCH 188/769] Generate deterministic range ids based on seed --- .../app/coffee/RangesManager.coffee | 2 + .../app/coffee/RangesTracker.coffee | 63 +++++-------------- .../test/acceptance/coffee/RangesTests.coffee | 7 ++- 3 files changed, 23 insertions(+), 49 deletions(-) diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 1e19a63b0d..ac1dcbe75f 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -8,6 +8,8 @@ module.exports = RangesManager = rangesTracker = new RangesTracker(changes, comments) for update in updates rangesTracker.track_changes = !!update.meta.tc + if !!update.meta.tc + rangesTracker.setIdSeed(update.meta.tc) for op in update.op rangesTracker.applyOp(op, { user_id: update.meta?.user_id }) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 1b865a600d..36ef621493 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -35,18 +35,19 @@ load = (EventEmitter) -> # * Inserts by another user will not combine with inserts by the first user. If they are in the # middle of a previous insert by the first user, the original insert will be split into two. constructor: (@changes = [], @comments = []) -> - # Change objects have the following structure: - # { - # id: ... # Uniquely generated by us - # op: { # ShareJs style op tracking the offset (p) and content inserted (i) or deleted (d) - # i: "..." - # p: 42 - # } - # } - # - # Ids are used to uniquely identify a change, e.g. for updating it in the database, or keeping in - # sync with Ace ranges. - @id = 0 + + getIdSeed: () -> + return @id_seed + + setIdSeed: (seed) -> + @id_seed = seed + @id_increment = 0 + + newId: () -> + @id_increment++ + increment = @id_increment.toString(16) + id = @id_seed + '000000'.substr(0, 6 - increment.length) + increment; + return id getComment: (comment_id) -> comment = null @@ -56,19 +57,6 @@ load = (EventEmitter) -> break return comment - resolveCommentId: (comment_id, resolved_data) -> - comment = @getComment(comment_id) - return if !comment? - comment.metadata.resolved = true - comment.metadata.resolved_data = resolved_data - @emit "comment:resolved", comment - - unresolveCommentId: (comment_id) -> - comment = @getComment(comment_id) - return if !comment? - comment.metadata.resolved = false - @emit "comment:unresolved", comment - removeCommentId: (comment_id) -> comment = @getComment(comment_id) return if !comment? @@ -88,7 +76,7 @@ load = (EventEmitter) -> return if !change? 
@_removeChange(change) - applyOp: (op, metadata) -> + applyOp: (op, metadata = {}) -> metadata.ts ?= new Date() # Apply an op that has been applied to the document to our changes to keep them up to date if op.i? @@ -105,7 +93,7 @@ load = (EventEmitter) -> addComment: (op, metadata) -> # TODO: Don't allow overlapping comments? @comments.push comment = { - id: @_newId() + id: @newId() op: # Copy because we'll modify in place c: op.c p: op.p @@ -394,28 +382,9 @@ load = (EventEmitter) -> if moved_changes.length > 0 @emit "changes:moved", moved_changes - _newId: () -> - # Generate a Mongo ObjectId - # Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js - @_pid ?= Math.floor(Math.random() * (32767)) - @_machine ?= Math.floor(Math.random() * (16777216)) - timestamp = Math.floor(new Date().valueOf() / 1000) - @_increment ?= 0 - @_increment++ - - timestamp = timestamp.toString(16) - machine = @_machine.toString(16) - pid = @_pid.toString(16) - increment = @_increment.toString(16) - - return '00000000'.substr(0, 8 - timestamp.length) + timestamp + - '000000'.substr(0, 6 - machine.length) + machine + - '0000'.substr(0, 4 - pid.length) + pid + - '000000'.substr(0, 6 - increment.length) + increment; - _addOp: (op, metadata) -> change = { - id: @_newId() + id: @newId() op: op metadata: metadata } diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index 0cee1598aa..8da51c0899 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -12,6 +12,7 @@ describe "Ranges", -> before (done) -> @project_id = DocUpdaterClient.randomId() @user_id = DocUpdaterClient.randomId() + @id_seed = "587357bd35e64f6157" @doc = { id: DocUpdaterClient.randomId() lines: ["aaa"] @@ -25,7 +26,7 @@ describe "Ranges", -> doc: @doc.id op: [{ i: "456", p: 5 }] v: 1 - meta: { user_id: @user_id, tc: 1 } + meta: { user_id: @user_id, tc: @id_seed } }, { doc: @doc.id op: [{ d: "12", p: 1 }] @@ -52,6 +53,7 @@ describe "Ranges", -> ranges = data.ranges change = ranges.changes[0] change.op.should.deep.equal { i: "456", p: 3 } + change.id.should.equal @id_seed + "000001" change.metadata.user_id.should.equal @user_id done() @@ -135,6 +137,7 @@ describe "Ranges", -> before (done) -> @project_id = DocUpdaterClient.randomId() @user_id = DocUpdaterClient.randomId() + @id_seed = "587357bd35e64f6157" @doc = { id: DocUpdaterClient.randomId() lines: ["a123aa"] @@ -143,7 +146,7 @@ describe "Ranges", -> doc: @doc.id op: [{ i: "456", p: 5 }] v: 0 - meta: { user_id: @user_id, tc: 1 } + meta: { user_id: @user_id, tc: @id_seed } } MockWebApi.insertDoc @project_id, @doc.id, { lines: @doc.lines From 593e7260d44b4ad6063b6e0124fa8ad3fa592d9b Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 9 Jan 2017 10:52:06 +0100 Subject: [PATCH 189/769] Update RangesTracker --- services/document-updater/app/coffee/RangesTracker.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 36ef621493..09d471d476 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -35,6 +35,7 @@ load = (EventEmitter) -> # * Inserts by another user will not combine with inserts by the first user. 
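# How the seeded ids compose, reusing the id_seed from the acceptance test
# above (the seed arrives via update.meta.tc and is passed to setIdSeed by
# RangesManager):
#
#   tracker.setIdSeed("587357bd35e64f6157")
#   tracker.newId()   # -> "587357bd35e64f6157000001"
#   tracker.newId()   # -> "587357bd35e64f6157000002"
#
# Any process that applies the same update with the same seed derives the
# same change ids, which is what makes the generated ranges deterministic.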
If they are in the # middle of a previous insert by the first user, the original insert will be split into two. constructor: (@changes = [], @comments = []) -> + @setIdSeed("") getIdSeed: () -> return @id_seed From 65f4360738d7d3a6efef9b192f1790888f46bffd Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 9 Jan 2017 14:34:10 +0100 Subject: [PATCH 190/769] Consolidate HttpController tests into one file --- .../HttpController/HttpControllerTests.coffee | 335 ++++++++++++++++++ .../HttpController/deleteProjectTests.coffee | 63 ---- .../flushAndDeleteDocTests.coffee | 64 ---- .../flushDocIfLoadedTests.coffee | 65 ---- .../HttpController/flushProjectTests.coffee | 62 ---- .../coffee/HttpController/getDocTests.coffee | 112 ------ .../coffee/HttpController/setDocTests.coffee | 83 ----- 7 files changed, 335 insertions(+), 449 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee delete mode 100644 services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee new file mode 100644 index 0000000000..cf0f71a301 --- /dev/null +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -0,0 +1,335 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/HttpController.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors.js" + +describe "HttpController", -> + beforeEach -> + @HttpController = SandboxedModule.require modulePath, requires: + "./DocumentManager": @DocumentManager = {} + "./ProjectManager": @ProjectManager = {} + "logger-sharelatex" : @logger = { log: sinon.stub() } + "./Metrics": @Metrics = {} + + @Metrics.Timer = class Timer + done: sinon.stub() + @project_id = "project-id-123" + @doc_id = "doc-id-123" + @next = sinon.stub() + @res = + send: sinon.stub() + + describe "getDoc", -> + beforeEach -> + @lines = ["one", "two", "three"] + @ops = ["mock-op-1", "mock-op-2"] + @version = 42 + @fromVersion = 42 + @ranges = { changes: "mock", comments: "mock" } + @req = + params: + project_id: @project_id + doc_id: @doc_id + + describe "when the document exists and no recent ops are requested", -> + beforeEach -> + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, [], @ranges) + @HttpController.getDoc(@req, @res, @next) + + it "should get the doc", -> + @DocumentManager.getDocAndRecentOpsWithLock + .calledWith(@project_id, @doc_id, -1) + .should.equal true + + it "should return the doc as JSON", -> + @res.send + .calledWith(JSON.stringify({ + id: @doc_id + lines: @lines + version: @version + ops: [] + ranges: @ranges + })) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc 
via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when recent ops are requested", -> + beforeEach -> + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, @ops) + @req.query = fromVersion: "#{@fromVersion}" + @HttpController.getDoc(@req, @res, @next) + + it "should get the doc", -> + @DocumentManager.getDocAndRecentOpsWithLock + .calledWith(@project_id, @doc_id, @fromVersion) + .should.equal true + + it "should return the doc as JSON", -> + @res.send + .calledWith(JSON.stringify({ + id: @doc_id + lines: @lines + version: @version + ops: @ops + })) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the document does not exist", -> + beforeEach -> + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, null, null) + @HttpController.getDoc(@req, @res, @next) + + it "should call next with NotFoundError", -> + @next + .calledWith(new Errors.NotFoundError("not found")) + .should.equal true + + describe "when an errors occurs", -> + beforeEach -> + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, new Error("oops"), null, null) + @HttpController.getDoc(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + describe "setDoc", -> + beforeEach -> + @lines = ["one", "two", "three"] + @source = "dropbox" + @user_id = "user-id-123" + @req = + headers: {} + params: + project_id: @project_id + doc_id: @doc_id + body: + lines: @lines + source: @source + user_id: @user_id + + describe "successfully", -> + beforeEach -> + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) + @HttpController.setDoc(@req, @res, @next) + + it "should set the doc", -> + @DocumentManager.setDocWithLock + .calledWith(@project_id, @doc_id, @lines, @source, @user_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, lines: @lines, source: @source, user_id: @user_id, "setting doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an errors occurs", -> + beforeEach -> + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5, new Error("oops")) + @HttpController.setDoc(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + describe "when the payload is too large", -> + beforeEach -> + lines = [] + for _ in [0..200000] + lines.push "test test test" + @req.body.lines = lines + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) + @HttpController.setDoc(@req, @res, @next) + + it 'should send back a 406 response', -> + @res.send.calledWith(406).should.equal true + + it 'should not call setDocWithLock', -> + @DocumentManager.setDocWithLock.callCount.should.equal 0 + + describe "flushProject", -> + beforeEach -> + @req = + params: + project_id: @project_id + + describe "successfully", -> + beforeEach -> + @ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1) + 
@HttpController.flushProject(@req, @res, @next) + + it "should flush the project", -> + @ProjectManager.flushProjectWithLocks + .calledWith(@project_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(project_id: @project_id, "flushing project via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops")) + @HttpController.flushProject(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + describe "flushDocIfLoaded", -> + beforeEach -> + @lines = ["one", "two", "three"] + @version = 42 + @req = + params: + project_id: @project_id + doc_id: @doc_id + + describe "successfully", -> + beforeEach -> + @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2) + @HttpController.flushDocIfLoaded(@req, @res, @next) + + it "should flush the doc", -> + @DocumentManager.flushDocIfLoadedWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, "flushing doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2, new Error("oops")) + @HttpController.flushDocIfLoaded(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + describe "flushAndDeleteDoc", -> + beforeEach -> + @req = + params: + project_id: @project_id + doc_id: @doc_id + + describe "successfully", -> + beforeEach -> + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2) + @HttpController.flushAndDeleteDoc(@req, @res, @next) + + it "should flush and delete the doc", -> + @DocumentManager.flushAndDeleteDocWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(doc_id: @doc_id, project_id: @project_id, "deleting doc via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2, new Error("oops")) + @HttpController.flushAndDeleteDoc(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true + + describe "deleteProject", -> + beforeEach -> + @req = + params: + project_id: @project_id + + describe "successfully", -> + beforeEach -> + @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(1) + @HttpController.deleteProject(@req, @res, @next) + + it "should delete the project", -> + @ProjectManager.flushAndDeleteProjectWithLocks + .calledWith(@project_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + 
.calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith(project_id: @project_id, "deleting project via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an error occurs", -> + beforeEach -> + @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops")) + @HttpController.deleteProject(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee b/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee deleted file mode 100644 index 796df52e80..0000000000 --- a/services/document-updater/test/unit/coffee/HttpController/deleteProjectTests.coffee +++ /dev/null @@ -1,63 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/HttpController.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" - -describe "HttpController.deleteProject", -> - beforeEach -> - @HttpController = SandboxedModule.require modulePath, requires: - "./DocumentManager": @DocumentManager = {} - "./ProjectManager": @ProjectManager = {} - "logger-sharelatex" : @logger = { log: sinon.stub() } - "./Metrics": @Metrics = {} - - @Metrics.Timer = class Timer - done: sinon.stub() - - @project_id = "project-id-123" - @res = - send: sinon.stub() - @req = - params: - project_id: @project_id - @next = sinon.stub() - - describe "successfully", -> - beforeEach -> - @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(1) - @HttpController.deleteProject(@req, @res, @next) - - it "should delete the project", -> - @ProjectManager.flushAndDeleteProjectWithLocks - .calledWith(@project_id) - .should.equal true - - it "should return a successful No Content response", -> - @res.send - .calledWith(204) - .should.equal true - - it "should log the request", -> - @logger.log - .calledWith(project_id: @project_id, "deleting project via http") - .should.equal true - - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true - - describe "when an errors occurs", -> - beforeEach -> - @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops")) - @HttpController.deleteProject(@req, @res, @next) - - it "should call next with the error", -> - @next - .calledWith(new Error("oops")) - .should.equal true - - - - - diff --git a/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee deleted file mode 100644 index af09c2c1bd..0000000000 --- a/services/document-updater/test/unit/coffee/HttpController/flushAndDeleteDocTests.coffee +++ /dev/null @@ -1,64 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/HttpController.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" - -describe "HttpController.flushAndDeleteDoc", -> - beforeEach -> - @HttpController = SandboxedModule.require modulePath, requires: - "./DocumentManager": @DocumentManager = {} - "./ProjectManager":{} - "logger-sharelatex" : @logger = { log: sinon.stub() } - "./Metrics": @Metrics = {} - - @Metrics.Timer = 
class Timer - done: sinon.stub() - - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @res = - send: sinon.stub() - @req = - params: - project_id: @project_id - doc_id: @doc_id - @next = sinon.stub() - - describe "successfully", -> - beforeEach -> - @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2) - @HttpController.flushAndDeleteDoc(@req, @res, @next) - - it "should flush and delete the doc", -> - @DocumentManager.flushAndDeleteDocWithLock - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should return a successful No Content response", -> - @res.send - .calledWith(204) - .should.equal true - - it "should log the request", -> - @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, "deleting doc via http") - .should.equal true - - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true - - describe "when an errors occurs", -> - beforeEach -> - @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2, new Error("oops")) - @HttpController.flushAndDeleteDoc(@req, @res, @next) - - it "should call next with the error", -> - @next - .calledWith(new Error("oops")) - .should.equal true - - - - diff --git a/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee deleted file mode 100644 index 3321030624..0000000000 --- a/services/document-updater/test/unit/coffee/HttpController/flushDocIfLoadedTests.coffee +++ /dev/null @@ -1,65 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/HttpController.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" - -describe "HttpController.flushDocIfLoaded", -> - beforeEach -> - @HttpController = SandboxedModule.require modulePath, requires: - "./DocumentManager": @DocumentManager = {} - "./ProjectManager": {} - "logger-sharelatex" : @logger = { log: sinon.stub() } - "./Metrics": @Metrics = {} - - @Metrics.Timer = class Timer - done: sinon.stub() - - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 - @res = - send: sinon.stub() - @req = - params: - project_id: @project_id - doc_id: @doc_id - @next = sinon.stub() - - describe "successfully", -> - beforeEach -> - @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2) - @HttpController.flushDocIfLoaded(@req, @res, @next) - - it "should flush the doc", -> - @DocumentManager.flushDocIfLoadedWithLock - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should return a successful No Content response", -> - @res.send - .calledWith(204) - .should.equal true - - it "should log the request", -> - @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, "flushing doc via http") - .should.equal true - - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true - - describe "when an errors occurs", -> - beforeEach -> - @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2, new Error("oops")) - @HttpController.flushDocIfLoaded(@req, @res, @next) - - it "should call next with the error", -> - @next - .calledWith(new Error("oops")) - .should.equal true - - - diff --git a/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee b/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee deleted file mode 100644 index 
e45269ce6d..0000000000 --- a/services/document-updater/test/unit/coffee/HttpController/flushProjectTests.coffee +++ /dev/null @@ -1,62 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/HttpController.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" - -describe "HttpController.flushProject", -> - beforeEach -> - @HttpController = SandboxedModule.require modulePath, requires: - "./DocumentManager": @DocumentManager = {} - "./ProjectManager": @ProjectManager = {} - "logger-sharelatex" : @logger = { log: sinon.stub() } - "./Metrics": @Metrics = {} - - @Metrics.Timer = class Timer - done: sinon.stub() - - @project_id = "project-id-123" - @res = - send: sinon.stub() - @req = - params: - project_id: @project_id - @next = sinon.stub() - - describe "successfully", -> - beforeEach -> - @ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1) - @HttpController.flushProject(@req, @res, @next) - - it "should flush the project", -> - @ProjectManager.flushProjectWithLocks - .calledWith(@project_id) - .should.equal true - - it "should return a successful No Content response", -> - @res.send - .calledWith(204) - .should.equal true - - it "should log the request", -> - @logger.log - .calledWith(project_id: @project_id, "flushing project via http") - .should.equal true - - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true - - describe "when an errors occurs", -> - beforeEach -> - @ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops")) - @HttpController.flushProject(@req, @res, @next) - - it "should call next with the error", -> - @next - .calledWith(new Error("oops")) - .should.equal true - - - - diff --git a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee deleted file mode 100644 index 8fa3931d65..0000000000 --- a/services/document-updater/test/unit/coffee/HttpController/getDocTests.coffee +++ /dev/null @@ -1,112 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/HttpController.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" - -describe "HttpController.getDoc", -> - beforeEach -> - @HttpController = SandboxedModule.require modulePath, requires: - "./DocumentManager": @DocumentManager = {} - "./ProjectManager": {} - "logger-sharelatex" : @logger = { log: sinon.stub() } - "./Metrics": @Metrics = {} - - @Metrics.Timer = class Timer - done: sinon.stub() - - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @ops = ["mock-op-1", "mock-op-2"] - @version = 42 - @fromVersion = 42 - @ranges = { changes: "mock", comments: "mock" } - @res = - send: sinon.stub() - @req = - params: - project_id: @project_id - doc_id: @doc_id - @next = sinon.stub() - - describe "when the document exists and no recent ops are requested", -> - beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, [], @ranges) - @HttpController.getDoc(@req, @res, @next) - - it "should get the doc", -> - @DocumentManager.getDocAndRecentOpsWithLock - .calledWith(@project_id, @doc_id, -1) - .should.equal true - - it "should return the doc as JSON", -> - @res.send - .calledWith(JSON.stringify({ - id: @doc_id - lines: @lines - version: @version - ops: 
[] - ranges: @ranges - })) - .should.equal true - - it "should log the request", -> - @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") - .should.equal true - - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true - - describe "when recent ops are requested", -> - beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, @ops) - @req.query = fromVersion: "#{@fromVersion}" - @HttpController.getDoc(@req, @res, @next) - - it "should get the doc", -> - @DocumentManager.getDocAndRecentOpsWithLock - .calledWith(@project_id, @doc_id, @fromVersion) - .should.equal true - - it "should return the doc as JSON", -> - @res.send - .calledWith(JSON.stringify({ - id: @doc_id - lines: @lines - version: @version - ops: @ops - })) - .should.equal true - - it "should log the request", -> - @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") - .should.equal true - - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the document does not exist", -> - beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, null, null) - @HttpController.getDoc(@req, @res, @next) - - it "should call next with NotFoundError", -> - @next - .calledWith(new Errors.NotFoundError("not found")) - .should.equal true - - describe "when an errors occurs", -> - beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, new Error("oops"), null, null) - @HttpController.getDoc(@req, @res, @next) - - it "should call next with the error", -> - @next - .calledWith(new Error("oops")) - .should.equal true - - diff --git a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee b/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee deleted file mode 100644 index 385b8be044..0000000000 --- a/services/document-updater/test/unit/coffee/HttpController/setDocTests.coffee +++ /dev/null @@ -1,83 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/HttpController.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" - -describe "HttpController.setDoc", -> - beforeEach -> - @HttpController = SandboxedModule.require modulePath, requires: - "./DocumentManager": @DocumentManager = {} - "./ProjectManager": {} - "logger-sharelatex" : @logger = { log: sinon.stub() } - "./Metrics": @Metrics = {} - - @Metrics.Timer = class Timer - done: sinon.stub() - - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @source = "dropbox" - @user_id = "user-id-123" - @res = - send: sinon.stub() - @req = - headers: {} - params: - project_id: @project_id - doc_id: @doc_id - body: - lines: @lines - source: @source - user_id: @user_id - @next = sinon.stub() - - describe "successfully", -> - beforeEach -> - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) - @HttpController.setDoc(@req, @res, @next) - - it "should set the doc", -> - @DocumentManager.setDocWithLock - .calledWith(@project_id, @doc_id, @lines, @source, @user_id) - .should.equal true - - it "should return a successful No Content response", -> - @res.send - .calledWith(204) - .should.equal true - - it "should log the request", -> - @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, lines: @lines, source: @source, 
user_id: @user_id, "setting doc via http") - .should.equal true - - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true - - describe "when an errors occurs", -> - beforeEach -> - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5, new Error("oops")) - @HttpController.setDoc(@req, @res, @next) - - it "should call next with the error", -> - @next - .calledWith(new Error("oops")) - .should.equal true - - describe "when the payload is too large", -> - beforeEach -> - lines = [] - for _ in [0..200000] - lines.push "test test test" - @req.body.lines = lines - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) - @HttpController.setDoc(@req, @res, @next) - - it 'should send back a 406 response', -> - @res.send.calledWith(406).should.equal true - - it 'should not call setDocWithLock', -> - @DocumentManager.setDocWithLock.callCount.should.equal 0 From be19532a1dfa8d281affbb9953649b4a373e8afb Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 9 Jan 2017 14:41:18 +0100 Subject: [PATCH 191/769] Add HTTP end point for accepting changes --- services/document-updater/app.coffee | 1 + .../app/coffee/DocumentManager.coffee | 23 ++++++++- .../app/coffee/HttpController.coffee | 11 ++++ .../app/coffee/RangesManager.coffee | 20 ++++++-- .../test/acceptance/coffee/RangesTests.coffee | 43 ++++++++++++++++ .../coffee/helpers/DocUpdaterClient.coffee | 3 ++ .../DocumentManagerTests.coffee | 50 ++++++++++++++++++- .../HttpController/HttpControllerTests.coffee | 43 +++++++++++++++- 8 files changed, 187 insertions(+), 7 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 004b9f77bc..20170cdd34 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -45,6 +45,7 @@ app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLo app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject app.post '/project/:project_id/flush', HttpController.flushProject +app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChange app.get '/total', (req, res)-> timer = new Metrics.Timer("http.allDocList") diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index c6d4773036..4f02bbcf5c 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -5,6 +5,8 @@ logger = require "logger-sharelatex" Metrics = require "./Metrics" HistoryManager = require "./HistoryManager" WebRedisManager = require "./WebRedisManager" +Errors = require "./Errors" +RangesManager = require "./RangesManager" module.exports = DocumentManager = getDoc: (project_id, doc_id, _callback = (error, lines, version, alreadyLoaded) ->) -> @@ -83,7 +85,6 @@ module.exports = DocumentManager = DocumentManager.flushAndDeleteDoc project_id, doc_id, (error) -> return callback(error) if error? callback null - flushDocIfLoaded: (project_id, doc_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.flushDocIfLoaded") @@ -119,6 +120,22 @@ module.exports = DocumentManager = RedisManager.removeDocFromMemory project_id, doc_id, (error) -> return callback(error) if error? 
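      # by this point both the flush to the persistence layer and the
      # eviction from redis have succeeded, so the doc is fully unloaded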
callback null + + acceptChange: (project_id, doc_id, change_id, _callback = (error) ->) -> + timer = new Metrics.Timer("docManager.acceptChange") + callback = (args...) -> + timer.done() + _callback(args...) + + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> + return callback(error) if error? + if !lines? or !version? + return callback(new Errors.NotFoundError("document not found: #{doc_id}")) + RangesManager.acceptChange change_id, ranges, (error, new_ranges) -> + return callback(error) if error? + RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> + return callback(error) if error? + callback() getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" @@ -139,3 +156,7 @@ module.exports = DocumentManager = flushAndDeleteDocWithLock: (project_id, doc_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, callback + + acceptChangeWithLock: (project_id, doc_id, change_id, callback = (error) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.acceptChange, project_id, doc_id, change_id, callback diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index e138fe8bc4..683b94230f 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -97,4 +97,15 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, "deleted project via http" res.send 204 # No Content + + acceptChange: (req, res, next = (error) ->) -> + {project_id, doc_id, change_id} = req.params + logger.log {project_id, doc_id, change_id}, "accepting change via http" + timer = new Metrics.Timer("http.acceptChange") + DocumentManager.acceptChangeWithLock project_id, doc_id, change_id, (error) -> + timer.done() + return next(error) if error? 
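+      # the change has been removed from the doc's ranges under the doc
+      # lock; like the other write endpoints, reply with 204 No Content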
+ logger.log {project_id, doc_id, change_id}, "accepted change via http" + res.send 204 # No Content + diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index ac1dcbe75f..64f7059399 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -4,7 +4,7 @@ logger = require "logger-sharelatex" module.exports = RangesManager = applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> {changes, comments} = entries - logger.log {changes, comments, updates}, "appliyng updates to ranges" + logger.log {changes, comments, updates}, "applying updates to ranges" rangesTracker = new RangesTracker(changes, comments) for update in updates rangesTracker.track_changes = !!update.meta.tc @@ -12,7 +12,20 @@ module.exports = RangesManager = rangesTracker.setIdSeed(update.meta.tc) for op in update.op rangesTracker.applyOp(op, { user_id: update.meta?.user_id }) - + + response = RangesManager._getRanges rangesTracker + logger.log {response}, "applied updates to ranges" + callback null, response + + acceptChange: (change_id, ranges, callback = (error, ranges) ->) -> + {changes, comments} = ranges + logger.log {changes, comments, change_id}, "accepting change in ranges" + rangesTracker = new RangesTracker(changes, comments) + rangesTracker.removeChangeId(change_id) + response = RangesManager._getRanges(rangesTracker) + callback null, response + + _getRanges: (rangesTracker) -> # Return the minimal data structure needed, since most documents won't have any # changes or comments response = {} @@ -22,5 +35,4 @@ module.exports = RangesManager = if rangesTracker.comments?.length > 0 response ?= {} response.comments = rangesTracker.comments - logger.log {response}, "applied updates to ranges" - callback null, response \ No newline at end of file + return response \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index 8da51c0899..7498a8087b 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -1,6 +1,7 @@ sinon = require "sinon" chai = require("chai") chai.should() +expect = chai.expect async = require "async" rclient = require("redis").createClient() @@ -182,3 +183,45 @@ describe "Ranges", -> changes[0].op.should.deep.equal { i: "123", p: 1 } changes[1].op.should.deep.equal { i: "456", p: 5 } done() + + describe "accepting a change", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @id_seed = "587357bd35e64f6157" + @doc = { + id: DocUpdaterClient.randomId() + lines: ["aaa"] + } + @update = { + doc: @doc.id + op: [{ i: "456", p: 1 }] + v: 0 + meta: { user_id: @user_id, tc: @id_seed } + } + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc.id, @update, (error) => + throw error if error? + setTimeout () => + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? 
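+            # the update above carried meta.tc, so the insert comes back
+            # from getDoc as a tracked change under data.ranges.changes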
+ ranges = data.ranges + change = ranges.changes[0] + change.op.should.deep.equal { i: "456", p: 1 } + change.id.should.equal @id_seed + "000001" + change.metadata.user_id.should.equal @user_id + done() + , 200 + + it "should remove the change after accepting", (done) -> + DocUpdaterClient.acceptChange @project_id, @doc.id, @id_seed + "000001", (error) => + throw error if error? + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? + expect(data.ranges.changes).to.be.undefined + done() \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index d704daefd1..afcbfd4b45 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -72,3 +72,6 @@ module.exports = DocUpdaterClient = deleteProject: (project_id, callback = () ->) -> request.del "http://localhost:3003/project/#{project_id}", callback + + acceptChange: (project_id, doc_id, change_id, callback = () ->) -> + request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/change/#{change_id}/accept", callback \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 5966843f5a..3f0279b5c7 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -3,6 +3,7 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/DocumentManager.js" SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" describe "DocumentManager", -> beforeEach -> @@ -18,6 +19,7 @@ describe "DocumentManager", -> "./WebRedisManager": @WebRedisManager = {} "./DiffCodec": @DiffCodec = {} "./UpdateManager": @UpdateManager = {} + "./RangesManager": @RangesManager = {} @project_id = "project-id-123" @doc_id = "doc-id-123" @callback = sinon.stub() @@ -259,4 +261,50 @@ describe "DocumentManager", -> @callback.calledWith(new Error("No lines were passed to setDoc")) it "should not try to get the doc lines", -> - @DocumentManager.getDoc.called.should.equal false \ No newline at end of file + @DocumentManager.getDoc.called.should.equal false + + describe "acceptChanges", -> + beforeEach -> + @change_id = "mock-change-id" + @version = 34 + @lines = ["original", "lines"] + @ranges = { entries: "mock", comments: "mock" } + @updated_ranges = { entries: "updated", comments: "updated" } + @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) + @RangesManager.acceptChange = sinon.stub().yields(null, @updated_ranges) + @RedisManager.updateDocument = sinon.stub().yields() + + describe "successfully", -> + beforeEach -> + @DocumentManager.acceptChange @project_id, @doc_id, @change_id, @callback + + it "should get the document's current ranges", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should apply the accept change to the ranges", -> + @RangesManager.acceptChange + .calledWith(@change_id, @ranges) + .should.equal true + + it "should save the updated ranges", -> + @RedisManager.updateDocument + .calledWith(@doc_id, @lines, @version, [], 
@updated_ranges) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + describe "when the doc is not found", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().yields(null, null, null, null) + @DocumentManager.acceptChange @project_id, @doc_id, @change_id, @callback + + it "should not save anything", -> + @RedisManager.updateDocument.called.should.equal false + + it "should call the callback with a not found error", -> + error = new Errors.NotFoundError("document not found: #{@doc_id}") + @callback.calledWith(error).should.equal true + \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index cf0f71a301..4000b402aa 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -332,4 +332,45 @@ describe "HttpController", -> it "should call next with the error", -> @next .calledWith(new Error("oops")) - .should.equal true \ No newline at end of file + .should.equal true + + describe "acceptChange", -> + beforeEach -> + @req = + params: + project_id: @project_id + doc_id: @doc_id + change_id: @change_id = "mock-change-od-1" + + describe "successfully", -> + beforeEach -> + @DocumentManager.acceptChangeWithLock = sinon.stub().callsArgWith(3) + @HttpController.acceptChange(@req, @res, @next) + + it "should accept the change", -> + @DocumentManager.acceptChangeWithLock + .calledWith(@project_id, @doc_id, @change_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith({@project_id, @doc_id, @change_id}, "accepting change via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an errors occurs", -> + beforeEach -> + @DocumentManager.acceptChangeWithLock = sinon.stub().callsArgWith(3, new Error("oops")) + @HttpController.acceptChange(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true From 540d0f7ec7783bc74b99ea832bd038b3e6c6f778 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 10 Jan 2017 11:55:38 +0100 Subject: [PATCH 192/769] Make sure comment ids are unique --- .../app/coffee/RangesTracker.coffee | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 09d471d476..f2794f2c07 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -35,7 +35,7 @@ load = (EventEmitter) -> # * Inserts by another user will not combine with inserts by the first user. If they are in the # middle of a previous insert by the first user, the original insert will be split into two. 
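  # Illustrative sketch of the rules above (not from the original source):
  # if user A has a tracked insert {i: "foo", p: 0}, then A typing "X" at
  # p: 1 extends that change to {i: "fXoo", p: 0}, whereas the same
  # keystroke from user B splits it into {i: "f", p: 0} and {i: "oo", p: 2}
  # with B's own {i: "X", p: 1} recorded in between.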
constructor: (@changes = [], @comments = []) -> - @setIdSeed("") + @setIdSeed(RangesTracker.generateIdSeed()) getIdSeed: () -> return @id_seed @@ -43,6 +43,19 @@ load = (EventEmitter) -> setIdSeed: (seed) -> @id_seed = seed @id_increment = 0 + + @generateIdSeed: () -> + # Generate a the first 18 characters of Mongo ObjectId, leaving 6 for the increment part + # Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js + pid = Math.floor(Math.random() * (32767)).toString(16) + machine = Math.floor(Math.random() * (16777216)).toString(16) + timestamp = Math.floor(new Date().valueOf() / 1000).toString(16) + return '00000000'.substr(0, 8 - timestamp.length) + timestamp + + '000000'.substr(0, 6 - machine.length) + machine + + '0000'.substr(0, 4 - pid.length) + pid + + @generateId: () -> + @generateId() + "000001" newId: () -> @id_increment++ From a3d5971d5462b0584cbb4243fa47faf9be88cdec Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 10 Jan 2017 11:59:09 +0100 Subject: [PATCH 193/769] Update RangesTRacker --- services/document-updater/app/coffee/RangesTracker.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index f2794f2c07..722eab1aa5 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -55,7 +55,7 @@ load = (EventEmitter) -> '0000'.substr(0, 4 - pid.length) + pid @generateId: () -> - @generateId() + "000001" + @generateIdSeed() + "000001" newId: () -> @id_increment++ From 0706feb26bb6b182d25300d9f1c97ec5199dd67a Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 10 Jan 2017 16:58:11 +0100 Subject: [PATCH 194/769] Add max limit on number of comments and changes per doc --- .../app/coffee/RangesManager.coffee | 6 +++++ .../app/coffee/ShareJsUpdateManager.coffee | 8 +------ .../app/coffee/UpdateManager.coffee | 7 +++++- .../coffee/ApplyingUpdatesToADocTests.coffee | 16 ++++++++++++- .../ShareJsUpdateManagerTests.coffee | 23 ------------------- .../UpdateManager/UpdateManagerTests.coffee | 20 ++++++++++++++++ 6 files changed, 48 insertions(+), 32 deletions(-) diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 64f7059399..25da4ec9db 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -2,6 +2,9 @@ RangesTracker = require "./RangesTracker" logger = require "logger-sharelatex" module.exports = RangesManager = + MAX_COMMENTS: 500 + MAX_CHANGES: 500 + applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> {changes, comments} = entries logger.log {changes, comments, updates}, "applying updates to ranges" @@ -12,6 +15,9 @@ module.exports = RangesManager = rangesTracker.setIdSeed(update.meta.tc) for op in update.op rangesTracker.applyOp(op, { user_id: update.meta?.user_id }) + + if rangesTracker.changes?.length > RangesManager.MAX_CHANGES or rangesTracker.comments?.length > RangesManager.MAX_COMMENTS + return callback new Error("too many comments or tracked changes") response = RangesManager._getRanges rangesTracker logger.log {response}, "applied updates to ranges" diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 
876d56e71b..1d36776b9f 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -37,13 +37,10 @@ module.exports = ShareJsUpdateManager = update.dup = true ShareJsUpdateManager._sendOp(project_id, doc_id, update) else - ShareJsUpdateManager._sendError(project_id, doc_id, error) return callback(error) logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update" model.getSnapshot doc_key, (error, data) => - if error? - ShareJsUpdateManager._sendError(project_id, doc_id, error) - return callback(error) + return callback(error) if error? docLines = data.snapshot.split(/\r\n|\n|\r/) callback(null, docLines, data.v, model.db.appliedOps[doc_key] or []) @@ -55,6 +52,3 @@ module.exports = ShareJsUpdateManager = _sendOp: (project_id, doc_id, op) -> WebRedisManager.sendData {project_id, doc_id, op} - _sendError: (project_id, doc_id, error) -> - WebRedisManager.sendData {project_id, doc_id, error: error.message || error} - diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 1678c4d4c0..89f58bfd1f 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -46,7 +46,12 @@ module.exports = UpdateManager = (update, cb) -> UpdateManager.applyUpdate project_id, doc_id, update, cb callback - applyUpdate: (project_id, doc_id, update, callback = (error) ->) -> + applyUpdate: (project_id, doc_id, update, _callback = (error) ->) -> + callback = (error) -> + if error? + WebRedisManager.sendData {project_id, doc_id, error: error.message || error} + _callback(error) + UpdateManager._sanitizeUpdate update DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> return callback(error) if error? diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 4166f8499e..bdfe89b990 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -176,8 +176,12 @@ describe "Applying updates to a doc", -> describe "with a broken update", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + @broken_update = { doc_id: @doc_id, v: @version, op: [d: "not the correct content", p: 0 ] } MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.sendUpdate @project_id, @doc_id, @undefined, (error) -> + + DocUpdaterClient.subscribeToAppliedOps @messageCallback = sinon.stub() + + DocUpdaterClient.sendUpdate @project_id, @doc_id, @broken_update, (error) -> throw error if error? 
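      # give the updater time to process the update asynchronously and to
      # publish the failure on the applied-ops channel before asserting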
setTimeout done, 200 @@ -185,6 +189,16 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @lines done() + + it "should send a message with an error", -> + @messageCallback.called.should.equal true + [channel, message] = @messageCallback.args[0] + channel.should.equal "applied-ops" + JSON.parse(message).should.deep.equal { + project_id: @project_id, + doc_id: @doc_id, + error:'Delete component \'not the correct content\' does not match deleted text \'one\ntwo\nthree\'' + } describe "with enough updates to flush to the track changes api", -> before (done) -> diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index f3b871149d..42ba3f331b 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -70,34 +70,22 @@ describe "ShareJsUpdateManager", -> describe "when applyOp fails", -> beforeEach (done) -> @error = new Error("Something went wrong") - @ShareJsUpdateManager._sendError = sinon.stub() @model.applyOp = sinon.stub().callsArgWith(2, @error) @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version) => @callback(err, docLines, version) done() - it "should call sendError with the error", -> - @ShareJsUpdateManager._sendError - .calledWith(@project_id, @doc_id, @error) - .should.equal true - it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true describe "when getSnapshot fails", -> beforeEach (done) -> @error = new Error("Something went wrong") - @ShareJsUpdateManager._sendError = sinon.stub() @model.getSnapshot.callsArgWith(1, @error) @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version) => @callback(err, docLines, version) done() - it "should call sendError with the error", -> - @ShareJsUpdateManager._sendError - .calledWith(@project_id, @doc_id, @error) - .should.equal true - it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -125,14 +113,3 @@ describe "ShareJsUpdateManager", -> .calledWith({project_id: @project_id, doc_id: @doc_id, op: @opData}) .should.equal true - describe "_sendError", -> - beforeEach -> - @error_text = "Something went wrong" - @WebRedisManager.sendData = sinon.stub() - @ShareJsUpdateManager._sendError(@project_id, @doc_id, new Error(@error_text)) - - it "should publish the error to the redis stream", -> - @WebRedisManager.sendData - .calledWith({project_id: @project_id, doc_id: @doc_id, error: @error_text}) - .should.equal true - diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index e87391af44..33578cb6f0 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -165,6 +165,7 @@ describe "UpdateManager", -> @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = 
sinon.stub().yields() + @WebRedisManager.sendData = sinon.stub() @HistoryManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) describe "normally", -> @@ -206,6 +207,25 @@ describe "UpdateManager", -> # \uFFFD is 'replacement character' @update.op[0].i.should.equal "\uFFFD\uFFFD" + + describe "with an error", -> + beforeEach -> + @error = new Error("something went wrong") + @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(@error) + @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback + + it "should call WebRedisManager.sendData with the error", -> + @WebRedisManager.sendData + .calledWith({ + project_id: @project_id, + doc_id: @doc_id, + error: @error.message + }) + .should.equal true + + it "should call the callback with the error", -> + @callback.calledWith(@error).should.equal true + describe "lockUpdatesAndDo", -> beforeEach -> From 5fed2424d08a947e49721270fa299f856db18efb Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 16 Jan 2017 13:05:05 +0100 Subject: [PATCH 195/769] Remove unused redis package reference --- .../document-updater/test/acceptance/coffee/RangesTests.coffee | 1 - 1 file changed, 1 deletion(-) diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index 7498a8087b..0849d4551f 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -3,7 +3,6 @@ chai = require("chai") chai.should() expect = chai.expect async = require "async" -rclient = require("redis").createClient() MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" From 0705242a55b7011ebd5b2e98217673ff42109a94 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 17 Jan 2017 11:45:10 +0100 Subject: [PATCH 196/769] Increase max op age to 80 --- .../document-updater/app/coffee/ShareJsUpdateManager.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 1d36776b9f..f175796467 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -11,10 +11,12 @@ WebRedisManager = require "./WebRedisManager" ShareJsModel:: = {} util.inherits ShareJsModel, EventEmitter +MAX_AGE_OF_OP = 80 + module.exports = ShareJsUpdateManager = getNewShareJsModel: (project_id, doc_id, lines, version) -> db = new ShareJsDB(project_id, doc_id, lines, version) - model = new ShareJsModel(db, maxDocLength: Settings.max_doc_length) + model = new ShareJsModel(db, maxDocLength: Settings.max_doc_length, maximumAge: MAX_AGE_OF_OP) model.db = db return model From dc976fd9e9857ee5a29833aecd13e157153d07db Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 23 Jan 2017 09:45:46 +0100 Subject: [PATCH 197/769] Use thread id for comment id --- services/document-updater/app/coffee/RangesTracker.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 722eab1aa5..7a679bb6e3 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -107,7 +107,7 @@ load = (EventEmitter) -> addComment: (op, metadata) -> # TODO: Don't allow 
overlapping comments? @comments.push comment = { - id: @newId() + id: op.t or @newId() op: # Copy because we'll modify in place c: op.c p: op.p From 8725aeab03033e55fd30018284a7dd750a67ee4f Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 24 Jan 2017 15:57:11 +0100 Subject: [PATCH 198/769] Allow deleting of comment ranges --- services/document-updater/app.coffee | 1 + .../app/coffee/DocumentManager.coffee | 20 ++++++++ .../app/coffee/HttpController.coffee | 10 ++++ .../app/coffee/RangesManager.coffee | 8 ++++ .../app/coffee/RangesTracker.coffee | 1 - .../test/acceptance/coffee/RangesTests.coffee | 41 +++++++++++++++++ .../coffee/helpers/DocUpdaterClient.coffee | 5 +- .../DocumentManagerTests.coffee | 46 ++++++++++++++++++- .../HttpController/HttpControllerTests.coffee | 41 +++++++++++++++++ 9 files changed, 170 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 20170cdd34..9026f653cb 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -46,6 +46,7 @@ app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDele app.delete '/project/:project_id', HttpController.deleteProject app.post '/project/:project_id/flush', HttpController.flushProject app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChange +app.del '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment app.get '/total', (req, res)-> timer = new Metrics.Timer("http.allDocList") diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 4f02bbcf5c..9ac651b5e7 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -136,6 +136,22 @@ module.exports = DocumentManager = RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> return callback(error) if error? callback() + + deleteComment: (project_id, doc_id, comment_id, _callback = (error) ->) -> + timer = new Metrics.Timer("docManager.deleteComment") + callback = (args...) -> + timer.done() + _callback(args...) + + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> + return callback(error) if error? + if !lines? or !version? + return callback(new Errors.NotFoundError("document not found: #{doc_id}")) + RangesManager.deleteComment comment_id, ranges, (error, new_ranges) -> + return callback(error) if error? + RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> + return callback(error) if error? 
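+        # only the ranges change here: the write above re-saves the same
+        # lines and version with an empty op list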
+ callback() getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" @@ -160,3 +176,7 @@ module.exports = DocumentManager = acceptChangeWithLock: (project_id, doc_id, change_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.acceptChange, project_id, doc_id, change_id, callback + + deleteCommentWithLock: (project_id, doc_id, thread_id, callback = (error) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.deleteComment, project_id, doc_id, thread_id, callback diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 683b94230f..8448361930 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -107,5 +107,15 @@ module.exports = HttpController = return next(error) if error? logger.log {project_id, doc_id, change_id}, "accepted change via http" res.send 204 # No Content + + deleteComment: (req, res, next = (error) ->) -> + {project_id, doc_id, comment_id} = req.params + logger.log {project_id, doc_id, comment_id}, "deleting comment via http" + timer = new Metrics.Timer("http.deleteComment") + DocumentManager.deleteCommentWithLock project_id, doc_id, comment_id, (error) -> + timer.done() + return next(error) if error? + logger.log {project_id, doc_id, comment_id}, "deleted comment via http" + res.send 204 # No Content diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 25da4ec9db..ee94933b8d 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -30,6 +30,14 @@ module.exports = RangesManager = rangesTracker.removeChangeId(change_id) response = RangesManager._getRanges(rangesTracker) callback null, response + + deleteComment: (comment_id, ranges, callback = (error, ranges) ->) -> + {changes, comments} = ranges + logger.log {changes, comments, comment_id}, "deleting comment in ranges" + rangesTracker = new RangesTracker(changes, comments) + rangesTracker.removeCommentId(comment_id) + response = RangesManager._getRanges(rangesTracker) + callback null, response _getRanges: (rangesTracker) -> # Return the minimal data structure needed, since most documents won't have any diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 7a679bb6e3..e31b84f051 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -105,7 +105,6 @@ load = (EventEmitter) -> throw new Error("unknown op type") addComment: (op, metadata) -> - # TODO: Don't allow overlapping comments? 
@comments.push comment = { id: op.t or @newId() op: # Copy because we'll modify in place diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index 0849d4551f..044fd3191f 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -91,6 +91,7 @@ describe "Ranges", -> ranges = data.ranges comment = ranges.comments[0] comment.op.should.deep.equal { c: "bar", p: 4, t: @tid } + comment.id.should.equal @tid done() describe "with conflicting ops needing OT", -> @@ -223,4 +224,44 @@ describe "Ranges", -> DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => throw error if error? expect(data.ranges.changes).to.be.undefined + done() + + describe "deleting a comment range", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["foo bar"] + } + @update = { + doc: @doc.id + op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + v: 0 + } + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc.id, @update, (error) => + throw error if error? + setTimeout () => + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? + ranges = data.ranges + change = ranges.comments[0] + change.op.should.deep.equal { c: "bar", p: 4, t: @tid } + change.id.should.equal @tid + done() + , 200 + + it "should remove the comment range", (done) -> + DocUpdaterClient.removeComment @project_id, @doc.id, @tid, (error, res) => + throw error if error? + expect(res.statusCode).to.equal 204 + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? 
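+          # RangesManager._getRanges only includes non-empty fields, so once
+          # the last comment is removed the comments key is omitted entirely
+          # rather than returned as an empty array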
+ expect(data.ranges.comments).to.be.undefined done() \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index afcbfd4b45..d2e8dbe51d 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -74,4 +74,7 @@ module.exports = DocUpdaterClient = request.del "http://localhost:3003/project/#{project_id}", callback acceptChange: (project_id, doc_id, change_id, callback = () ->) -> - request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/change/#{change_id}/accept", callback \ No newline at end of file + request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/change/#{change_id}/accept", callback + + removeComment: (project_id, doc_id, comment, callback = () ->) -> + request.del "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/comment/#{comment}", callback diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 3f0279b5c7..b7ea49ffc9 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -307,4 +307,48 @@ describe "DocumentManager", -> it "should call the callback with a not found error", -> error = new Errors.NotFoundError("document not found: #{@doc_id}") @callback.calledWith(error).should.equal true - \ No newline at end of file + + describe "deleteComment", -> + beforeEach -> + @comment_id = "mock-comment-id" + @version = 34 + @lines = ["original", "lines"] + @ranges = { comments: ["one", "two", "three"] } + @updated_ranges = { comments: ["one", "three"] } + @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) + @RangesManager.deleteComment = sinon.stub().yields(null, @updated_ranges) + @RedisManager.updateDocument = sinon.stub().yields() + + describe "successfully", -> + beforeEach -> + @DocumentManager.deleteComment @project_id, @doc_id, @comment_id, @callback + + it "should get the document's current ranges", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should delete the comment from the ranges", -> + @RangesManager.deleteComment + .calledWith(@comment_id, @ranges) + .should.equal true + + it "should save the updated ranges", -> + @RedisManager.updateDocument + .calledWith(@doc_id, @lines, @version, [], @updated_ranges) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + describe "when the doc is not found", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().yields(null, null, null, null) + @DocumentManager.acceptChange @project_id, @doc_id, @comment_id, @callback + + it "should not save anything", -> + @RedisManager.updateDocument.called.should.equal false + + it "should call the callback with a not found error", -> + error = new Errors.NotFoundError("document not found: #{@doc_id}") + @callback.calledWith(error).should.equal true \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 4000b402aa..859a0d1089 
100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -374,3 +374,44 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true + + describe "deleteComment", -> + beforeEach -> + @req = + params: + project_id: @project_id + doc_id: @doc_id + comment_id: @comment_id = "mock-comment-id" + + describe "successfully", -> + beforeEach -> + @DocumentManager.deleteCommentWithLock = sinon.stub().callsArgWith(3) + @HttpController.deleteComment(@req, @res, @next) + + it "should accept the change", -> + @DocumentManager.deleteCommentWithLock + .calledWith(@project_id, @doc_id, @comment_id) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith({@project_id, @doc_id, @comment_id}, "deleting comment via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an errors occurs", -> + beforeEach -> + @DocumentManager.deleteCommentWithLock = sinon.stub().callsArgWith(3, new Error("oops")) + @HttpController.deleteComment(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true From 8d1aa56689a901886f6bdea3e37acd4ad7575d82 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 14 Feb 2017 14:34:28 +0000 Subject: [PATCH 199/769] enable sentry for docupdater --- services/document-updater/app.coffee | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 9026f653cb..9ed6c7e7b7 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -3,6 +3,9 @@ http = require("http") Settings = require('settings-sharelatex') logger = require('logger-sharelatex') logger.initialize("documentupdater") +if Settings.sentry?.dsn? 
+ logger.initializeErrorReporting(Settings.sentry.dsn) + RedisManager = require('./app/js/RedisManager') DispatchManager = require('./app/js/DispatchManager') Keys = require('./app/js/RedisKeyBuilder') @@ -99,4 +102,4 @@ app.listen port, host, -> logger.info "Document-updater starting up, listening on #{host}:#{port}" for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT'] - process.on signal, shutdownCleanly(signal) \ No newline at end of file + process.on signal, shutdownCleanly(signal) From a33d4f505b1fc4cc356e17e55d37db46404543e1 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 14 Feb 2017 15:20:05 +0000 Subject: [PATCH 200/769] log request object on error --- services/document-updater/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 9ed6c7e7b7..36c0cb3a72 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -84,7 +84,7 @@ app.use (error, req, res, next) -> else if error instanceof Errors.OpRangeNotAvailableError res.send 422 # Unprocessable Entity else - logger.error err: error, "request errored" + logger.error err: error, req: req, "request errored" res.send(500, "Oops, something went wrong") shutdownCleanly = (signal) -> From bd70aaa76c780a5cddab3d8c00965e782f0051b0 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 14 Feb 2017 16:11:43 +0000 Subject: [PATCH 201/769] add sha1 hash support on writes --- .../app/coffee/RedisKeyBuilder.coffee | 2 ++ .../app/coffee/RedisManager.coffee | 28 +++++++++++++++---- .../config/settings.defaults.coffee | 1 + .../RedisBackend/RedisBackendTests.coffee | 2 ++ .../RedisManager/RedisManagerTests.coffee | 5 +++- 5 files changed, 32 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee index 1b5e548809..adde3ee1c9 100644 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ b/services/document-updater/app/coffee/RedisKeyBuilder.coffee @@ -28,6 +28,8 @@ module.exports = RedisKeyBuilder = return (key_schema) -> key_schema.docOps({doc_id}) docVersion: ({doc_id}) -> return (key_schema) -> key_schema.docVersion({doc_id}) + docHash: ({doc_id}) -> + return (key_schema) -> key_schema.docHash({doc_id}) projectKey: ({doc_id}) -> return (key_schema) -> key_schema.projectKey({doc_id}) uncompressedHistoryOp: ({doc_id}) -> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index be5166b94c..d9f210f1d6 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -6,6 +6,7 @@ keys = require('./RedisKeyBuilder') logger = require('logger-sharelatex') metrics = require('./Metrics') Errors = require "./Errors" +crypto = require "crypto" # Make times easy to read minutes = 60 # seconds for Redis expire @@ -18,12 +19,15 @@ module.exports = RedisManager = callback = (error) -> timer.done() _callback(error) - logger.log project_id:project_id, doc_id:doc_id, version: version, "putting doc in redis" + docLines = JSON.stringify(docLines) + docHash = RedisManager._computeHash(docLines) + logger.log project_id:project_id, doc_id:doc_id, version: version, hash:docHash, "putting doc in redis" ranges = RedisManager._serializeRanges(ranges) multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), 
JSON.stringify(docLines) + multi.set keys.docLines(doc_id:doc_id), docLines multi.set keys.projectKey({doc_id:doc_id}), project_id multi.set keys.docVersion(doc_id:doc_id), version + multi.set keys.docHash(doc_id:doc_id), docHash if ranges? multi.set keys.ranges(doc_id:doc_id), ranges else @@ -46,6 +50,7 @@ module.exports = RedisManager = multi.del keys.docLines(doc_id:doc_id) multi.del keys.projectKey(doc_id:doc_id) multi.del keys.docVersion(doc_id:doc_id) + multi.del keys.docHash(doc_id:doc_id) multi.del keys.ranges(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? @@ -56,11 +61,17 @@ module.exports = RedisManager = multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) multi.get keys.docVersion(doc_id:doc_id) + multi.get keys.docHash(doc_id:doc_id) multi.get keys.projectKey(doc_id:doc_id) multi.get keys.ranges(doc_id:doc_id) - multi.exec (error, [docLines, version, doc_project_id, ranges])-> + multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges])-> timer.done() return callback(error) if error? + if docLines? + computedHash = RedisManager._computeHash(docLines) + if computedHash isnt storedHash + logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, computedHash: computedHash, storedHash: storedHash, "hash mismatch on retrieved document" + try docLines = JSON.parse docLines ranges = RedisManager._deserializeRanges(ranges) @@ -121,8 +132,11 @@ module.exports = RedisManager = return callback(error) jsonOps = appliedOps.map (op) -> JSON.stringify op multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines) + newDocLines = JSON.stringify(docLines) + newHash = RedisManager._computeHash(newDocLines) + multi.set keys.docLines(doc_id:doc_id), newDocLines multi.set keys.docVersion(doc_id:doc_id), newVersion + multi.set keys.docHash(doc_id:doc_id), newHash if jsonOps.length > 0 multi.rpush keys.docOps(doc_id: doc_id), jsonOps... multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL @@ -150,4 +164,8 @@ module.exports = RedisManager = if !ranges? 
or ranges == "" return {} else - return JSON.parse(ranges) \ No newline at end of file + return JSON.parse(ranges) + + _computeHash: (docLines) -> + # use sha1 checksum of doclines to detect data corruption + return crypto.createHash('sha1').update(docLines).digest('hex') diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 42daf505d9..ae0f9fe681 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -30,6 +30,7 @@ module.exports = docLines: ({doc_id}) -> "doclines:#{doc_id}" docOps: ({doc_id}) -> "DocOps:#{doc_id}" docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + docHash: ({doc_id}) -> "DocHash:#{doc_id}" projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index 814a0b932d..52bb69bebb 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -19,6 +19,7 @@ describe "RedisBackend", -> docLines: ({doc_id}) -> "doclines:#{doc_id}" docOps: ({doc_id}) -> "DocOps:#{doc_id}" docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + docHash: ({doc_id}) -> "DocHash:#{doc_id}" projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" @@ -33,6 +34,7 @@ describe "RedisBackend", -> docLines: ({doc_id}) -> "doclines:{#{doc_id}}" docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + docHash: ({doc_id}) -> "DocHash:{#{doc_id}}" projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 420d2039b4..6e235b4a9d 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -4,6 +4,7 @@ should = chai.should() modulePath = "../../../../app/js/RedisManager.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors" +crypto = require "crypto" describe "RedisManager", -> beforeEach -> @@ -19,6 +20,7 @@ describe "RedisManager", -> docLines: ({doc_id}) -> "doclines:#{doc_id}" docOps: ({doc_id}) -> "DocOps:#{doc_id}" docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + docHash: ({doc_id}) -> "DocHash:#{doc_id}" projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" @@ -38,10 +40,11 @@ describe "RedisManager", -> @lines = ["one", "two", "three"] @jsonlines = JSON.stringify @lines @version = 42 + @hash = crypto.createHash('sha1').update(@jsonlines).digest('hex') @ranges = { comments: "mock", entries: "mock" } @json_ranges = JSON.stringify @ranges @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @project_id, @json_ranges]) + 
@rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges]) describe "successfully", -> beforeEach -> From b5a4458b68cdda5f24c2721beec2fe7a0a05c477 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 15 Feb 2017 14:12:36 +0000 Subject: [PATCH 202/769] check sha1 hash value only if present --- services/document-updater/app/coffee/RedisManager.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index d9f210f1d6..ab95d1129f 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -67,7 +67,9 @@ module.exports = RedisManager = multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges])-> timer.done() return callback(error) if error? - if docLines? + + # check sha1 hash value if present + if docLines? and storedHash? computedHash = RedisManager._computeHash(docLines) if computedHash isnt storedHash logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, computedHash: computedHash, storedHash: storedHash, "hash mismatch on retrieved document" From de7f760ea41c5f16dc0591934fb2e3c81eb12cd1 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 16 Feb 2017 17:09:08 +0100 Subject: [PATCH 203/769] Update Ranges Tracker --- .../app/coffee/RangesTracker.coffee | 24 ++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index e31b84f051..0430ea2945 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -105,6 +105,7 @@ load = (EventEmitter) -> throw new Error("unknown op type") addComment: (op, metadata) -> + # TODO: Don't allow overlapping comments? @comments.push comment = { id: op.t or @newId() op: # Copy because we'll modify in place @@ -167,12 +168,14 @@ load = (EventEmitter) -> op_length = op.i.length op_end = op.p + op_length + already_merged = false previous_change = null moved_changes = [] remove_changes = [] new_changes = [] - for change in @changes + + for change, i in @changes change_start = change.op.p if change.op.d? @@ -200,6 +203,16 @@ load = (EventEmitter) -> # Only merge inserts if they are from the same user is_same_user = metadata.user_id == change.metadata.user_id + # If this is an insert op at the end of an existing insert with a delete following, and it cancels out the following + # delete then we shouldn't append it to this insert, but instead only cancel the following delete. + # E.g. + # foo|<--- about to insert 'b' here + # inserted 'foo' --^ ^-- deleted 'bar' + # should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . + next_change = @changes[i+1] + is_op_adjacent_to_next_delete = next_change? and next_change.op.d? and op.p == change_end and next_change.op.p == op.p + will_op_cancel_next_delete = is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i + # If there is a delete at the start of the insert, and we're inserting # at the start, we SHOULDN'T merge since the delete acts as a partition. 
# The previous op will be the delete, but it's already been shifted by this insert @@ -222,7 +235,8 @@ load = (EventEmitter) -> if @track_changes and is_change_overlapping and !is_insert_blocked_by_delete and - !already_merged and + !already_merged and + !will_op_cancel_next_delete and is_same_user offset = op_start - change_start change.op.i = change.op.i.slice(0, offset) + op.i + change.op.i.slice(offset) @@ -396,9 +410,13 @@ load = (EventEmitter) -> @emit "changes:moved", moved_changes _addOp: (op, metadata) -> + # Don't take a reference to the existing op since we'll modify this in place with future changes + clone_op = {} + for k,v of op + clone_op[k] = v change = { id: @newId() - op: op + op: clone_op metadata: metadata } @changes.push change From 89537927b52c44563f6b4ffaaa9eed89a47d138a Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 17 Feb 2017 09:24:19 +0100 Subject: [PATCH 204/769] Update Ranges Tracker --- .../app/coffee/RangesTracker.coffee | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 0430ea2945..865ecf4ef6 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -410,14 +410,10 @@ load = (EventEmitter) -> @emit "changes:moved", moved_changes _addOp: (op, metadata) -> - # Don't take a reference to the existing op since we'll modify this in place with future changes - clone_op = {} - for k,v of op - clone_op[k] = v change = { id: @newId() - op: clone_op - metadata: metadata + op: @_clone(op) # Don't take a reference to the existing op since we'll modify this in place with future changes + metadata: @_clone(metadata) } @changes.push change @@ -489,6 +485,11 @@ load = (EventEmitter) -> else # Only update to the current change if we haven't removed it. previous_change = change return { moved_changes, remove_changes } + + _clone: (object) -> + clone = {} + (clone[k] = v for k,v of object) + return clone if define? 
define ["utils/EventEmitter"], load From 660752dfa6d6f4d469a7dd51fd9f6d29a6076707 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 17 Feb 2017 15:32:44 +0100 Subject: [PATCH 205/769] Increase change limit to 1000 --- services/document-updater/app/coffee/RangesManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index ee94933b8d..12efa7f154 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -3,7 +3,7 @@ logger = require "logger-sharelatex" module.exports = RangesManager = MAX_COMMENTS: 500 - MAX_CHANGES: 500 + MAX_CHANGES: 1000 applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> {changes, comments} = entries From 28536bfe73b2ff38756c7d7145f1d9533cf47cd4 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 17 Feb 2017 16:14:13 +0000 Subject: [PATCH 206/769] try running mochatest directly without compiling --- services/document-updater/test/acceptance/scripts/full-test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/scripts/full-test.sh b/services/document-updater/test/acceptance/scripts/full-test.sh index af8ad4103d..9f6167e667 100755 --- a/services/document-updater/test/acceptance/scripts/full-test.sh +++ b/services/document-updater/test/acceptance/scripts/full-test.sh @@ -11,7 +11,7 @@ echo ">> Server started" sleep 5 echo ">> Running acceptance tests..." -grunt --no-color test:acceptance +grunt --no-color mochaTest:acceptance _test_exit_code=$? echo ">> Killing server" From 37172a21157e1b17cdb60c4e0065fab2b1ab1064 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 20 Feb 2017 09:22:54 +0000 Subject: [PATCH 207/769] Debug Acceptance Tests: Try without npm rebuild --- services/document-updater/test/acceptance/scripts/full-test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/scripts/full-test.sh b/services/document-updater/test/acceptance/scripts/full-test.sh index 9f6167e667..8584cd17d0 100755 --- a/services/document-updater/test/acceptance/scripts/full-test.sh +++ b/services/document-updater/test/acceptance/scripts/full-test.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -npm rebuild +# npm rebuild echo ">> Starting server..." 
From a3c127e46936b63cd04309da6cf8cde586063fa9 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 20 Feb 2017 13:53:25 +0000 Subject: [PATCH 208/769] added unit tests --- .../RedisManager/RedisManagerTests.coffee | 44 ++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 6e235b4a9d..daad278174 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -59,7 +59,12 @@ describe "RedisManager", -> @rclient.get .calledWith("DocVersion:#{@doc_id}") .should.equal true - + + it 'should get the hash', -> + @rclient.get + .calledWith("DocHash:#{@doc_id}") + .should.equal true + it "should get the ranges", -> @rclient.get .calledWith("Ranges:#{@doc_id}") @@ -70,6 +75,26 @@ describe "RedisManager", -> .calledWith(null, @lines, @version, @ranges) .should.equal true + it 'should not log any errors', -> + @logger.error.calledWith() + .should.equal false + + describe "with a corrupted document", -> + beforeEach -> + @badHash = "INVALID-HASH-VALUE" + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges]) + @RedisManager.getDoc @project_id, @doc_id, @callback + + it 'should log a hash error', -> + @logger.error.calledWith() + .should.equal true + + it 'should return the document', -> + @callback + .calledWith(null, @lines, @version, @ranges) + .should.equal true + + describe "getDoc with an invalid project id", -> beforeEach -> @another_project_id = "project-id-456" @@ -177,6 +202,7 @@ describe "RedisManager", -> @lines = ["one", "two", "three"] @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] @version = 42 + @hash = crypto.createHash('sha1').update(JSON.stringify(@lines)).digest('hex') @ranges = { comments: "mock", entries: "mock" } @rclient.exec = sinon.stub().callsArg(0) @@ -200,6 +226,11 @@ describe "RedisManager", -> @rclient.set .calledWith("DocVersion:#{@doc_id}", @version) .should.equal true + + it "should set the hash", -> + @rclient.set + .calledWith("DocHash:#{@doc_id}", @hash) + .should.equal true it "should set the ranges", -> @rclient.set @@ -275,6 +306,7 @@ describe "RedisManager", -> @rclient.exec.yields() @lines = ["one", "two", "three"] @version = 42 + @hash = crypto.createHash('sha1').update(JSON.stringify(@lines)).digest('hex') @ranges = { comments: "mock", entries: "mock" } describe "with non-empty ranges", -> @@ -290,6 +322,11 @@ describe "RedisManager", -> @rclient.set .calledWith("DocVersion:#{@doc_id}", @version) .should.equal true + + it "should set the hash", -> + @rclient.set + .calledWith("DocHash:#{@doc_id}", @hash) + .should.equal true it "should set the ranges", -> @rclient.set @@ -336,6 +373,11 @@ describe "RedisManager", -> @rclient.del .calledWith("DocVersion:#{@doc_id}") .should.equal true + + it "should delete the hash", -> + @rclient.del + .calledWith("DocHash:#{@doc_id}") + .should.equal true it "should delete the project_id for the doc", -> @rclient.del From c57d3ce31c025605ad9ca467b697368068292d1f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 17 Feb 2017 12:29:15 +0000 Subject: [PATCH 209/769] compute hash on write in redis server --- .../app/coffee/RedisManager.coffee | 24 +++++++++++++++---- .../RedisManager/RedisManagerTests.coffee | 14 ++++++----- 2 files 
changed, 28 insertions(+), 10 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index ab95d1129f..f3d6360a0d 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -11,6 +11,13 @@ crypto = require "crypto" # Make times easy to read minutes = 60 # seconds for Redis expire +# LUA script to write document and return hash +# arguments: docLinesKey docLines +setScript = """ + redis.call('set', KEYS[1], ARGV[1]) + return redis.sha1hex(ARGV[1]) +""" + module.exports = RedisManager = rclient: rclient @@ -24,7 +31,7 @@ module.exports = RedisManager = logger.log project_id:project_id, doc_id:doc_id, version: version, hash:docHash, "putting doc in redis" ranges = RedisManager._serializeRanges(ranges) multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), docLines + multi.eval setScript, 1, keys.docLines(doc_id:doc_id), docLines multi.set keys.projectKey({doc_id:doc_id}), project_id multi.set keys.docVersion(doc_id:doc_id), version multi.set keys.docHash(doc_id:doc_id), docHash @@ -32,8 +39,13 @@ module.exports = RedisManager = multi.set keys.ranges(doc_id:doc_id), ranges else multi.del keys.ranges(doc_id:doc_id) - multi.exec (error) -> + multi.exec (error, result) -> return callback(error) if error? + # check the hash computed on the redis server + writeHash = result?[0] + if writeHash? and writeHash isnt docHash + logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, "hash mismatch on putDocInMemory" + # update docsInProject set rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback removeDocFromMemory : (project_id, doc_id, _callback)-> @@ -136,7 +148,7 @@ module.exports = RedisManager = multi = rclient.multi() newDocLines = JSON.stringify(docLines) newHash = RedisManager._computeHash(newDocLines) - multi.set keys.docLines(doc_id:doc_id), newDocLines + multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines multi.set keys.docVersion(doc_id:doc_id), newVersion multi.set keys.docHash(doc_id:doc_id), newHash if jsonOps.length > 0 @@ -148,8 +160,12 @@ module.exports = RedisManager = multi.set keys.ranges(doc_id:doc_id), ranges else multi.del keys.ranges(doc_id:doc_id) - multi.exec (error, replys) -> + multi.exec (error, result) -> return callback(error) if error? + # check the hash computed on the redis server + writeHash = result?[0] + if writeHash? 
and writeHash isnt newHash + logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, "hash mismatch on updateDocument" return callback() getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index daad278174..618537f819 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -197,6 +197,7 @@ describe "RedisManager", -> @rclient.expire = sinon.stub() @rclient.ltrim = sinon.stub() @rclient.del = sinon.stub() + @rclient.eval = sinon.stub() @RedisManager.getDocVersion = sinon.stub() @lines = ["one", "two", "three"] @@ -218,8 +219,8 @@ describe "RedisManager", -> .should.equal true it "should set the doclines", -> - @rclient.set - .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) + @rclient.eval + .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true it "should set the version", -> @@ -279,8 +280,8 @@ describe "RedisManager", -> .should.equal false it "should still set the doclines", -> - @rclient.set - .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) + @rclient.eval + .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true describe "with empty ranges", -> @@ -303,6 +304,7 @@ describe "RedisManager", -> @rclient.set = sinon.stub() @rclient.sadd = sinon.stub().yields() @rclient.del = sinon.stub() + @rclient.eval = sinon.stub() @rclient.exec.yields() @lines = ["one", "two", "three"] @version = 42 @@ -314,8 +316,8 @@ describe "RedisManager", -> @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, done it "should set the lines", -> - @rclient.set - .calledWith("doclines:#{@doc_id}", JSON.stringify @lines) + @rclient.eval + .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true it "should set the version", -> From 62165ddeab91ad92ea1e9334a2c44142e5c7dfdd Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 20 Feb 2017 15:33:19 +0000 Subject: [PATCH 210/769] add unit tests --- .../RedisManager/RedisManagerTests.coffee | 35 +++++++++++++++++-- 1 file changed, 33 insertions(+), 2 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 618537f819..ccf9d389c9 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -206,7 +206,7 @@ describe "RedisManager", -> @hash = crypto.createHash('sha1').update(JSON.stringify(@lines)).digest('hex') @ranges = { comments: "mock", entries: "mock" } - @rclient.exec = sinon.stub().callsArg(0) + @rclient.exec = sinon.stub().callsArg(0, null, [@hash]) describe "with a consistent version", -> beforeEach -> @@ -255,6 +255,10 @@ describe "RedisManager", -> it "should call the callback", -> @callback.called.should.equal true + + it 'should not log any errors', -> + @logger.error.calledWith() + .should.equal false describe "with an inconsistent version", -> beforeEach -> @@ -299,16 +303,30 @@ describe "RedisManager", -> .calledWith("Ranges:#{@doc_id}") .should.equal true + describe "with a 
corrupted write", -> + beforeEach -> + @badHash = "INVALID-HASH-VALUE" + @rclient.exec = sinon.stub().callsArgWith(0, null, [@badHash]) + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback + + it 'should log a hash error', -> + @logger.error.calledWith() + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + describe "putDocInMemory", -> beforeEach -> @rclient.set = sinon.stub() @rclient.sadd = sinon.stub().yields() @rclient.del = sinon.stub() @rclient.eval = sinon.stub() - @rclient.exec.yields() @lines = ["one", "two", "three"] @version = 42 @hash = crypto.createHash('sha1').update(JSON.stringify(@lines)).digest('hex') + @rclient.exec = sinon.stub().callsArgWith(0, null, [@hash]) @ranges = { comments: "mock", entries: "mock" } describe "with non-empty ranges", -> @@ -344,6 +362,10 @@ describe "RedisManager", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}", @doc_id) .should.equal true + + it 'should not log any errors', -> + @logger.error.calledWith() + .should.equal false describe "with empty ranges", -> beforeEach (done) -> @@ -359,6 +381,15 @@ describe "RedisManager", -> .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal false + describe "with a corrupted write", -> + beforeEach (done) -> + @rclient.exec = sinon.stub().callsArgWith(0, null, ["INVALID-HASH-VALUE"]) + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, done + + it 'should log a hash error', -> + @logger.error.calledWith() + .should.equal true + describe "removeDocFromMemory", -> beforeEach (done) -> @rclient.del = sinon.stub() From 5eb339e525e2c310192bacf53b79b90d29f9924d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 21 Feb 2017 16:03:06 +0000 Subject: [PATCH 211/769] fix utf8 encoding in sha1 hash --- .../document-updater/app/coffee/RedisManager.coffee | 7 +++++-- .../coffee/RedisManager/RedisManagerTests.coffee | 12 ++++++------ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index f3d6360a0d..f2187a8abf 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -185,5 +185,8 @@ module.exports = RedisManager = return JSON.parse(ranges) _computeHash: (docLines) -> - # use sha1 checksum of doclines to detect data corruption - return crypto.createHash('sha1').update(docLines).digest('hex') + # use sha1 checksum of doclines to detect data corruption. 
+ # + # note: must specify 'utf8' encoding explicitly, as the default is + # binary in node < v5 + return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex') diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index ccf9d389c9..6cfd8bce3c 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -37,10 +37,10 @@ describe "RedisManager", -> describe "getDoc", -> beforeEach -> - @lines = ["one", "two", "three"] + @lines = ["one", "two", "three", "これは"] # include some utf8 @jsonlines = JSON.stringify @lines @version = 42 - @hash = crypto.createHash('sha1').update(@jsonlines).digest('hex') + @hash = crypto.createHash('sha1').update(@jsonlines,'utf8').digest('hex') @ranges = { comments: "mock", entries: "mock" } @json_ranges = JSON.stringify @ranges @rclient.get = sinon.stub() @@ -200,10 +200,10 @@ describe "RedisManager", -> @rclient.eval = sinon.stub() @RedisManager.getDocVersion = sinon.stub() - @lines = ["one", "two", "three"] + @lines = ["one", "two", "three", "これは"] @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] @version = 42 - @hash = crypto.createHash('sha1').update(JSON.stringify(@lines)).digest('hex') + @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') @ranges = { comments: "mock", entries: "mock" } @rclient.exec = sinon.stub().callsArg(0, null, [@hash]) @@ -323,9 +323,9 @@ describe "RedisManager", -> @rclient.sadd = sinon.stub().yields() @rclient.del = sinon.stub() @rclient.eval = sinon.stub() - @lines = ["one", "two", "three"] + @lines = ["one", "two", "three", "これは"] @version = 42 - @hash = crypto.createHash('sha1').update(JSON.stringify(@lines)).digest('hex') + @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') @rclient.exec = sinon.stub().callsArgWith(0, null, [@hash]) @ranges = { comments: "mock", entries: "mock" } From 19e7522d3ffad289fdc96ee322ac7bfc6fed75eb Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 22 Feb 2017 13:46:34 +0000 Subject: [PATCH 212/769] make logging of hash errors optional --- services/document-updater/app/coffee/RedisManager.coffee | 8 +++++--- .../unit/coffee/RedisManager/RedisManagerTests.coffee | 1 + 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index f2187a8abf..7a2d9c39e6 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -18,6 +18,8 @@ setScript = """ return redis.sha1hex(ARGV[1]) """ +logHashErrors = Settings.documentupdater?.logHashErrors + module.exports = RedisManager = rclient: rclient @@ -43,7 +45,7 @@ module.exports = RedisManager = return callback(error) if error? # check the hash computed on the redis server writeHash = result?[0] - if writeHash? and writeHash isnt docHash + if logHashErrors and writeHash? and writeHash isnt docHash logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, "hash mismatch on putDocInMemory" # update docsInProject set rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback @@ -83,7 +85,7 @@ module.exports = RedisManager = # check sha1 hash value if present if docLines? and storedHash? 
computedHash = RedisManager._computeHash(docLines) - if computedHash isnt storedHash + if logHashErrors and computedHash isnt storedHash logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, computedHash: computedHash, storedHash: storedHash, "hash mismatch on retrieved document" try @@ -164,7 +166,7 @@ module.exports = RedisManager = return callback(error) if error? # check the hash computed on the redis server writeHash = result?[0] - if writeHash? and writeHash isnt newHash + if logHashErrors and writeHash? and writeHash isnt newHash logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, "hash mismatch on updateDocument" return callback() diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 6cfd8bce3c..c61cfddfc7 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -26,6 +26,7 @@ describe "RedisManager", -> docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + "settings-sharelatex": {documentupdater: {logHashErrors: true}} "./Metrics": @metrics = inc: sinon.stub() Timer: class Timer From 168bab696068b382ea5853ed6029a681b4304112 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 22 Feb 2017 14:32:51 +0000 Subject: [PATCH 213/769] make hash error logging fine grained --- services/document-updater/app/coffee/RedisManager.coffee | 8 +++++--- .../unit/coffee/RedisManager/RedisManagerTests.coffee | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 7a2d9c39e6..1eef4971e7 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -19,6 +19,8 @@ setScript = """ """ logHashErrors = Settings.documentupdater?.logHashErrors +logHashReadErrors = logHashErrors?.read +logHashWriteErrors = logHashErrors?.write module.exports = RedisManager = rclient: rclient @@ -45,7 +47,7 @@ module.exports = RedisManager = return callback(error) if error? # check the hash computed on the redis server writeHash = result?[0] - if logHashErrors and writeHash? and writeHash isnt docHash + if logHashWriteErrors and writeHash? and writeHash isnt docHash logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, "hash mismatch on putDocInMemory" # update docsInProject set rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback @@ -85,7 +87,7 @@ module.exports = RedisManager = # check sha1 hash value if present if docLines? and storedHash? computedHash = RedisManager._computeHash(docLines) - if logHashErrors and computedHash isnt storedHash + if logHashReadErrors and computedHash isnt storedHash logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, computedHash: computedHash, storedHash: storedHash, "hash mismatch on retrieved document" try @@ -166,7 +168,7 @@ module.exports = RedisManager = return callback(error) if error? # check the hash computed on the redis server writeHash = result?[0] - if logHashErrors and writeHash? and writeHash isnt newHash + if logHashWriteErrors and writeHash? 
and writeHash isnt newHash logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, "hash mismatch on updateDocument" return callback() diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index c61cfddfc7..3e3128324e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -26,7 +26,7 @@ describe "RedisManager", -> docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - "settings-sharelatex": {documentupdater: {logHashErrors: true}} + "settings-sharelatex": {documentupdater: {logHashErrors: {write:true, read:true}}} "./Metrics": @metrics = inc: sinon.stub() Timer: class Timer From ed59ca32fd47d125ff6daa02dd54ac83ba7c0a2b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 22 Feb 2017 15:58:40 +0000 Subject: [PATCH 214/769] update logger to 1.5.2 - rate limiting on sentry --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index eaa8c726ec..72a2e28121 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -11,7 +11,7 @@ "coffee-script": "1.4.0", "express": "3.3.4", "ioredis": "^2.2.0", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.1", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.2", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "redis-sharelatex": "0.0.9", From d6b5cc6389443a98ab6d5eacf058a3d355530a1e Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 22 Feb 2017 18:11:55 +0100 Subject: [PATCH 215/769] Increase changes limit to 1200 --- services/document-updater/app/coffee/RangesManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 12efa7f154..5fdd480db3 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -3,7 +3,7 @@ logger = require "logger-sharelatex" module.exports = RangesManager = MAX_COMMENTS: 500 - MAX_CHANGES: 1000 + MAX_CHANGES: 1200 applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> {changes, comments} = entries From 5ab903169a9372bcdb4cd2cd08ced2a6b1385460 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 22 Feb 2017 18:26:42 +0100 Subject: [PATCH 216/769] Revert "update logger to 1.5.2 - rate limiting on sentry" This reverts commit ba1c4a84959cd2eb33d7d624239d3e621ebae1fd.
--- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 72a2e28121..eaa8c726ec 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -11,7 +11,7 @@ "coffee-script": "1.4.0", "express": "3.3.4", "ioredis": "^2.2.0", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.2", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.1", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "redis-sharelatex": "0.0.9", From 49271d4ee4b915d37828f6f3ff10ff5265031322 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 23 Feb 2017 10:15:56 +0000 Subject: [PATCH 217/769] update logger to 1.5.2 - rate limiting on sentry --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index eaa8c726ec..72a2e28121 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -11,7 +11,7 @@ "coffee-script": "1.4.0", "express": "3.3.4", "ioredis": "^2.2.0", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.1", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.2", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "redis-sharelatex": "0.0.9", From e1481df76a42ff5aaff62fdb6efda08aac80e241 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 23 Feb 2017 11:39:19 +0100 Subject: [PATCH 218/769] Don't consume partial parts of delete when inserting a change --- services/document-updater/app/coffee/RangesTracker.coffee | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 865ecf4ef6..6a07e38c0e 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -206,12 +206,12 @@ load = (EventEmitter) -> # If this is an insert op at the end of an existing insert with a delete following, and it cancels out the following # delete then we shouldn't append it to this insert, but instead only cancel the following delete. # E.g. - # foo|<--- about to insert 'b' here + # foo|<--- about to insert 'bar' here # inserted 'foo' --^ ^-- deleted 'bar' - # should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . + # should become just 'foo' not 'foobar' (with the delete marker disappearing), . next_change = @changes[i+1] is_op_adjacent_to_next_delete = next_change? and next_change.op.d? and op.p == change_end and next_change.op.p == op.p - will_op_cancel_next_delete = is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i + will_op_cancel_next_delete = is_op_adjacent_to_next_delete and next_change.op.d == op.i # If there is a delete at the start of the insert, and we're inserting # at the start, we SHOULDN'T merge since the delete acts as a partition. 
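The condition that patch 218 just changed is easier to follow outside the class. Below is an illustrative reduction to a pure function; willOpCancelNextDelete is a hypothetical name used only for this sketch, not an export of RangesTracker. Here op is a ShareJS insert component {i, p}, change_end is the end offset of the preceding tracked insert, and next_change is the tracked change following it, if any.

# Adjacency test after this patch: an insert sitting exactly between an
# insert range and a following delete range cancels the delete only when it
# reproduces the deleted text in full.
willOpCancelNextDelete = (op, change_end, next_change) ->
  is_adjacent = next_change? and next_change.op.d? and
      op.p == change_end and next_change.op.p == op.p
  # Previously a prefix match (next_change.op.d.slice(0, op.i.length) == op.i)
  # was enough, which consumed partial parts of the delete.
  is_adjacent and next_change.op.d == op.i

So with 'foo' inserted and 'bar' deleted immediately after it, inserting 'bar' at the join still collapses the pair to plain 'foo', while inserting just 'b' now merges into the insert and leaves the whole delete marker intact.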
From 0420929e2a42740e29612e5d999a5975fc6cc7af Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 23 Feb 2017 13:57:47 +0100 Subject: [PATCH 219/769] Reduce the data logged when updating ranges --- services/document-updater/app/coffee/RangesManager.coffee | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 5fdd480db3..f39dea9537 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -7,7 +7,6 @@ module.exports = RangesManager = applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> {changes, comments} = entries - logger.log {changes, comments, updates}, "applying updates to ranges" rangesTracker = new RangesTracker(changes, comments) for update in updates rangesTracker.track_changes = !!update.meta.tc @@ -20,12 +19,12 @@ module.exports = RangesManager = return callback new Error("too many comments or tracked changes") response = RangesManager._getRanges rangesTracker - logger.log {response}, "applied updates to ranges" + logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges" callback null, response acceptChange: (change_id, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges - logger.log {changes, comments, change_id}, "accepting change in ranges" + logger.log {change_id}, "accepting change in ranges" rangesTracker = new RangesTracker(changes, comments) rangesTracker.removeChangeId(change_id) response = RangesManager._getRanges(rangesTracker) @@ -33,7 +32,7 @@ module.exports = RangesManager = deleteComment: (comment_id, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges - logger.log {changes, comments, comment_id}, "deleting comment in ranges" + logger.log {comment_id}, "deleting comment in ranges" rangesTracker = new RangesTracker(changes, comments) rangesTracker.removeCommentId(comment_id) response = RangesManager._getRanges(rangesTracker) From 80284e1b0141cf370ebc234c0d51871e9e373e29 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 24 Feb 2017 14:21:06 +0100 Subject: [PATCH 220/769] Only cancel deletes with inserts on undo and reject --- .../app/coffee/RangesTracker.coffee | 82 ++++++++++++------- .../app/coffee/sharejs/types/text-api.coffee | 16 ++-- .../app/coffee/sharejs/types/text.coffee | 33 +++++--- .../coffee/ShareJS/TextTransformTests.coffee | 53 +++++++++++- 4 files changed, 133 insertions(+), 51 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 6a07e38c0e..a9c43e9816 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -1,5 +1,5 @@ -load = (EventEmitter) -> - class RangesTracker extends EventEmitter +load = () -> + class RangesTracker # The purpose of this class is to track a set of inserts and deletes to a document, like # track changes in Word. We store these as a set of ShareJs style ranges: # {i: "foo", p: 42} # Insert 'foo' at offset 42 @@ -36,6 +36,7 @@ load = (EventEmitter) -> # middle of a previous insert by the first user, the original insert will be split into two. 
constructor: (@changes = [], @comments = []) -> @setIdSeed(RangesTracker.generateIdSeed()) + @resetDirtyState() getIdSeed: () -> return @id_seed @@ -75,7 +76,7 @@ load = (EventEmitter) -> comment = @getComment(comment_id) return if !comment? @comments = @comments.filter (c) -> c.id != comment_id - @emit "comment:removed", comment + @_markAsDirty comment, "comment", "removed" getChange: (change_id) -> change = null @@ -103,7 +104,11 @@ load = (EventEmitter) -> @addComment(op, metadata) else throw new Error("unknown op type") - + + applyOps: (ops, metadata = {}) -> + for op in ops + @applyOp(op, metadata) + addComment: (op, metadata) -> # TODO: Don't allow overlapping comments? @comments.push comment = { @@ -114,18 +119,18 @@ load = (EventEmitter) -> t: op.t metadata } - @emit "comment:added", comment + @_markAsDirty comment, "comment", "added" return comment applyInsertToComments: (op) -> for comment in @comments if op.p <= comment.op.p comment.op.p += op.i.length - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" else if op.p < comment.op.p + comment.op.c.length offset = op.p - comment.op.p comment.op.c = comment.op.c[0..(offset-1)] + op.i + comment.op.c[offset...] - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" applyDeleteToComments: (op) -> op_start = op.p @@ -138,7 +143,7 @@ load = (EventEmitter) -> if op_end <= comment_start # delete is fully before comment comment.op.p -= op_length - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" else if op_start >= comment_end # delete is fully after comment, nothing to do else @@ -161,12 +166,13 @@ load = (EventEmitter) -> comment.op.p = Math.min(comment_start, op_start) comment.op.c = remaining_before + remaining_after - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" applyInsertToChanges: (op, metadata) -> op_start = op.p op_length = op.i.length op_end = op.p + op_length + undoing = !!op.u already_merged = false @@ -184,8 +190,9 @@ load = (EventEmitter) -> change.op.p += op_length moved_changes.push change else if op_start == change_start - # If the insert matches the start of the delete, just remove it from the delete instead - if change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i + # If we are undoing, then we want to cancel any existing delete ranges if we can. + # Check if the insert matches the start of the delete, and just remove it from the delete instead if so. + if undoing and change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i change.op.d = change.op.d.slice(op.i.length) change.op.p += op.i.length if change.op.d == "" @@ -203,15 +210,15 @@ load = (EventEmitter) -> # Only merge inserts if they are from the same user is_same_user = metadata.user_id == change.metadata.user_id - # If this is an insert op at the end of an existing insert with a delete following, and it cancels out the following - # delete then we shouldn't append it to this insert, but instead only cancel the following delete. + # If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also + # an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. # E.g. - # foo|<--- about to insert 'bar' here + # foo|<--- about to insert 'b' here # inserted 'foo' --^ ^-- deleted 'bar' - # should become just 'foo' not 'foobar' (with the delete marker disappearing), . 
+ # should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . next_change = @changes[i+1] is_op_adjacent_to_next_delete = next_change? and next_change.op.d? and op.p == change_end and next_change.op.p == op.p - will_op_cancel_next_delete = is_op_adjacent_to_next_delete and next_change.op.d == op.i + will_op_cancel_next_delete = undoing and is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i # If there is a delete at the start of the insert, and we're inserting # at the start, we SHOULDN'T merge since the delete acts as a partition. @@ -281,8 +288,8 @@ load = (EventEmitter) -> for change in remove_changes @_removeChange change - if moved_changes.length > 0 - @emit "changes:moved", moved_changes + for change in moved_changes + @_markAsDirty change, "change", "moved" applyDeleteToChanges: (op, metadata) -> op_start = op.p @@ -406,8 +413,8 @@ load = (EventEmitter) -> @_removeChange change moved_changes = moved_changes.filter (c) -> c != change - if moved_changes.length > 0 - @emit "changes:moved", moved_changes + for change in moved_changes + @_markAsDirty change, "change", "moved" _addOp: (op, metadata) -> change = { @@ -427,17 +434,11 @@ load = (EventEmitter) -> else return -1 - if op.d? - @emit "delete:added", change - else if op.i? - @emit "insert:added", change + @_markAsDirty(change, "change", "added") _removeChange: (change) -> @changes = @changes.filter (c) -> c.id != change.id - if change.op.d? - @emit "delete:removed", change - else if change.op.i? - @emit "insert:removed", change + @_markAsDirty change, "change", "removed" _applyOpModifications: (content, op_modifications) -> # Put in descending position order, with deleting first if at the same offset @@ -486,13 +487,32 @@ load = (EventEmitter) -> previous_change = change return { moved_changes, remove_changes } + resetDirtyState: () -> + @_dirtyState = { + comment: { + moved: {} + removed: {} + added: {} + } + change: { + moved: {} + removed: {} + added: {} + } + } + + getDirtyState: () -> + return @_dirtyState + + _markAsDirty: (object, type, action) -> + @_dirtyState[type][action][object.id] = object + _clone: (object) -> clone = {} (clone[k] = v for k,v of object) return clone if define? 
- define ["utils/EventEmitter"], load + define [], load else - EventEmitter = require("events").EventEmitter - module.exports = load(EventEmitter) \ No newline at end of file + module.exports = load() diff --git a/services/document-updater/app/coffee/sharejs/types/text-api.coffee b/services/document-updater/app/coffee/sharejs/types/text-api.coffee index 96243ceffb..98bb3fd503 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-api.coffee +++ b/services/document-updater/app/coffee/sharejs/types/text-api.coffee @@ -11,14 +11,20 @@ text.api = # Get the text contents of a document getText: -> @snapshot - insert: (pos, text, callback) -> - op = [{p:pos, i:text}] + insert: (pos, text, fromUndo, callback) -> + op = {p:pos, i:text} + if fromUndo + op.u = true + op = [op] @submitOp op, callback op - del: (pos, length, callback) -> - op = [{p:pos, d:@snapshot[pos...(pos + length)]}] + del: (pos, length, fromUndo, callback) -> + op = {p:pos, d:@snapshot[pos...(pos + length)]} + if fromUndo + op.u = true + op = [op] @submitOp op, callback op @@ -28,5 +34,5 @@ text.api = for component in op if component.i != undefined @emit 'insert', component.p, component.i - else + else if component.d != undefined @emit 'delete', component.p, component.d diff --git a/services/document-updater/app/coffee/sharejs/types/text.coffee b/services/document-updater/app/coffee/sharejs/types/text.coffee index 2a3b79997d..ee7bf57043 100644 --- a/services/document-updater/app/coffee/sharejs/types/text.coffee +++ b/services/document-updater/app/coffee/sharejs/types/text.coffee @@ -56,6 +56,13 @@ text.apply = (snapshot, op) -> throw new Error "Unknown op type" snapshot +cloneAndModify = (op, modifications) -> + newOp = {} + for k,v of op + newOp[k] = v + for k,v of modifications + newOp[k] = v + return newOp # Exported for use by the random op generator. # @@ -69,10 +76,10 @@ text._append = append = (newOp, c) -> last = newOp[newOp.length - 1] # Compose the insert into the previous insert if possible - if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length) - newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p} - else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length) - newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p} + if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length) and last.u == c.u + newOp[newOp.length - 1] = cloneAndModify(last, {i:strInject(last.i, c.p - last.p, c.i)}) + else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length) and last.u == c.u + newOp[newOp.length - 1] = cloneAndModify(last, {d:strInject(c.d, last.p - c.p, last.d), p: c.p}) else newOp.push c @@ -150,25 +157,25 @@ text._tc = transformComponent = (dest, c, otherC, side) -> checkValidOp [otherC] if c.i? - append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')} + append dest, cloneAndModify(c, {p:transformPosition(c.p, otherC, side == 'right')}) else if c.d? # Delete if otherC.i? # delete vs insert s = c.d if c.p < otherC.p - append dest, {d:s[...otherC.p - c.p], p:c.p} + append dest, cloneAndModify(c, {d:s[...otherC.p - c.p]}) s = s[(otherC.p - c.p)..] if s != '' - append dest, {d:s, p:c.p + otherC.i.length} + append dest, cloneAndModify(c, {d:s, p:c.p + otherC.i.length}) else if otherC.d? # Delete vs delete if c.p >= otherC.p + otherC.d.length - append dest, {d:c.d, p:c.p - otherC.d.length} + append dest, cloneAndModify(c, {p:c.p - otherC.d.length}) else if c.p + c.d.length <= otherC.p append dest, c else # They overlap somewhere. 
- newC = {d:'', p:c.p} + newC = cloneAndModify(c, {d:''}) if c.p < otherC.p newC.d = c.d[...(otherC.p - c.p)] if c.p + c.d.length > otherC.p + otherC.d.length @@ -198,18 +205,18 @@ text._tc = transformComponent = (dest, c, otherC, side) -> if c.p < otherC.p < c.p + c.c.length offset = otherC.p - c.p new_c = (c.c[0..(offset-1)] + otherC.i + c.c[offset...]) - append dest, {c:new_c, p:c.p, t: c.t} + append dest, cloneAndModify(c, {c:new_c}) else - append dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t} + append dest, cloneAndModify(c, {p:transformPosition(c.p, otherC, true)}) else if otherC.d? if c.p >= otherC.p + otherC.d.length - append dest, {c:c.c, p:c.p - otherC.d.length, t: c.t} + append dest, cloneAndModify(c, {p:c.p - otherC.d.length}) else if c.p + c.c.length <= otherC.p append dest, c else # Delete overlaps comment # They overlap somewhere. - newC = {c:'', p:c.p, t: c.t} + newC = cloneAndModify(c, {c:''}) if c.p < otherC.p newC.c = c.c[...(otherC.p - c.p)] if c.p + c.c.length > otherC.p + otherC.d.length diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee index 81440bfe5b..5477b47b38 100644 --- a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee @@ -27,6 +27,11 @@ describe "ShareJS text type", -> dest = [] text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'left') dest.should.deep.equal [{ i: "foo", p: 3 }] + + it "should preserve the undo flag", -> + dest = [] + text._tc(dest, { i: "foo", p: 9, u: true }, { i: "bar", p: 3 }) + dest.should.deep.equal [{ i: "foo", p: 12, u: true }] describe "insert / delete", -> it "with a delete before", -> @@ -46,9 +51,13 @@ describe "ShareJS text type", -> it "with a delete at the same place with side == 'left'", -> dest = [] - text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'left') dest.should.deep.equal [{ i: "foo", p: 3 }] + + it "should preserve the undo flag", -> + dest = [] + text._tc(dest, { i: "foo", p: 9, u: true }, { d: "bar", p: 3 }) + dest.should.deep.equal [{ i: "foo", p: 6, u: true }] describe "delete / insert", -> it "with an insert before", -> @@ -75,7 +84,11 @@ describe "ShareJS text type", -> dest = [] text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 4 }) dest.should.deep.equal [{ d: "f", p: 3 }, { d: "oo", p: 6 }] - + + it "should preserve the undo flag", -> + dest = [] + text._tc(dest, { d: "foo", p: 9, u: true }, { i: "bar", p: 3 }) + dest.should.deep.equal [{ d: "foo", p: 12, u: true }] describe "delete / delete", -> it "with a delete before", -> @@ -112,6 +125,11 @@ describe "ShareJS text type", -> dest = [] text._tc(dest, { d: "foo", p: 6 }, { d: "abcfoo123", p: 3 }) dest.should.deep.equal [] + + it "should preserve the undo flag", -> + dest = [] + text._tc(dest, { d: "foo", p: 9, u: true }, { d: "bar", p: 3 }) + dest.should.deep.equal [{ d: "foo", p: 6, u: true }] describe "comment / insert", -> it "with an insert before", -> @@ -210,6 +228,37 @@ describe "ShareJS text type", -> text.apply("foo123bar", [{ c: "456", p: 3 }]) ).should.throw(Error) + describe "_append", -> + it "should combine adjacent inserts", -> + dest = [{ i: "foo", p: 3 }] + text._append dest, { i: "bar", p: 6 } + dest.should.deep.equal [{ i: "foobar", p: 3 }] + + it "should combine adjacent undo inserts", -> + dest = [{ i: "foo", p: 3, u: true }] + text._append dest, { i: "bar", p: 6, u: true } + 
dest.should.deep.equal [{ i: "foobar", p: 3, u: true }] + + it "should not combine an undo and a normal insert", -> + dest = [{ i: "foo", p: 3, u: true }] + text._append dest, { i: "bar", p: 6 } + dest.should.deep.equal [{ i: "foo", p: 3, u: true }, { i: "bar", p: 6 }] + + it "should combine adjacent deletes", -> + dest = [{ d: "bar", p: 6 }] + text._append dest, { d: "foobaz", p: 3 } + dest.should.deep.equal [{ d: "foobarbaz", p: 3 }] + + it "should combine adjacent undo deletes", -> + dest = [{ d: "foo", p: 3, u: true }] + text._append dest, { d: "bar", p: 3, u: true } + dest.should.deep.equal [{ d: "foobar", p: 3, u: true }] + + it "should not combine an undo and a normal insert", -> + dest = [{ d: "foo", p: 3, u: true }] + text._append dest, { d: "bar", p: 3 } + dest.should.deep.equal [{ d: "foo", p: 3, u: true }, { d: "bar", p: 3 }] + describe "applying ops and comments in different orders", -> it "should not matter which op or comment is applied first", -> transform = (op1, op2, side) -> From fd3dee0af50846f7c37ce6e1cf85ad26ee0c9d27 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 24 Feb 2017 14:58:42 +0100 Subject: [PATCH 221/769] Increase changes count limit to 2000 --- services/document-updater/app/coffee/RangesManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index f39dea9537..c5aaf97473 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -3,7 +3,7 @@ logger = require "logger-sharelatex" module.exports = RangesManager = MAX_COMMENTS: 500 - MAX_CHANGES: 1200 + MAX_CHANGES: 2000 applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> {changes, comments} = entries From 60f3393c44c81f870b91c467b943b333f74188d8 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 27 Feb 2017 09:55:04 +0100 Subject: [PATCH 222/769] Do a naive check for null bytes after JSON.stringify on doc lines --- .../app/coffee/RedisManager.coffee | 4 ++ .../RedisManager/RedisManagerTests.coffee | 69 +++++++++++++------ 2 files changed, 53 insertions(+), 20 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 1eef4971e7..700df0a4a3 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -31,6 +31,8 @@ module.exports = RedisManager = timer.done() _callback(error) docLines = JSON.stringify(docLines) + if docLines.indexOf("\u0000") != -1 + return callback(new Error("null bytes found in doc lines")) docHash = RedisManager._computeHash(docLines) logger.log project_id:project_id, doc_id:doc_id, version: version, hash:docHash, "putting doc in redis" ranges = RedisManager._serializeRanges(ranges) @@ -151,6 +153,8 @@ module.exports = RedisManager = jsonOps = appliedOps.map (op) -> JSON.stringify op multi = rclient.multi() newDocLines = JSON.stringify(docLines) + if newDocLines.indexOf("\u0000") != -1 + return callback(new Error("null bytes found in doc lines")) newHash = RedisManager._computeHash(newDocLines) multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines multi.set keys.docVersion(doc_id:doc_id), newVersion diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee 
b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 3e3128324e..0050bd119b 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -12,26 +12,30 @@ describe "RedisManager", -> auth: () -> exec: sinon.stub() @rclient.multi = () => @rclient - @RedisManager = SandboxedModule.require modulePath, requires: - "./RedisBackend": - createClient: () => @rclient - "./RedisKeyBuilder": - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - docLines: ({doc_id}) -> "doclines:#{doc_id}" - docOps: ({doc_id}) -> "DocOps:#{doc_id}" - docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" - docHash: ({doc_id}) -> "DocHash:#{doc_id}" - projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - ranges: ({doc_id}) -> "Ranges:#{doc_id}" - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - "settings-sharelatex": {documentupdater: {logHashErrors: {write:true, read:true}}} - "./Metrics": @metrics = - inc: sinon.stub() - Timer: class Timer - done: () -> - "./Errors": Errors + @RedisManager = SandboxedModule.require modulePath, + requires: + "./RedisBackend": + createClient: () => @rclient + "./RedisKeyBuilder": + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + docLines: ({doc_id}) -> "doclines:#{doc_id}" + docOps: ({doc_id}) -> "DocOps:#{doc_id}" + docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + docHash: ({doc_id}) -> "DocHash:#{doc_id}" + projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + ranges: ({doc_id}) -> "Ranges:#{doc_id}" + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + "settings-sharelatex": {documentupdater: {logHashErrors: {write:true, read:true}}} + "./Metrics": @metrics = + inc: sinon.stub() + Timer: class Timer + done: () -> + "./Errors": Errors + globals: + JSON: @JSON = JSON + @doc_id = "doc-id-123" @project_id = "project-id-123" @callback = sinon.stub() @@ -318,6 +322,19 @@ describe "RedisManager", -> it "should call the callback", -> @callback.called.should.equal true + describe "with null bytes in the serialized doc lines", -> + beforeEach -> + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) + @_stringify = JSON.stringify + @JSON.stringify = () -> return '["bad bytes! \u0000 <- here"]' + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback + + afterEach -> + @JSON.stringify = @_stringify + + it "should call the callback with an error", -> + @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true + describe "putDocInMemory", -> beforeEach -> @rclient.set = sinon.stub() @@ -391,6 +408,18 @@ describe "RedisManager", -> @logger.error.calledWith() .should.equal true + describe "with null bytes in the serialized doc lines", -> + beforeEach -> + @_stringify = JSON.stringify + @JSON.stringify = () -> return '["bad bytes! 
\u0000 <- here"]' + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @callback + + afterEach -> + @JSON.stringify = @_stringify + + it "should call the callback with an error", -> + @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true + describe "removeDocFromMemory", -> beforeEach (done) -> @rclient.del = sinon.stub() From dd45a8dd7d7abe0d05799c7195bf56e347ad8358 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 27 Feb 2017 10:06:24 +0100 Subject: [PATCH 223/769] Improve logging around hashes and null byte errors --- .../app/coffee/RedisManager.coffee | 14 +++++++++++--- .../app/coffee/UpdateManager.coffee | 1 - .../coffee/RedisManager/RedisManagerTests.coffee | 6 ++++++ 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 700df0a4a3..d822f4ef74 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -32,7 +32,9 @@ module.exports = RedisManager = _callback(error) docLines = JSON.stringify(docLines) if docLines.indexOf("\u0000") != -1 - return callback(new Error("null bytes found in doc lines")) + error = new Error("null bytes found in doc lines") + logger.error err: error, doc_id: doc_id, docLines: docLines, error.message + return callback(error) docHash = RedisManager._computeHash(docLines) logger.log project_id:project_id, doc_id:doc_id, version: version, hash:docHash, "putting doc in redis" ranges = RedisManager._serializeRanges(ranges) @@ -150,12 +152,18 @@ module.exports = RedisManager = error = new Error("Version mismatch. '#{doc_id}' is corrupted.") logger.error {err: error, doc_id, currentVersion, newVersion, opsLength: appliedOps.length}, "version mismatch" return callback(error) + jsonOps = appliedOps.map (op) -> JSON.stringify op - multi = rclient.multi() newDocLines = JSON.stringify(docLines) if newDocLines.indexOf("\u0000") != -1 - return callback(new Error("null bytes found in doc lines")) + error = new Error("null bytes found in doc lines") + logger.error err: error, doc_id: doc_id, newDocLines: newDocLines, error.message + return callback(error) newHash = RedisManager._computeHash(newDocLines) + + logger.log doc_id: doc_id, version: newVersion, hash: newHash, "updating doc in redis" + + multi = rclient.multi() multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines multi.set keys.docVersion(doc_id:doc_id), newVersion multi.set keys.docHash(doc_id:doc_id), newHash diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 89f58bfd1f..2ad3281bfe 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -61,7 +61,6 @@ module.exports = UpdateManager = return callback(error) if error? RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, (error, new_ranges) -> return callback(error) if error? - logger.log doc_id: doc_id, version: version, "updating doc in redis" RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_ranges, (error) -> return callback(error) if error? 
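# The null-byte guard added in the RedisManager hunks above, composed into a
# single helper for readability (a minimal sketch of the committed logic, not
# a drop-in; `logger` is assumed to be in scope):
serializeDocLines = (doc_id, docLines, callback) ->
  docLines = JSON.stringify(docLines)
  if docLines.indexOf("\u0000") != -1
    error = new Error("null bytes found in doc lines")
    logger.error err: error, doc_id: doc_id, docLines: docLines, error.message
    return callback(error)
  callback null, docLines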
HistoryManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, callback diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 0050bd119b..abc7307c15 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -331,6 +331,9 @@ describe "RedisManager", -> afterEach -> @JSON.stringify = @_stringify + + it "should log an error", -> + @logger.error.called.should.equal true it "should call the callback with an error", -> @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true @@ -416,6 +419,9 @@ describe "RedisManager", -> afterEach -> @JSON.stringify = @_stringify + + it "should log an error", -> + @logger.error.called.should.equal true it "should call the callback with an error", -> @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true From f544814dda3a7998e22c24b40957f5df8a1b9b76 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 27 Feb 2017 14:34:20 +0100 Subject: [PATCH 224/769] Add in an absolute size limit on the ranges JSON object --- .../app/coffee/RedisManager.coffee | 92 +++++++++++-------- .../test/acceptance/coffee/RangesTests.coffee | 39 +++++++- .../RedisManager/RedisManagerTests.coffee | 23 +++++ 3 files changed, 113 insertions(+), 41 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index d822f4ef74..101adac631 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -22,6 +22,9 @@ logHashErrors = Settings.documentupdater?.logHashErrors logHashReadErrors = logHashErrors?.read logHashWriteErrors = logHashErrors?.write +MEGABYTES = 1024 * 1024 +MAX_RANGES_SIZE = 3 * MEGABYTES + module.exports = RedisManager = rclient: rclient @@ -37,24 +40,28 @@ module.exports = RedisManager = return callback(error) docHash = RedisManager._computeHash(docLines) logger.log project_id:project_id, doc_id:doc_id, version: version, hash:docHash, "putting doc in redis" - ranges = RedisManager._serializeRanges(ranges) - multi = rclient.multi() - multi.eval setScript, 1, keys.docLines(doc_id:doc_id), docLines - multi.set keys.projectKey({doc_id:doc_id}), project_id - multi.set keys.docVersion(doc_id:doc_id), version - multi.set keys.docHash(doc_id:doc_id), docHash - if ranges? - multi.set keys.ranges(doc_id:doc_id), ranges - else - multi.del keys.ranges(doc_id:doc_id) - multi.exec (error, result) -> - return callback(error) if error? - # check the hash computed on the redis server - writeHash = result?[0] - if logHashWriteErrors and writeHash? and writeHash isnt docHash - logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, "hash mismatch on putDocInMemory" - # update docsInProject set - rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback + + RedisManager._serializeRanges ranges, (error, ranges) -> + if error? + logger.error {err: error, doc_id, project_id}, error.message + return callback(error) + multi = rclient.multi() + multi.eval setScript, 1, keys.docLines(doc_id:doc_id), docLines + multi.set keys.projectKey({doc_id:doc_id}), project_id + multi.set keys.docVersion(doc_id:doc_id), version + multi.set keys.docHash(doc_id:doc_id), docHash + if ranges? 
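# The cap this patch introduces: ranges are now rejected at serialization time
# once their JSON form exceeds an absolute limit. Composed from the hunks in
# this patch into its final shape (a sketch, not the diff itself):
MEGABYTES = 1024 * 1024
MAX_RANGES_SIZE = 3 * MEGABYTES
_serializeRanges = (ranges, callback = (error, serializedRanges) ->) ->
  jsonRanges = JSON.stringify(ranges)
  if jsonRanges? and jsonRanges.length > MAX_RANGES_SIZE
    return callback new Error("ranges are too large")
  if jsonRanges == '{}'
    # most docs have empty ranges, so don't fill redis with lots of '{}' keys
    jsonRanges = null
  return callback null, jsonRanges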
+ multi.set keys.ranges(doc_id:doc_id), ranges + else + multi.del keys.ranges(doc_id:doc_id) + multi.exec (error, result) -> + return callback(error) if error? + # check the hash computed on the redis server + writeHash = result?[0] + if logHashWriteErrors and writeHash? and writeHash isnt docHash + logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, "hash mismatch on putDocInMemory" + # update docsInProject set + rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback removeDocFromMemory : (project_id, doc_id, _callback)-> logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis" @@ -163,36 +170,41 @@ module.exports = RedisManager = logger.log doc_id: doc_id, version: newVersion, hash: newHash, "updating doc in redis" - multi = rclient.multi() - multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines - multi.set keys.docVersion(doc_id:doc_id), newVersion - multi.set keys.docHash(doc_id:doc_id), newHash - if jsonOps.length > 0 - multi.rpush keys.docOps(doc_id: doc_id), jsonOps... - multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL - multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 - ranges = RedisManager._serializeRanges(ranges) - if ranges? - multi.set keys.ranges(doc_id:doc_id), ranges - else - multi.del keys.ranges(doc_id:doc_id) - multi.exec (error, result) -> - return callback(error) if error? - # check the hash computed on the redis server - writeHash = result?[0] - if logHashWriteErrors and writeHash? and writeHash isnt newHash - logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, "hash mismatch on updateDocument" - return callback() + RedisManager._serializeRanges ranges, (error, ranges) -> + if error? + logger.error {err: error, doc_id}, error.message + return callback(error) + multi = rclient.multi() + multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines + multi.set keys.docVersion(doc_id:doc_id), newVersion + multi.set keys.docHash(doc_id:doc_id), newHash + if jsonOps.length > 0 + multi.rpush keys.docOps(doc_id: doc_id), jsonOps... + multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL + multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 + if ranges? + multi.set keys.ranges(doc_id:doc_id), ranges + else + multi.del keys.ranges(doc_id:doc_id) + multi.exec (error, result) -> + return callback(error) if error? + # check the hash computed on the redis server + writeHash = result?[0] + if logHashWriteErrors and writeHash? and writeHash isnt newHash + logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, "hash mismatch on updateDocument" + return callback() getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback - _serializeRanges: (ranges) -> + _serializeRanges: (ranges, callback = (error, serializedRanges) ->) -> jsonRanges = JSON.stringify(ranges) + if jsonRanges? and jsonRanges.length > MAX_RANGES_SIZE + return callback new Error("ranges are too large") if jsonRanges == '{}' # Most doc will have empty ranges so don't fill redis with lots of '{}' keys jsonRanges = null - return jsonRanges + return callback null, jsonRanges _deserializeRanges: (ranges) -> if !ranges? 
or ranges == "" diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index 044fd3191f..e3ec097e2e 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -264,4 +264,41 @@ describe "Ranges", -> DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => throw error if error? expect(data.ranges.comments).to.be.undefined - done() \ No newline at end of file + done() + + describe "tripping range size limit", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @id_seed = DocUpdaterClient.randomId() + @doc = { + id: DocUpdaterClient.randomId() + lines: ["aaa"] + } + @i = new Array(3 * 1024 * 1024).join("a") + @updates = [{ + doc: @doc.id + op: [{ i: @i, p: 1 }] + v: 0 + meta: { user_id: @user_id, tc: @id_seed } + }] + MockWebApi.insertDoc @project_id, @doc.id, { + lines: @doc.lines + version: 0 + } + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + throw error if error? + async.series jobs, (error) -> + throw error if error? + setTimeout done, 200 + + it "should not update the ranges", (done) -> + DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => + throw error if error? + ranges = data.ranges + expect(ranges.changes).to.be.undefined + done() \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index abc7307c15..258603be9b 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -337,6 +337,18 @@ describe "RedisManager", -> it "should call the callback with an error", -> @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true + + describe "with ranges that are too big", -> + beforeEach -> + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) + @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")) + @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback + + it 'should log an error', -> + @logger.error.called.should.equal true + + it "should call the callback with the error", -> + @callback.calledWith(new Error("ranges are too large")).should.equal true describe "putDocInMemory", -> beforeEach -> @@ -425,6 +437,17 @@ describe "RedisManager", -> it "should call the callback with an error", -> @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true + + describe "with ranges that are too big", -> + beforeEach -> + @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")) + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @callback + + it 'should log an error', -> + @logger.error.called.should.equal true + + it "should call the callback with the error", -> + @callback.calledWith(new Error("ranges are too large")).should.equal true describe "removeDocFromMemory", -> beforeEach (done) -> From d56bb5595320cbc2a0273ed98421ea85b0f6c417 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 1 Mar 
2017 16:49:46 +0000 Subject: [PATCH 225/769] Revert PR #19 --- .../app/coffee/RangesTracker.coffee | 78 +++++++------------ .../app/coffee/sharejs/types/text-api.coffee | 16 ++-- .../app/coffee/sharejs/types/text.coffee | 33 ++++---- .../coffee/ShareJS/TextTransformTests.coffee | 53 +------------ 4 files changed, 49 insertions(+), 131 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index a9c43e9816..865ecf4ef6 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -1,5 +1,5 @@ -load = () -> - class RangesTracker +load = (EventEmitter) -> + class RangesTracker extends EventEmitter # The purpose of this class is to track a set of inserts and deletes to a document, like # track changes in Word. We store these as a set of ShareJs style ranges: # {i: "foo", p: 42} # Insert 'foo' at offset 42 @@ -36,7 +36,6 @@ load = () -> # middle of a previous insert by the first user, the original insert will be split into two. constructor: (@changes = [], @comments = []) -> @setIdSeed(RangesTracker.generateIdSeed()) - @resetDirtyState() getIdSeed: () -> return @id_seed @@ -76,7 +75,7 @@ load = () -> comment = @getComment(comment_id) return if !comment? @comments = @comments.filter (c) -> c.id != comment_id - @_markAsDirty comment, "comment", "removed" + @emit "comment:removed", comment getChange: (change_id) -> change = null @@ -104,11 +103,7 @@ load = () -> @addComment(op, metadata) else throw new Error("unknown op type") - - applyOps: (ops, metadata = {}) -> - for op in ops - @applyOp(op, metadata) - + addComment: (op, metadata) -> # TODO: Don't allow overlapping comments? @comments.push comment = { @@ -119,18 +114,18 @@ load = () -> t: op.t metadata } - @_markAsDirty comment, "comment", "added" + @emit "comment:added", comment return comment applyInsertToComments: (op) -> for comment in @comments if op.p <= comment.op.p comment.op.p += op.i.length - @_markAsDirty comment, "comment", "moved" + @emit "comment:moved", comment else if op.p < comment.op.p + comment.op.c.length offset = op.p - comment.op.p comment.op.c = comment.op.c[0..(offset-1)] + op.i + comment.op.c[offset...] - @_markAsDirty comment, "comment", "moved" + @emit "comment:moved", comment applyDeleteToComments: (op) -> op_start = op.p @@ -143,7 +138,7 @@ load = () -> if op_end <= comment_start # delete is fully before comment comment.op.p -= op_length - @_markAsDirty comment, "comment", "moved" + @emit "comment:moved", comment else if op_start >= comment_end # delete is fully after comment, nothing to do else @@ -166,13 +161,12 @@ load = () -> comment.op.p = Math.min(comment_start, op_start) comment.op.c = remaining_before + remaining_after - @_markAsDirty comment, "comment", "moved" + @emit "comment:moved", comment applyInsertToChanges: (op, metadata) -> op_start = op.p op_length = op.i.length op_end = op.p + op_length - undoing = !!op.u already_merged = false @@ -190,9 +184,8 @@ load = () -> change.op.p += op_length moved_changes.push change else if op_start == change_start - # If we are undoing, then we want to cancel any existing delete ranges if we can. - # Check if the insert matches the start of the delete, and just remove it from the delete instead if so. 
- if undoing and change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i + # If the insert matches the start of the delete, just remove it from the delete instead + if change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i change.op.d = change.op.d.slice(op.i.length) change.op.p += op.i.length if change.op.d == "" @@ -210,15 +203,15 @@ load = () -> # Only merge inserts if they are from the same user is_same_user = metadata.user_id == change.metadata.user_id - # If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also - # an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. + # If this is an insert op at the end of an existing insert with a delete following, and it cancels out the following + # delete then we shouldn't append it to this insert, but instead only cancel the following delete. # E.g. # foo|<--- about to insert 'b' here # inserted 'foo' --^ ^-- deleted 'bar' # should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . next_change = @changes[i+1] is_op_adjacent_to_next_delete = next_change? and next_change.op.d? and op.p == change_end and next_change.op.p == op.p - will_op_cancel_next_delete = undoing and is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i + will_op_cancel_next_delete = is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i # If there is a delete at the start of the insert, and we're inserting # at the start, we SHOULDN'T merge since the delete acts as a partition. @@ -288,8 +281,8 @@ load = () -> for change in remove_changes @_removeChange change - for change in moved_changes - @_markAsDirty change, "change", "moved" + if moved_changes.length > 0 + @emit "changes:moved", moved_changes applyDeleteToChanges: (op, metadata) -> op_start = op.p @@ -413,8 +406,8 @@ load = () -> @_removeChange change moved_changes = moved_changes.filter (c) -> c != change - for change in moved_changes - @_markAsDirty change, "change", "moved" + if moved_changes.length > 0 + @emit "changes:moved", moved_changes _addOp: (op, metadata) -> change = { @@ -434,11 +427,17 @@ load = () -> else return -1 - @_markAsDirty(change, "change", "added") + if op.d? + @emit "delete:added", change + else if op.i? + @emit "insert:added", change _removeChange: (change) -> @changes = @changes.filter (c) -> c.id != change.id - @_markAsDirty change, "change", "removed" + if change.op.d? + @emit "delete:removed", change + else if change.op.i? + @emit "insert:removed", change _applyOpModifications: (content, op_modifications) -> # Put in descending position order, with deleting first if at the same offset @@ -487,32 +486,13 @@ load = () -> previous_change = change return { moved_changes, remove_changes } - resetDirtyState: () -> - @_dirtyState = { - comment: { - moved: {} - removed: {} - added: {} - } - change: { - moved: {} - removed: {} - added: {} - } - } - - getDirtyState: () -> - return @_dirtyState - - _markAsDirty: (object, type, action) -> - @_dirtyState[type][action][object.id] = object - _clone: (object) -> clone = {} (clone[k] = v for k,v of object) return clone if define? 
- define [], load + define ["utils/EventEmitter"], load else - module.exports = load() + EventEmitter = require("events").EventEmitter + module.exports = load(EventEmitter) \ No newline at end of file diff --git a/services/document-updater/app/coffee/sharejs/types/text-api.coffee b/services/document-updater/app/coffee/sharejs/types/text-api.coffee index 98bb3fd503..96243ceffb 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-api.coffee +++ b/services/document-updater/app/coffee/sharejs/types/text-api.coffee @@ -11,20 +11,14 @@ text.api = # Get the text contents of a document getText: -> @snapshot - insert: (pos, text, fromUndo, callback) -> - op = {p:pos, i:text} - if fromUndo - op.u = true - op = [op] + insert: (pos, text, callback) -> + op = [{p:pos, i:text}] @submitOp op, callback op - del: (pos, length, fromUndo, callback) -> - op = {p:pos, d:@snapshot[pos...(pos + length)]} - if fromUndo - op.u = true - op = [op] + del: (pos, length, callback) -> + op = [{p:pos, d:@snapshot[pos...(pos + length)]}] @submitOp op, callback op @@ -34,5 +28,5 @@ text.api = for component in op if component.i != undefined @emit 'insert', component.p, component.i - else if component.d != undefined + else @emit 'delete', component.p, component.d diff --git a/services/document-updater/app/coffee/sharejs/types/text.coffee b/services/document-updater/app/coffee/sharejs/types/text.coffee index ee7bf57043..2a3b79997d 100644 --- a/services/document-updater/app/coffee/sharejs/types/text.coffee +++ b/services/document-updater/app/coffee/sharejs/types/text.coffee @@ -56,13 +56,6 @@ text.apply = (snapshot, op) -> throw new Error "Unknown op type" snapshot -cloneAndModify = (op, modifications) -> - newOp = {} - for k,v of op - newOp[k] = v - for k,v of modifications - newOp[k] = v - return newOp # Exported for use by the random op generator. # @@ -76,10 +69,10 @@ text._append = append = (newOp, c) -> last = newOp[newOp.length - 1] # Compose the insert into the previous insert if possible - if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length) and last.u == c.u - newOp[newOp.length - 1] = cloneAndModify(last, {i:strInject(last.i, c.p - last.p, c.i)}) - else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length) and last.u == c.u - newOp[newOp.length - 1] = cloneAndModify(last, {d:strInject(c.d, last.p - c.p, last.d), p: c.p}) + if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length) + newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p} + else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length) + newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p} else newOp.push c @@ -157,25 +150,25 @@ text._tc = transformComponent = (dest, c, otherC, side) -> checkValidOp [otherC] if c.i? - append dest, cloneAndModify(c, {p:transformPosition(c.p, otherC, side == 'right')}) + append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')} else if c.d? # Delete if otherC.i? # delete vs insert s = c.d if c.p < otherC.p - append dest, cloneAndModify(c, {d:s[...otherC.p - c.p]}) + append dest, {d:s[...otherC.p - c.p], p:c.p} s = s[(otherC.p - c.p)..] if s != '' - append dest, cloneAndModify(c, {d:s, p:c.p + otherC.i.length}) + append dest, {d:s, p:c.p + otherC.i.length} else if otherC.d? # Delete vs delete if c.p >= otherC.p + otherC.d.length - append dest, cloneAndModify(c, {p:c.p - otherC.d.length}) + append dest, {d:c.d, p:c.p - otherC.d.length} else if c.p + c.d.length <= otherC.p append dest, c else # They overlap somewhere. 
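# The loader shim restored in RangesTracker above lets the same file run on
# the client (RequireJS/AMD) and the server (CommonJS), with the EventEmitter
# dependency injected. A minimal sketch of the pattern, with a hypothetical
# class name:
load = (EventEmitter) ->
  class Tracker extends EventEmitter
    notifyMoved: (changes) -> @emit "changes:moved", changes
if define?
  define ["utils/EventEmitter"], load              # browser: AMD module
else
  EventEmitter = require("events").EventEmitter
  module.exports = load(EventEmitter)              # Node: CommonJS export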
- newC = cloneAndModify(c, {d:''}) + newC = {d:'', p:c.p} if c.p < otherC.p newC.d = c.d[...(otherC.p - c.p)] if c.p + c.d.length > otherC.p + otherC.d.length @@ -205,18 +198,18 @@ text._tc = transformComponent = (dest, c, otherC, side) -> if c.p < otherC.p < c.p + c.c.length offset = otherC.p - c.p new_c = (c.c[0..(offset-1)] + otherC.i + c.c[offset...]) - append dest, cloneAndModify(c, {c:new_c}) + append dest, {c:new_c, p:c.p, t: c.t} else - append dest, cloneAndModify(c, {p:transformPosition(c.p, otherC, true)}) + append dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t} else if otherC.d? if c.p >= otherC.p + otherC.d.length - append dest, cloneAndModify(c, {p:c.p - otherC.d.length}) + append dest, {c:c.c, p:c.p - otherC.d.length, t: c.t} else if c.p + c.c.length <= otherC.p append dest, c else # Delete overlaps comment # They overlap somewhere. - newC = cloneAndModify(c, {c:''}) + newC = {c:'', p:c.p, t: c.t} if c.p < otherC.p newC.c = c.c[...(otherC.p - c.p)] if c.p + c.c.length > otherC.p + otherC.d.length diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee index 5477b47b38..81440bfe5b 100644 --- a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee @@ -27,11 +27,6 @@ describe "ShareJS text type", -> dest = [] text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'left') dest.should.deep.equal [{ i: "foo", p: 3 }] - - it "should preserve the undo flag", -> - dest = [] - text._tc(dest, { i: "foo", p: 9, u: true }, { i: "bar", p: 3 }) - dest.should.deep.equal [{ i: "foo", p: 12, u: true }] describe "insert / delete", -> it "with a delete before", -> @@ -51,13 +46,9 @@ describe "ShareJS text type", -> it "with a delete at the same place with side == 'left'", -> dest = [] + text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'left') dest.should.deep.equal [{ i: "foo", p: 3 }] - - it "should preserve the undo flag", -> - dest = [] - text._tc(dest, { i: "foo", p: 9, u: true }, { d: "bar", p: 3 }) - dest.should.deep.equal [{ i: "foo", p: 6, u: true }] describe "delete / insert", -> it "with an insert before", -> @@ -84,11 +75,7 @@ describe "ShareJS text type", -> dest = [] text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 4 }) dest.should.deep.equal [{ d: "f", p: 3 }, { d: "oo", p: 6 }] - - it "should preserve the undo flag", -> - dest = [] - text._tc(dest, { d: "foo", p: 9, u: true }, { i: "bar", p: 3 }) - dest.should.deep.equal [{ d: "foo", p: 12, u: true }] + describe "delete / delete", -> it "with a delete before", -> @@ -125,11 +112,6 @@ describe "ShareJS text type", -> dest = [] text._tc(dest, { d: "foo", p: 6 }, { d: "abcfoo123", p: 3 }) dest.should.deep.equal [] - - it "should preserve the undo flag", -> - dest = [] - text._tc(dest, { d: "foo", p: 9, u: true }, { d: "bar", p: 3 }) - dest.should.deep.equal [{ d: "foo", p: 6, u: true }] describe "comment / insert", -> it "with an insert before", -> @@ -228,37 +210,6 @@ describe "ShareJS text type", -> text.apply("foo123bar", [{ c: "456", p: 3 }]) ).should.throw(Error) - describe "_append", -> - it "should combine adjacent inserts", -> - dest = [{ i: "foo", p: 3 }] - text._append dest, { i: "bar", p: 6 } - dest.should.deep.equal [{ i: "foobar", p: 3 }] - - it "should combine adjacent undo inserts", -> - dest = [{ i: "foo", p: 3, u: true }] - text._append dest, { i: "bar", p: 6, u: true } - 
dest.should.deep.equal [{ i: "foobar", p: 3, u: true }] - - it "should not combine an undo and a normal insert", -> - dest = [{ i: "foo", p: 3, u: true }] - text._append dest, { i: "bar", p: 6 } - dest.should.deep.equal [{ i: "foo", p: 3, u: true }, { i: "bar", p: 6 }] - - it "should combine adjacent deletes", -> - dest = [{ d: "bar", p: 6 }] - text._append dest, { d: "foobaz", p: 3 } - dest.should.deep.equal [{ d: "foobarbaz", p: 3 }] - - it "should combine adjacent undo deletes", -> - dest = [{ d: "foo", p: 3, u: true }] - text._append dest, { d: "bar", p: 3, u: true } - dest.should.deep.equal [{ d: "foobar", p: 3, u: true }] - - it "should not combine an undo and a normal insert", -> - dest = [{ d: "foo", p: 3, u: true }] - text._append dest, { d: "bar", p: 3 } - dest.should.deep.equal [{ d: "foo", p: 3, u: true }, { d: "bar", p: 3 }] - describe "applying ops and comments in different orders", -> it "should not matter which op or comment is applied first", -> transform = (op1, op2, side) -> From b186a01c04596c38d4cf9f652d23fd8dc8225b50 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 3 Mar 2017 15:27:42 +0000 Subject: [PATCH 226/769] don't log errors from redis backend this also picks up errors from RedisManager like "doc ops range is not loaded in redis" --- services/document-updater/app/coffee/RedisBackend.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee index 9ec479c01f..d69cd21a6e 100644 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ b/services/document-updater/app/coffee/RedisBackend.coffee @@ -100,7 +100,8 @@ class MultiClient cb(error, result) async.parallel jobs, (error, results) -> if error? 
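# Context for the change just below: the backend fans each command out to a
# primary and a secondary Redis client and compares their results. Per the
# commit message, the error log here also picked up routine errors surfaced
# through RedisManager (e.g. "doc ops range is not loaded in redis"), so it is
# silenced and only the primary's error and result are passed back. Simplified
# shape of the callback (a sketch, not a drop-in):
async.parallel jobs, (error, results) ->
  compareResults(results, "exec") unless error?   # errors no longer logged
  callback(primaryError, primaryResult)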
- logger.error {err: error}, "error in redis backend" + # suppress logging of errors + # logger.error {err: error}, "error in redis backend" else compareResults(results, "exec") callback(primaryError, primaryResult) From 3f13263ecfebbc6dd4da584031fce96df85a3f4e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 3 Mar 2017 15:32:11 +0000 Subject: [PATCH 227/769] upgrade to logger-sharelatex 1.5.3 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 72a2e28121..76bb426cf4 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -11,7 +11,7 @@ "coffee-script": "1.4.0", "express": "3.3.4", "ioredis": "^2.2.0", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.2", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.3", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "redis-sharelatex": "0.0.9", From d086e0b61b032a34ecec503596c14524e96af013 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 3 Mar 2017 15:57:44 +0000 Subject: [PATCH 228/769] log doclines on hash mismatch --- services/document-updater/app/coffee/RedisManager.coffee | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index d822f4ef74..1ce0062fe6 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -52,7 +52,7 @@ module.exports = RedisManager = # check the hash computed on the redis server writeHash = result?[0] if logHashWriteErrors and writeHash? and writeHash isnt docHash - logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, "hash mismatch on putDocInMemory" + logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, docLines:docLines, "hash mismatch on putDocInMemory" # update docsInProject set rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback @@ -92,7 +92,7 @@ module.exports = RedisManager = if docLines? and storedHash? computedHash = RedisManager._computeHash(docLines) if logHashReadErrors and computedHash isnt storedHash - logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, computedHash: computedHash, storedHash: storedHash, "hash mismatch on retrieved document" + logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, computedHash: computedHash, storedHash: storedHash, docLines:docLines, "hash mismatch on retrieved document" try docLines = JSON.parse docLines @@ -181,7 +181,7 @@ module.exports = RedisManager = # check the hash computed on the redis server writeHash = result?[0] if logHashWriteErrors and writeHash? 
and writeHash isnt newHash - logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, "hash mismatch on updateDocument" + logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, docLines:newDocLines, "hash mismatch on updateDocument" return callback() getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> From 501d907299cf8265d7d3b32e10f10d6de7fb355e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 3 Mar 2017 16:08:14 +0000 Subject: [PATCH 229/769] upgrade to logger-sharelatex 1.5.4 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 76bb426cf4..9f8439ea04 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -11,7 +11,7 @@ "coffee-script": "1.4.0", "express": "3.3.4", "ioredis": "^2.2.0", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.3", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.4", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "redis-sharelatex": "0.0.9", From a3a9a726d80203ea778d944fd33fd41687297ba2 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 6 Mar 2017 12:01:18 +0000 Subject: [PATCH 230/769] Update unit tests --- .../coffee/RedisBackend/RedisBackendTests.coffee | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee index 52bb69bebb..4a136baae1 100644 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee @@ -368,15 +368,8 @@ describe "RedisBackend", -> it "should return the primary result", -> @result.should.deep.equal [@doclines, @version] - - it "should log out the secondary error", -> - @logger.error - .calledWith({ - err: @error - }, "error in redis backend") - .should.equal true - describe "when the secondary errors", -> + describe "when the primary errors", -> beforeEach (done) -> @rclient_redis.get = sinon.stub() @rclient_redis.exec = sinon.stub().yields(@error = new Error("oops")) @@ -391,13 +384,6 @@ describe "RedisBackend", -> it "should return the error", -> @returned_error.should.equal @error - - it "should log out the error", -> - @logger.error - .calledWith({ - err: @error - }, "error in redis backend") - .should.equal true describe "when the secondary takes longer than SECONDARY_TIMEOUT", -> beforeEach (done) -> From b9e449200189dae302a71dac6af5974e080f9ea4 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 6 Mar 2017 19:05:11 +0000 Subject: [PATCH 231/769] Wrap RangeTracker in try catch --- services/document-updater/app/coffee/RangesManager.coffee | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index c5aaf97473..7f430f476b 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -13,7 +13,10 @@ module.exports = RangesManager = if !!update.meta.tc rangesTracker.setIdSeed(update.meta.tc) for op in update.op - rangesTracker.applyOp(op, { user_id: update.meta?.user_id 
}) + try + rangesTracker.applyOp(op, { user_id: update.meta?.user_id }) + catch error + return callback(error) if rangesTracker.changes?.length > RangesManager.MAX_CHANGES or rangesTracker.comments?.length > RangesManager.MAX_COMMENTS return callback new Error("too many comments or tracked changes") From 5499a67d78f7467382d4620f6d4d9646effb9b03 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 15 Mar 2017 14:12:06 +0000 Subject: [PATCH 232/769] Add in a consistency check after applying updates that ranges still match --- .../app/coffee/RangesManager.coffee | 10 +- .../app/coffee/RangesTracker.coffee | 93 ++++++--- .../app/coffee/UpdateManager.coffee | 2 +- .../RangesManager/RangesManagerTests.coffee | 179 ++++++++++++++++++ .../UpdateManager/UpdateManagerTests.coffee | 2 +- 5 files changed, 254 insertions(+), 32 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 7f430f476b..ebef566424 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -5,7 +5,7 @@ module.exports = RangesManager = MAX_COMMENTS: 500 MAX_CHANGES: 2000 - applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) -> + applyUpdate: (project_id, doc_id, entries = {}, updates = [], newDocLines, callback = (error, new_entries) ->) -> {changes, comments} = entries rangesTracker = new RangesTracker(changes, comments) for update in updates @@ -21,6 +21,14 @@ module.exports = RangesManager = if rangesTracker.changes?.length > RangesManager.MAX_CHANGES or rangesTracker.comments?.length > RangesManager.MAX_COMMENTS return callback new Error("too many comments or tracked changes") + try + # This is a consistency check that all of our ranges and + # comments still match the corresponding text + rangesTracker.validate(newDocLines.join("\n")) + catch error + logger.error {err: error, project_id, doc_id, newDocLines, updates}, "error validating ranges" + return callback(error) + response = RangesManager._getRanges rangesTracker logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges" callback null, response diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 865ecf4ef6..5f67a9561b 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -1,5 +1,8 @@ -load = (EventEmitter) -> - class RangesTracker extends EventEmitter +# This file is shared between document-updater and web, so that the server and client share +# an identical track changes implementation. Do not edit it directly in web or document-updater, +# instead edit it at https://github.com/sharelatex/ranges-tracker, where it has a suite of tests +load = () -> + class RangesTracker # The purpose of this class is to track a set of inserts and deletes to a document, like # track changes in Word. We store these as a set of ShareJs style ranges: # {i: "foo", p: 42} # Insert 'foo' at offset 42 @@ -36,6 +39,7 @@ load = (EventEmitter) -> # middle of a previous insert by the first user, the original insert will be split into two. 
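# How the validate() hook added above is wired in, condensed from the
# RangesManager hunk in this patch (sketch):
rangesTracker = new RangesTracker(changes, comments)
rangesTracker.applyOps update.op, user_id: update.meta?.user_id
try
  # consistency check: every tracked insert and comment must still match the
  # corresponding slice of the updated document text
  rangesTracker.validate(newDocLines.join("\n"))
catch error
  return callback(error)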
constructor: (@changes = [], @comments = []) -> @setIdSeed(RangesTracker.generateIdSeed()) + @resetDirtyState() getIdSeed: () -> return @id_seed @@ -75,7 +79,7 @@ load = (EventEmitter) -> comment = @getComment(comment_id) return if !comment? @comments = @comments.filter (c) -> c.id != comment_id - @emit "comment:removed", comment + @_markAsDirty comment, "comment", "removed" getChange: (change_id) -> change = null @@ -89,6 +93,18 @@ load = (EventEmitter) -> change = @getChange(change_id) return if !change? @_removeChange(change) + + validate: (text) -> + for change in @changes + if change.op.i? + content = text.slice(change.op.p, change.op.p + change.op.i.length) + if content != change.op.i + throw new Error("Change (#{JSON.stringify(change)}) doesn't match text (#{JSON.stringify(content)})") + for comment in @comments + content = text.slice(comment.op.p, comment.op.p + comment.op.c.length) + if content != comment.op.c + throw new Error("Comment (#{JSON.stringify(comment)}) doesn't match text (#{JSON.stringify(content)})") + return true applyOp: (op, metadata = {}) -> metadata.ts ?= new Date() @@ -103,7 +119,11 @@ load = (EventEmitter) -> @addComment(op, metadata) else throw new Error("unknown op type") - + + applyOps: (ops, metadata = {}) -> + for op in ops + @applyOp(op, metadata) + addComment: (op, metadata) -> # TODO: Don't allow overlapping comments? @comments.push comment = { @@ -114,18 +134,18 @@ load = (EventEmitter) -> t: op.t metadata } - @emit "comment:added", comment + @_markAsDirty comment, "comment", "added" return comment applyInsertToComments: (op) -> for comment in @comments if op.p <= comment.op.p comment.op.p += op.i.length - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" else if op.p < comment.op.p + comment.op.c.length offset = op.p - comment.op.p comment.op.c = comment.op.c[0..(offset-1)] + op.i + comment.op.c[offset...] - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" applyDeleteToComments: (op) -> op_start = op.p @@ -138,7 +158,7 @@ load = (EventEmitter) -> if op_end <= comment_start # delete is fully before comment comment.op.p -= op_length - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" else if op_start >= comment_end # delete is fully after comment, nothing to do else @@ -161,12 +181,13 @@ load = (EventEmitter) -> comment.op.p = Math.min(comment_start, op_start) comment.op.c = remaining_before + remaining_after - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" applyInsertToChanges: (op, metadata) -> op_start = op.p op_length = op.i.length op_end = op.p + op_length + undoing = !!op.u already_merged = false @@ -184,8 +205,9 @@ load = (EventEmitter) -> change.op.p += op_length moved_changes.push change else if op_start == change_start - # If the insert matches the start of the delete, just remove it from the delete instead - if change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i + # If we are undoing, then we want to cancel any existing delete ranges if we can. + # Check if the insert matches the start of the delete, and just remove it from the delete instead if so. 
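# Worked example of the cancellation implemented just below: given an existing
# tracked delete, an undo insert (u: true) that matches the start of that
# delete removes the text from the delete instead of adding a tracked insert
# (the seed data here is hypothetical):
tracker = new RangesTracker([{ id: "change-id-1", op: { d: "foo", p: 3 }, metadata: {} }], [])
tracker.applyOp { i: "foo", p: 3, u: true }
# the delete is cancelled outright, so tracker.changes is now empty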
+ if undoing and change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i change.op.d = change.op.d.slice(op.i.length) change.op.p += op.i.length if change.op.d == "" @@ -203,15 +225,15 @@ load = (EventEmitter) -> # Only merge inserts if they are from the same user is_same_user = metadata.user_id == change.metadata.user_id - # If this is an insert op at the end of an existing insert with a delete following, and it cancels out the following - # delete then we shouldn't append it to this insert, but instead only cancel the following delete. + # If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also + # an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. # E.g. # foo|<--- about to insert 'b' here # inserted 'foo' --^ ^-- deleted 'bar' # should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . next_change = @changes[i+1] is_op_adjacent_to_next_delete = next_change? and next_change.op.d? and op.p == change_end and next_change.op.p == op.p - will_op_cancel_next_delete = is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i + will_op_cancel_next_delete = undoing and is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i # If there is a delete at the start of the insert, and we're inserting # at the start, we SHOULDN'T merge since the delete acts as a partition. @@ -281,8 +303,8 @@ load = (EventEmitter) -> for change in remove_changes @_removeChange change - if moved_changes.length > 0 - @emit "changes:moved", moved_changes + for change in moved_changes + @_markAsDirty change, "change", "moved" applyDeleteToChanges: (op, metadata) -> op_start = op.p @@ -406,8 +428,8 @@ load = (EventEmitter) -> @_removeChange change moved_changes = moved_changes.filter (c) -> c != change - if moved_changes.length > 0 - @emit "changes:moved", moved_changes + for change in moved_changes + @_markAsDirty change, "change", "moved" _addOp: (op, metadata) -> change = { @@ -427,17 +449,11 @@ load = (EventEmitter) -> else return -1 - if op.d? - @emit "delete:added", change - else if op.i? - @emit "insert:added", change + @_markAsDirty(change, "change", "added") _removeChange: (change) -> @changes = @changes.filter (c) -> c.id != change.id - if change.op.d? - @emit "delete:removed", change - else if change.op.i? - @emit "insert:removed", change + @_markAsDirty change, "change", "removed" _applyOpModifications: (content, op_modifications) -> # Put in descending position order, with deleting first if at the same offset @@ -486,13 +502,32 @@ load = (EventEmitter) -> previous_change = change return { moved_changes, remove_changes } + resetDirtyState: () -> + @_dirtyState = { + comment: { + moved: {} + removed: {} + added: {} + } + change: { + moved: {} + removed: {} + added: {} + } + } + + getDirtyState: () -> + return @_dirtyState + + _markAsDirty: (object, type, action) -> + @_dirtyState[type][action][object.id] = object + _clone: (object) -> clone = {} (clone[k] = v for k,v of object) return clone if define? 
- define ["utils/EventEmitter"], load + define [], load else - EventEmitter = require("events").EventEmitter - module.exports = load(EventEmitter) \ No newline at end of file + module.exports = load() diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 2ad3281bfe..b6a5f98c4c 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -59,7 +59,7 @@ module.exports = UpdateManager = return callback(new Errors.NotFoundError("document not found: #{doc_id}")) ShareJsUpdateManager.applyUpdate project_id, doc_id, update, lines, version, (error, updatedDocLines, version, appliedOps) -> return callback(error) if error? - RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, (error, new_ranges) -> + RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> return callback(error) if error? RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_ranges, (error) -> return callback(error) if error? diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee new file mode 100644 index 0000000000..0c0c556a38 --- /dev/null +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee @@ -0,0 +1,179 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../../app/js/RangesManager.js" +SandboxedModule = require('sandboxed-module') + +describe "RangesManager", -> + beforeEach -> + @RangesManager = SandboxedModule.require modulePath, + requires: + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + + @doc_id = "doc-id-123" + @project_id = "project-id-123" + @user_id = "user-id-123" + @callback = sinon.stub() + + describe "applyUpdate", -> + beforeEach -> + @updates = [{ + meta: + user_id: @user_id + op: [{ + i: "two " + p: 4 + }] + }] + @entries = { + comments: [{ + op: + c: "three " + p: 4 + metadata: + user_id: @user_id + }] + changes: [{ + op: + i: "five" + p: 15 + metadata: + user_id: @user_id + }] + } + @newDocLines = ["one two three four five"] # old is "one three four five" + + describe "successfully", -> + beforeEach -> + @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + + it "should return the modified the comments and changes", -> + @callback.called.should.equal true + [error, entries] = @callback.args[0] + expect(error).to.be.null + entries.comments[0].op.should.deep.equal { + c: "three " + p: 8 + } + entries.changes[0].op.should.deep.equal { + i: "five" + p: 19 + } + + describe "with empty comments", -> + beforeEach -> + @entries.comments = [] + @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + + it "should return an object with no comments", -> + # Save space in redis and don't store just {} + @callback.called.should.equal true + [error, entries] = @callback.args[0] + expect(error).to.be.null + expect(entries.comments).to.be.undefined + + describe "with empty changes", -> + beforeEach -> + @entries.changes = [] + @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + + it "should return an object with no changes", -> + # Save space in redis and don't store just {} + 
@callback.called.should.equal true + [error, entries] = @callback.args[0] + expect(error).to.be.null + expect(entries.changes).to.be.undefined + + describe "with too many comments", -> + beforeEach -> + @RangesManager.MAX_COMMENTS = 2 + @updates = [{ + meta: + user_id: @user_id + op: [{ + c: "one" + p: 0 + }] + }] + @entries = { + comments: [{ + op: + c: "three " + p: 4 + metadata: + user_id: @user_id + }, { + op: + c: "four " + p: 10 + metadata: + user_id: @user_id + }] + changes: [] + } + @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + + it "should return an error", -> + # Save space in redis and don't store just {} + @callback.called.should.equal true + [error, entries] = @callback.args[0] + expect(error).to.not.be.null + expect(error.message).to.equal("too many comments or tracked changes") + + describe "with too many changes", -> + beforeEach -> + @RangesManager.MAX_CHANGES = 2 + @updates = [{ + meta: + user_id: @user_id + tc: "track-changes-id-yes" + op: [{ + i: "one " + p: 0 + }] + }] + @entries = { + changes: [{ + op: + i: "three" + p: 4 + metadata: + user_id: @user_id + }, { + op: + i: "four" + p: 10 + metadata: + user_id: @user_id + }] + comments: [] + } + @newDocLines = ["one two three four"] + @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + + it "should return an error", -> + # Save space in redis and don't store just {} + @callback.called.should.equal true + [error, entries] = @callback.args[0] + expect(error).to.not.be.null + expect(error.message).to.equal("too many comments or tracked changes") + + describe "inconsistent changes", -> + beforeEach -> + @updates = [{ + meta: + user_id: @user_id + op: [{ + c: "doesn't match" + p: 0 + }] + }] + @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + + it "should return an error", -> + # Save space in redis and don't store just {} + @callback.called.should.equal true + [error, entries] = @callback.args[0] + expect(error).to.not.be.null + expect(error.message).to.equal("Change ({\"op\":{\"i\":\"five\",\"p\":15},\"metadata\":{\"user_id\":\"user-id-123\"}}) doesn't match text (\"our \")") \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 33578cb6f0..fbf9b21ddc 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -179,7 +179,7 @@ describe "UpdateManager", -> it "should update the ranges", -> @RangesManager.applyUpdate - .calledWith(@project_id, @doc_id, @ranges, @appliedOps) + .calledWith(@project_id, @doc_id, @ranges, @appliedOps, @updatedDocLines) .should.equal true it "should save the document", -> From 9c5299ec7c4f1a421b461d2c1233a2fc1a9effac Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 16 Mar 2017 15:49:41 +0000 Subject: [PATCH 233/769] Update RangeTracker to support upserting and moving comments for cut and paste --- .../app/coffee/RangesTracker.coffee | 124 ++++++++++++------ 1 file changed, 85 insertions(+), 39 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 865ecf4ef6..14193f628d 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ 
b/services/document-updater/app/coffee/RangesTracker.coffee @@ -1,5 +1,8 @@ -load = (EventEmitter) -> - class RangesTracker extends EventEmitter +# This file is shared between document-updater and web, so that the server and client share +# an identical track changes implementation. Do not edit it directly in web or document-updater, +# instead edit it at https://github.com/sharelatex/ranges-tracker, where it has a suite of tests +load = () -> + class RangesTracker # The purpose of this class is to track a set of inserts and deletes to a document, like # track changes in Word. We store these as a set of ShareJs style ranges: # {i: "foo", p: 42} # Insert 'foo' at offset 42 @@ -36,6 +39,7 @@ load = (EventEmitter) -> # middle of a previous insert by the first user, the original insert will be split into two. constructor: (@changes = [], @comments = []) -> @setIdSeed(RangesTracker.generateIdSeed()) + @resetDirtyState() getIdSeed: () -> return @id_seed @@ -75,8 +79,15 @@ load = (EventEmitter) -> comment = @getComment(comment_id) return if !comment? @comments = @comments.filter (c) -> c.id != comment_id - @emit "comment:removed", comment + @_markAsDirty comment, "comment", "removed" + moveCommentId: (comment_id, position, text) -> + for comment in @comments + if comment.id == comment_id + comment.op.p = position + comment.op.c = text + @_markAsDirty comment, "comment", "moved" + getChange: (change_id) -> change = null for c in @changes @@ -89,6 +100,18 @@ load = (EventEmitter) -> change = @getChange(change_id) return if !change? @_removeChange(change) + + validate: (text) -> + for change in @changes + if change.op.i? + content = text.slice(change.op.p, change.op.p + change.op.i.length) + if content != change.op.i + throw new Error("Change (#{JSON.stringify(change)}) doesn't match text (#{JSON.stringify(content)})") + for comment in @comments + content = text.slice(comment.op.p, comment.op.p + comment.op.c.length) + if content != comment.op.c + throw new Error("Comment (#{JSON.stringify(comment)}) doesn't match text (#{JSON.stringify(content)})") + return true applyOp: (op, metadata = {}) -> metadata.ts ?= new Date() @@ -103,29 +126,37 @@ load = (EventEmitter) -> @addComment(op, metadata) else throw new Error("unknown op type") - + + applyOps: (ops, metadata = {}) -> + for op in ops + @applyOp(op, metadata) + addComment: (op, metadata) -> - # TODO: Don't allow overlapping comments? - @comments.push comment = { - id: op.t or @newId() - op: # Copy because we'll modify in place - c: op.c - p: op.p - t: op.t - metadata - } - @emit "comment:added", comment - return comment + existing = @getComment(op.t) + if existing? + @moveCommentId(op.t, op.p, op.c) + return existing + else + @comments.push comment = { + id: op.t or @newId() + op: # Copy because we'll modify in place + c: op.c + p: op.p + t: op.t + metadata + } + @_markAsDirty comment, "comment", "added" + return comment applyInsertToComments: (op) -> for comment in @comments if op.p <= comment.op.p comment.op.p += op.i.length - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" else if op.p < comment.op.p + comment.op.c.length offset = op.p - comment.op.p comment.op.c = comment.op.c[0..(offset-1)] + op.i + comment.op.c[offset...] 
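# The upsert behaviour this patch adds for cut-and-paste: applying a comment
# op whose id (t) already exists moves the existing comment via
# moveCommentId() rather than adding a duplicate. Sketch:
tracker = new RangesTracker()
tracker.applyOp { c: "hello", p: 10, t: "comment-id-1" }   # adds the comment
tracker.applyOp { c: "hello", p: 42, t: "comment-id-1" }   # same id: moved, not duplicated
# tracker.comments still holds a single entry, now at p: 42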
- @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" applyDeleteToComments: (op) -> op_start = op.p @@ -138,7 +169,7 @@ load = (EventEmitter) -> if op_end <= comment_start # delete is fully before comment comment.op.p -= op_length - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" else if op_start >= comment_end # delete is fully after comment, nothing to do else @@ -161,12 +192,13 @@ load = (EventEmitter) -> comment.op.p = Math.min(comment_start, op_start) comment.op.c = remaining_before + remaining_after - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" applyInsertToChanges: (op, metadata) -> op_start = op.p op_length = op.i.length op_end = op.p + op_length + undoing = !!op.u already_merged = false @@ -184,8 +216,9 @@ load = (EventEmitter) -> change.op.p += op_length moved_changes.push change else if op_start == change_start - # If the insert matches the start of the delete, just remove it from the delete instead - if change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i + # If we are undoing, then we want to cancel any existing delete ranges if we can. + # Check if the insert matches the start of the delete, and just remove it from the delete instead if so. + if undoing and change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i change.op.d = change.op.d.slice(op.i.length) change.op.p += op.i.length if change.op.d == "" @@ -203,15 +236,15 @@ load = (EventEmitter) -> # Only merge inserts if they are from the same user is_same_user = metadata.user_id == change.metadata.user_id - # If this is an insert op at the end of an existing insert with a delete following, and it cancels out the following - # delete then we shouldn't append it to this insert, but instead only cancel the following delete. + # If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also + # an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. # E.g. # foo|<--- about to insert 'b' here # inserted 'foo' --^ ^-- deleted 'bar' # should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . next_change = @changes[i+1] is_op_adjacent_to_next_delete = next_change? and next_change.op.d? and op.p == change_end and next_change.op.p == op.p - will_op_cancel_next_delete = is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i + will_op_cancel_next_delete = undoing and is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i # If there is a delete at the start of the insert, and we're inserting # at the start, we SHOULDN'T merge since the delete acts as a partition. @@ -281,8 +314,8 @@ load = (EventEmitter) -> for change in remove_changes @_removeChange change - if moved_changes.length > 0 - @emit "changes:moved", moved_changes + for change in moved_changes + @_markAsDirty change, "change", "moved" applyDeleteToChanges: (op, metadata) -> op_start = op.p @@ -406,8 +439,8 @@ load = (EventEmitter) -> @_removeChange change moved_changes = moved_changes.filter (c) -> c != change - if moved_changes.length > 0 - @emit "changes:moved", moved_changes + for change in moved_changes + @_markAsDirty change, "change", "moved" _addOp: (op, metadata) -> change = { @@ -427,17 +460,11 @@ load = (EventEmitter) -> else return -1 - if op.d? - @emit "delete:added", change - else if op.i? 
- @emit "insert:added", change + @_markAsDirty(change, "change", "added") _removeChange: (change) -> @changes = @changes.filter (c) -> c.id != change.id - if change.op.d? - @emit "delete:removed", change - else if change.op.i? - @emit "insert:removed", change + @_markAsDirty change, "change", "removed" _applyOpModifications: (content, op_modifications) -> # Put in descending position order, with deleting first if at the same offset @@ -486,13 +513,32 @@ load = (EventEmitter) -> previous_change = change return { moved_changes, remove_changes } + resetDirtyState: () -> + @_dirtyState = { + comment: { + moved: {} + removed: {} + added: {} + } + change: { + moved: {} + removed: {} + added: {} + } + } + + getDirtyState: () -> + return @_dirtyState + + _markAsDirty: (object, type, action) -> + @_dirtyState[type][action][object.id] = object + _clone: (object) -> clone = {} (clone[k] = v for k,v of object) return clone if define? - define ["utils/EventEmitter"], load + define [], load else - EventEmitter = require("events").EventEmitter - module.exports = load(EventEmitter) \ No newline at end of file + module.exports = load() From 667fb91567dc7a763a42e62dadadea76ee130356 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 17 Mar 2017 15:44:37 +0000 Subject: [PATCH 234/769] Update RangesTracker to include undo logic --- .../app/coffee/RangesTracker.coffee | 78 ++++++++++++------- 1 file changed, 49 insertions(+), 29 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 865ecf4ef6..a9c43e9816 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -1,5 +1,5 @@ -load = (EventEmitter) -> - class RangesTracker extends EventEmitter +load = () -> + class RangesTracker # The purpose of this class is to track a set of inserts and deletes to a document, like # track changes in Word. We store these as a set of ShareJs style ranges: # {i: "foo", p: 42} # Insert 'foo' at offset 42 @@ -36,6 +36,7 @@ load = (EventEmitter) -> # middle of a previous insert by the first user, the original insert will be split into two. constructor: (@changes = [], @comments = []) -> @setIdSeed(RangesTracker.generateIdSeed()) + @resetDirtyState() getIdSeed: () -> return @id_seed @@ -75,7 +76,7 @@ load = (EventEmitter) -> comment = @getComment(comment_id) return if !comment? @comments = @comments.filter (c) -> c.id != comment_id - @emit "comment:removed", comment + @_markAsDirty comment, "comment", "removed" getChange: (change_id) -> change = null @@ -103,7 +104,11 @@ load = (EventEmitter) -> @addComment(op, metadata) else throw new Error("unknown op type") - + + applyOps: (ops, metadata = {}) -> + for op in ops + @applyOp(op, metadata) + addComment: (op, metadata) -> # TODO: Don't allow overlapping comments? @comments.push comment = { @@ -114,18 +119,18 @@ load = (EventEmitter) -> t: op.t metadata } - @emit "comment:added", comment + @_markAsDirty comment, "comment", "added" return comment applyInsertToComments: (op) -> for comment in @comments if op.p <= comment.op.p comment.op.p += op.i.length - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" else if op.p < comment.op.p + comment.op.c.length offset = op.p - comment.op.p comment.op.c = comment.op.c[0..(offset-1)] + op.i + comment.op.c[offset...] 
- @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" applyDeleteToComments: (op) -> op_start = op.p @@ -138,7 +143,7 @@ load = (EventEmitter) -> if op_end <= comment_start # delete is fully before comment comment.op.p -= op_length - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" else if op_start >= comment_end # delete is fully after comment, nothing to do else @@ -161,12 +166,13 @@ load = (EventEmitter) -> comment.op.p = Math.min(comment_start, op_start) comment.op.c = remaining_before + remaining_after - @emit "comment:moved", comment + @_markAsDirty comment, "comment", "moved" applyInsertToChanges: (op, metadata) -> op_start = op.p op_length = op.i.length op_end = op.p + op_length + undoing = !!op.u already_merged = false @@ -184,8 +190,9 @@ load = (EventEmitter) -> change.op.p += op_length moved_changes.push change else if op_start == change_start - # If the insert matches the start of the delete, just remove it from the delete instead - if change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i + # If we are undoing, then we want to cancel any existing delete ranges if we can. + # Check if the insert matches the start of the delete, and just remove it from the delete instead if so. + if undoing and change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i change.op.d = change.op.d.slice(op.i.length) change.op.p += op.i.length if change.op.d == "" @@ -203,15 +210,15 @@ load = (EventEmitter) -> # Only merge inserts if they are from the same user is_same_user = metadata.user_id == change.metadata.user_id - # If this is an insert op at the end of an existing insert with a delete following, and it cancels out the following - # delete then we shouldn't append it to this insert, but instead only cancel the following delete. + # If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also + # an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. # E.g. # foo|<--- about to insert 'b' here # inserted 'foo' --^ ^-- deleted 'bar' # should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . next_change = @changes[i+1] is_op_adjacent_to_next_delete = next_change? and next_change.op.d? and op.p == change_end and next_change.op.p == op.p - will_op_cancel_next_delete = is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i + will_op_cancel_next_delete = undoing and is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i # If there is a delete at the start of the insert, and we're inserting # at the start, we SHOULDN'T merge since the delete acts as a partition. @@ -281,8 +288,8 @@ load = (EventEmitter) -> for change in remove_changes @_removeChange change - if moved_changes.length > 0 - @emit "changes:moved", moved_changes + for change in moved_changes + @_markAsDirty change, "change", "moved" applyDeleteToChanges: (op, metadata) -> op_start = op.p @@ -406,8 +413,8 @@ load = (EventEmitter) -> @_removeChange change moved_changes = moved_changes.filter (c) -> c != change - if moved_changes.length > 0 - @emit "changes:moved", moved_changes + for change in moved_changes + @_markAsDirty change, "change", "moved" _addOp: (op, metadata) -> change = { @@ -427,17 +434,11 @@ load = (EventEmitter) -> else return -1 - if op.d? - @emit "delete:added", change - else if op.i? 
- @emit "insert:added", change + @_markAsDirty(change, "change", "added") _removeChange: (change) -> @changes = @changes.filter (c) -> c.id != change.id - if change.op.d? - @emit "delete:removed", change - else if change.op.i? - @emit "insert:removed", change + @_markAsDirty change, "change", "removed" _applyOpModifications: (content, op_modifications) -> # Put in descending position order, with deleting first if at the same offset @@ -486,13 +487,32 @@ load = (EventEmitter) -> previous_change = change return { moved_changes, remove_changes } + resetDirtyState: () -> + @_dirtyState = { + comment: { + moved: {} + removed: {} + added: {} + } + change: { + moved: {} + removed: {} + added: {} + } + } + + getDirtyState: () -> + return @_dirtyState + + _markAsDirty: (object, type, action) -> + @_dirtyState[type][action][object.id] = object + _clone: (object) -> clone = {} (clone[k] = v for k,v of object) return clone if define? - define ["utils/EventEmitter"], load + define [], load else - EventEmitter = require("events").EventEmitter - module.exports = load(EventEmitter) \ No newline at end of file + module.exports = load() From a3a55247784368d70b56c4b83bcb753deaa73fde Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 20 Mar 2017 10:37:02 +0000 Subject: [PATCH 235/769] Fix broken unit test from merge --- .../test/unit/coffee/RangesManager/RangesManagerTests.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee index 0c0c556a38..fd3bc8faec 100644 --- a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee @@ -94,6 +94,7 @@ describe "RangesManager", -> op: [{ c: "one" p: 0 + t: "thread-id-1" }] }] @entries = { @@ -101,12 +102,14 @@ describe "RangesManager", -> op: c: "three " p: 4 + t: "thread-id-2" metadata: user_id: @user_id }, { op: c: "four " p: 10 + t: "thread-id-3" metadata: user_id: @user_id }] @@ -115,7 +118,6 @@ describe "RangesManager", -> @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback it "should return an error", -> - # Save space in redis and don't store just {} @callback.called.should.equal true [error, entries] = @callback.args[0] expect(error).to.not.be.null From 0245bfd03123eafc7d147d9dbc236e27b599b602 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 21 Mar 2017 11:20:38 +0000 Subject: [PATCH 236/769] Support an incoming undo flag for history restores --- .../app/coffee/DocumentManager.coffee | 9 ++- .../app/coffee/HttpController.coffee | 8 +-- .../coffee/SettingADocumentTests.coffee | 61 ++++++++++++++++++- .../coffee/helpers/DocUpdaterClient.coffee | 3 +- .../DocumentManagerTests.coffee | 18 +++++- .../HttpController/HttpControllerTests.coffee | 11 ++-- 6 files changed, 91 insertions(+), 19 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 9ac651b5e7..be47ec4c8c 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -43,7 +43,7 @@ module.exports = DocumentManager = return callback(error) if error? 
callback null, lines, version, ops, ranges - setDoc: (project_id, doc_id, newLines, source, user_id, _callback = (error) ->) -> + setDoc: (project_id, doc_id, newLines, source, user_id, undoing, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.setDoc") callback = (args...) -> timer.done() @@ -63,6 +63,9 @@ module.exports = DocumentManager = logger.log doc_id: doc_id, project_id: project_id, oldLines: oldLines, newLines: newLines, "setting a document via http" DiffCodec.diffAsShareJsOp oldLines, newLines, (error, op) -> return callback(error) if error? + if undoing + for o in op or [] + o.u = true # Turn on undo flag for each op for track changes update = doc: doc_id op: op @@ -161,9 +164,9 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback - setDocWithLock: (project_id, doc_id, lines, source, user_id, callback = (error) ->) -> + setDocWithLock: (project_id, doc_id, lines, source, user_id, undoing, callback = (error) ->) -> UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, callback + UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, undoing, callback flushDocIfLoadedWithLock: (project_id, doc_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 8448361930..aae2b51f8e 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -40,16 +40,14 @@ module.exports = HttpController = setDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id - lines = req.body.lines - source = req.body.source - user_id = req.body.user_id + {lines, source, user_id, undoing} = req.body lineSize = HttpController._getTotalSizeOfLines(lines) if lineSize > TWO_MEGABYTES logger.log {project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response" return res.send 406 - logger.log project_id: project_id, doc_id: doc_id, lines: lines, source: source, user_id: user_id, "setting doc via http" + logger.log {project_id, doc_id, lines, source, user_id, undoing}, "setting doc via http" timer = new Metrics.Timer("http.setDoc") - DocumentManager.setDocWithLock project_id, doc_id, lines, source, user_id, (error) -> + DocumentManager.setDocWithLock project_id, doc_id, lines, source, user_id, undoing, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "set doc via http" diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 3232c6e219..1a5d790be8 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -41,7 +41,7 @@ describe "Setting a document", -> DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => throw error if error? 
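# The undoing flag travels from the HTTP request body through
# setDocWithLock into setDoc, which sets u: true on every diffed op. A
# hypothetical marked-up update for a history restore would look like:
#   { doc: doc_id, op: [{ i: "one and a half\n", p: 4, u: true }], v: version }
# RangesTracker then treats such inserts as undos, cancelling a matching
# tracked delete rather than recording a fresh tracked insert.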
setTimeout () => - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => @statusCode = res.statusCode done() , 200 @@ -74,7 +74,7 @@ describe "Setting a document", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, (error, res, body) => + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => @statusCode = res.statusCode setTimeout done, 200 @@ -94,3 +94,60 @@ describe "Setting a document", -> throw error if error? expect(lines).to.not.exist done() + + describe "with track changes", -> + before -> + @lines = ["one", "one and a half", "two", "three"] + @id_seed = "587357bd35e64f6157" + @update = + doc: @doc_id + op: [{ + d: "one and a half\n" + p: 4 + }] + meta: + tc: @id_seed + user_id: @user_id + v: @version + + describe "with the undo flag", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => + throw error if error? + # Go back to old lines, with undo flag + DocUpdaterClient.setDocLines @project_id, @doc_id, @lines, @source, @user_id, true, (error, res, body) => + @statusCode = res.statusCode + setTimeout done, 200 + + it "should undo the tracked changes", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => + throw error if error? + ranges = data.ranges + expect(ranges.changes).to.be.undefined + done() + + describe "without the undo flag", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => + throw error if error? + # Go back to old lines, without undo flag + DocUpdaterClient.setDocLines @project_id, @doc_id, @lines, @source, @user_id, false, (error, res, body) => + @statusCode = res.statusCode + setTimeout done, 200 + + it "should not undo the tracked changes", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => + throw error if error? 
+ ranges = data.ranges + expect(ranges.changes.length).to.equal 1 + done() + + diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index d2e8dbe51d..7755b656f1 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -53,13 +53,14 @@ module.exports = DocUpdaterClient = request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/flush", (error, res, body) -> callback error, res, body - setDocLines: (project_id, doc_id, lines, source, user_id, callback = (error) ->) -> + setDocLines: (project_id, doc_id, lines, source, user_id, undoing, callback = (error) ->) -> request.post { url: "http://localhost:3003/project/#{project_id}/doc/#{doc_id}" json: lines: lines source: source user_id: user_id + undoing: undoing }, (error, res, body) -> callback error, res, body diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index b7ea49ffc9..47fbde021b 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -194,6 +194,7 @@ describe "DocumentManager", -> beforeEach -> @beforeLines = ["before", "lines"] @afterLines = ["after", "lines"] + @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, true) @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @@ -202,7 +203,7 @@ describe "DocumentManager", -> describe "when already loaded", -> beforeEach -> - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, @callback + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, false, @callback it "should get the current doc lines", -> @DocumentManager.getDoc @@ -246,7 +247,7 @@ describe "DocumentManager", -> describe "when not already loaded", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, false) - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, @callback + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, false, @callback it "should flush and delete the doc from the doc updater", -> @DocumentManager.flushAndDeleteDoc @@ -255,13 +256,24 @@ describe "DocumentManager", -> describe "without new lines", -> beforeEach -> - @DocumentManager.setDoc @project_id, @doc_id, null, @source, @user_id, @callback + @DocumentManager.setDoc @project_id, @doc_id, null, @source, @user_id, false, @callback it "should return the callback with an error", -> @callback.calledWith(new Error("No lines were passed to setDoc")) it "should not try to get the doc lines", -> @DocumentManager.getDoc.called.should.equal false + + describe "with the undoing flag", -> + beforeEach -> + # Copy ops so we don't interfere with other tests + @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] + @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) + @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, true, @callback + + it "should set the undo flag on each op", -> 
+ for op in @ops + op.u.should.equal true describe "acceptChanges", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 859a0d1089..69b40c85d2 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -125,15 +125,16 @@ describe "HttpController", -> lines: @lines source: @source user_id: @user_id + undoing: @undoing = true describe "successfully", -> beforeEach -> - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6) @HttpController.setDoc(@req, @res, @next) it "should set the doc", -> @DocumentManager.setDocWithLock - .calledWith(@project_id, @doc_id, @lines, @source, @user_id) + .calledWith(@project_id, @doc_id, @lines, @source, @user_id, @undoing) .should.equal true it "should return a successful No Content response", -> @@ -143,7 +144,7 @@ describe "HttpController", -> it "should log the request", -> @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, lines: @lines, source: @source, user_id: @user_id, "setting doc via http") + .calledWith(doc_id: @doc_id, project_id: @project_id, lines: @lines, source: @source, user_id: @user_id, undoing: @undoing, "setting doc via http") .should.equal true it "should time the request", -> @@ -151,7 +152,7 @@ describe "HttpController", -> describe "when an errors occurs", -> beforeEach -> - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5, new Error("oops")) + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6, new Error("oops")) @HttpController.setDoc(@req, @res, @next) it "should call next with the error", -> @@ -165,7 +166,7 @@ describe "HttpController", -> for _ in [0..200000] lines.push "test test test" @req.body.lines = lines - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(5) + @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6) @HttpController.setDoc(@req, @res, @next) it 'should send back a 406 response', -> From 64658d0034a8a04f90517155926a1acb95eaa578 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 27 Mar 2017 14:50:09 +0100 Subject: [PATCH 237/769] Add a .nvmrc file --- services/document-updater/.nvmrc | 1 + 1 file changed, 1 insertion(+) create mode 100644 services/document-updater/.nvmrc diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc new file mode 100644 index 0000000000..d87edbfc10 --- /dev/null +++ b/services/document-updater/.nvmrc @@ -0,0 +1 @@ +4.2.1 \ No newline at end of file From 527c02a00b9a62fbdb8c5536e2e7ab0e78b03369 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 30 Mar 2017 11:20:41 +0100 Subject: [PATCH 238/769] log op versions pushed to track changes --- services/document-updater/app/coffee/WebRedisManager.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/app/coffee/WebRedisManager.coffee b/services/document-updater/app/coffee/WebRedisManager.coffee index eb3b6a583c..f500c62daf 100644 --- a/services/document-updater/app/coffee/WebRedisManager.coffee +++ b/services/document-updater/app/coffee/WebRedisManager.coffee @@ -1,6 +1,7 @@ Settings = require('settings-sharelatex') rclient = require("redis-sharelatex").createClient(Settings.redis.web) async = require "async" +logger = require('logger-sharelatex') module.exports = 
WebRedisManager = getPendingUpdatesForDoc : (doc_id, callback)-> @@ -25,6 +26,8 @@ module.exports = WebRedisManager = pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> if ops.length == 0 return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush + opVersions = ops.map (op) -> op?.v + logger.log project_id: project_id, doc_id: doc_id, op_versions: opVersions, "pushing uncompressed history ops" jsonOps = ops.map (op) -> JSON.stringify op async.parallel [ (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOps..., cb From b2b4bc44df090794f4425e880a234cfc308e4f4a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 30 Mar 2017 14:16:35 +0100 Subject: [PATCH 239/769] upgrade logger-sharelatex to v1.5.6 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 9f8439ea04..94e8881810 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -11,7 +11,7 @@ "coffee-script": "1.4.0", "express": "3.3.4", "ioredis": "^2.2.0", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.4", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "redis-sharelatex": "0.0.9", From c11618b475036a4e7a6fff79c3dd2fef93f66ef6 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 30 Mar 2017 15:31:34 +0100 Subject: [PATCH 240/769] improve unlock error handling --- .../app/coffee/LockManager.coffee | 10 +++-- .../LockManager/ReleasingTheLock.coffee | 45 ++++++++++++------- 2 files changed, 37 insertions(+), 18 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index aae60f123d..4926f2e935 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -70,6 +70,10 @@ module.exports = LockManager = releaseLock: (doc_id, lockValue, callback)-> key = keys.blockingKey(doc_id:doc_id) - rclient.eval LockManager.unlockScript, 1, key, lockValue, callback - - + rclient.eval LockManager.unlockScript, 1, key, lockValue, (err, result) -> + if err? + return callback(err) + if result? 
and result isnt 1 # successful unlock should release exactly one key + logger.error {doc_id:doc_id, lockValue:lockValue, redis_err:err, redis_result:result}, "unlocking error" + return callback(new Error("tried to release timed out lock")) + callback(err,result) diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index ed502fb587..5c6b6a6381 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -8,21 +8,36 @@ doc_id = 5678 SandboxedModule = require('sandboxed-module') describe 'LockManager - releasing the lock', ()-> + beforeEach -> + @client = { + auth: -> + eval: sinon.stub() + } + mocks = + "logger-sharelatex": + log:-> + error:-> + "redis-sharelatex": + createClient : () => @client + "./Metrics": {inc: () ->} + @LockManager = SandboxedModule.require(modulePath, requires: mocks) + @lockValue = "lock-value-stub" - evalStub = sinon.stub().yields(1) - mocks = - "logger-sharelatex": log:-> - "redis-sharelatex": - createClient : ()-> - auth:-> - eval: evalStub - "./Metrics": {inc: () ->} - - LockManager = SandboxedModule.require(modulePath, requires: mocks) + describe "when the lock is current", -> + beforeEach -> + @client.eval = sinon.stub().yields(null, 1) + @LockManager.releaseLock doc_id, @lockValue, @callback - it 'should put a all data into memory', (done)-> - lockValue = "lock-value-stub" - LockManager.releaseLock doc_id, lockValue, -> - evalStub.calledWith(LockManager.unlockScript, 1, "Blocking:#{doc_id}", lockValue).should.equal true - done() + it 'should clear the data from redis', -> + @client.eval.calledWith(@LockManager.unlockScript, 1, "Blocking:#{doc_id}", @lockValue).should.equal true + it 'should call the callback', -> + @callback.called.should.equal true + + describe "when the lock has expired", -> + beforeEach -> + @client.eval = sinon.stub().yields(null, 0) + @LockManager.releaseLock doc_id, @lockValue, @callback + + it 'should return an error if the lock has expired', -> + @callback.calledWith(new Error("tried to release timed out lock")).should.equal true From 5583764d05a1236f7996fd50357f32540b301f43 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 30 Mar 2017 15:31:47 +0100 Subject: [PATCH 241/769] fix callback prototype in getLock --- services/document-updater/app/coffee/LockManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 4926f2e935..289075bca9 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -41,7 +41,7 @@ module.exports = LockManager = logger.log {doc_id}, "doc is locked" callback err, false - getLock: (doc_id, callback = (error) ->) -> + getLock: (doc_id, callback = (error, lockValue) ->) -> startTime = Date.now() do attempt = () -> if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME From f21208e8414e4398dbf0fa5b773ae6287147b878 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 12 Apr 2017 14:53:03 +0100 Subject: [PATCH 242/769] Use new redis-sharelatex instead of RedisBackend for cluster abstraction --- services/document-updater/app.coffee | 18 +- .../app/coffee/RedisBackend.coffee | 206 ------- .../app/coffee/RedisKeyBuilder.coffee | 44 -- 
.../app/coffee/RedisManager.coffee | 5 +- .../config/settings.defaults.coffee | 36 +- services/document-updater/package.json | 2 +- .../coffee/ApplyingUpdatesToADocTests.coffee | 18 +- .../coffee/SettingADocumentTests.coffee | 7 +- .../RedisBackend/RedisBackendTests.coffee | 504 ------------------ .../RedisManager/RedisManagerTests.coffee | 30 +- .../WebRedisManagerTests.coffee | 1 + 11 files changed, 63 insertions(+), 808 deletions(-) delete mode 100644 services/document-updater/app/coffee/RedisBackend.coffee delete mode 100644 services/document-updater/app/coffee/RedisKeyBuilder.coffee delete mode 100644 services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 36c0cb3a72..31e8ebb3b3 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -8,7 +8,6 @@ if Settings.sentry?.dsn? RedisManager = require('./app/js/RedisManager') DispatchManager = require('./app/js/DispatchManager') -Keys = require('./app/js/RedisKeyBuilder') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" @@ -63,15 +62,18 @@ app.get '/status', (req, res)-> else res.send('document updater is alive') -redisCheck = require("redis-sharelatex").activeHealthCheckRedis(Settings.redis.web) -app.get "/health_check/redis", (req, res, next)-> - if redisCheck.isAlive() - res.send 200 - else - res.send 500 +webRedisClient = require("redis-sharelatex").createClient(Settings.redis.web) +app.get "/health_check/redis", (req, res, next) -> + webRedisClient.healthCheck (error) -> + if error? + logger.err {err: error}, "failed redis health check" + res.send 500 + else + res.send 200 +docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) app.get "/health_check/redis_cluster", (req, res, next) -> - RedisManager.rclient.healthCheck (error, alive) -> + docUpdaterRedisClient.healthCheck (error) -> if error? 
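# Both endpoints lean on the healthCheck callback that redis-sharelatex
# attaches to its clients; a standalone sketch of the same probe, assuming
# the documentupdater settings shown in this patch:
#   rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
#   rclient.healthCheck (error) ->
#     console.error "redis unhealthy", error if error?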
logger.err {err: error}, "failed redis cluster health check" res.send 500 diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee deleted file mode 100644 index d69cd21a6e..0000000000 --- a/services/document-updater/app/coffee/RedisBackend.coffee +++ /dev/null @@ -1,206 +0,0 @@ -Settings = require "settings-sharelatex" -async = require "async" -_ = require "underscore" -logger = require "logger-sharelatex" -Metrics = require "metrics-sharelatex" - -class Client - constructor: (@clients) -> - @SECONDARY_TIMEOUT = 600 - @HEARTBEAT_TIMEOUT = 2000 - - multi: () -> - return new MultiClient( - @clients.map (client) -> { - rclient: client.rclient.multi() - key_schema: client.key_schema - primary: client.primary - driver: client.driver - } - ) - - healthCheck: (callback) -> - jobs = @clients.map (client) => - (cb) => @_healthCheckClient(client, cb) - async.parallel jobs, callback - - _healthCheckClient: (client, callback) -> - if client.driver == "ioredis" - @_healthCheckClusterClient(client, callback) - else - @_healthCheckNodeRedisClient(client, callback) - - _healthCheckNodeRedisClient: (client, callback) -> - client.healthCheck ?= require("redis-sharelatex").activeHealthCheckRedis(Settings.redis.web) - if client.healthCheck.isAlive() - return callback() - else - return callback(new Error("node-redis client failed health check")) - - _healthCheckClusterClient: (client, callback) -> - jobs = client.rclient.nodes("all").map (n) => - (cb) => @_checkNode(n, cb) - async.parallel jobs, callback - - _checkNode: (node, _callback) -> - callback = (args...) -> - _callback(args...) - _callback = () -> - timer = setTimeout () -> - error = new Error("ioredis node ping check timed out") - logger.error {err: error, key: node.options.key}, "node timed out" - callback(error) - , @HEARTBEAT_TIMEOUT - node.ping (err) -> - clearTimeout timer - callback(err) - -class MultiClient - constructor: (@clients) -> - @SECONDARY_TIMEOUT = 600 - - exec: (callback) -> - primaryError = null - primaryResult = null - jobs = @clients.map (client) => - (cb) => - cb = _.once(cb) - timer = new Metrics.Timer("redis.#{client.driver}.exec") - - timeout = null - if !client.primary - timeout = setTimeout () -> - logger.error {err: new Error("#{client.driver} backend timed out")}, "backend timed out" - cb() - , @SECONDARY_TIMEOUT - - client.rclient.exec (error, result) => - timer.done() - if client.driver == "ioredis" - # ioredis returns an results like: - # [ [null, 42], [null, "foo"] ] - # where the first entries in each 2-tuple are - # presumably errors for each individual command, - # and the second entry is the result. We need to transform - # this into the same result as the old redis driver: - # [ 42, "foo" ] - filtered_result = [] - for entry in result or [] - if entry[0]? - return cb(entry[0]) - else - filtered_result.push entry[1] - result = filtered_result - - if client.primary - primaryError = error - primaryResult = result - if timeout? - clearTimeout(timeout) - cb(error, result) - async.parallel jobs, (error, results) -> - if error? 
- # suppress logging of errors - # logger.error {err: error}, "error in redis backend" - else - compareResults(results, "exec") - callback(primaryError, primaryResult) - -COMMANDS = { - "get": 0, - "smembers": 0, - "set": 0, - "srem": 0, - "sadd": 0, - "del": 0, - "lrange": 0, - "llen": 0, - "rpush": 0, - "expire": 0, - "ltrim": 0, - "incr": 0, - "eval": 2 -} -for command, key_pos of COMMANDS - do (command, key_pos) -> - Client.prototype[command] = (args..., callback) -> - primaryError = null - primaryResult = [] - jobs = @clients.map (client) => - (cb) => - cb = _.once(cb) - key_builder = args[key_pos] - key = key_builder(client.key_schema) - args_with_key = args.slice(0) - args_with_key[key_pos] = key - timer = new Metrics.Timer("redis.#{client.driver}.#{command}") - - timeout = null - if !client.primary - timeout = setTimeout () -> - logger.error {err: new Error("#{client.driver} backend timed out")}, "backend timed out" - cb() - , @SECONDARY_TIMEOUT - - client.rclient[command] args_with_key..., (error, result...) => - timer.done() - if client.primary - primaryError = error - primaryResult = result - if timeout? - clearTimeout(timeout) - cb(error, result...) - async.parallel jobs, (error, results) -> - if error? - logger.error {err: error}, "error in redis backend" - else - compareResults(results, command) - callback(primaryError, primaryResult...) - - MultiClient.prototype[command] = (args...) -> - for client in @clients - key_builder = args[key_pos] - key = key_builder(client.key_schema) - args_with_key = args.slice(0) - args_with_key[key_pos] = key - client.rclient[command] args_with_key... - -compareResults = (results, command) -> - return if results.length < 2 - first = results[0] - if command == "smembers" and first? - first = first.slice().sort() - for result in results.slice(1) - if command == "smembers" and result? - result = result.slice().sort() - if not _.isEqual(first, result) - logger.error results: results, "redis backend conflict" - Metrics.inc "backend-conflict" - else - Metrics.inc "backend-match" - -module.exports = - createClient: () -> - client_configs = Settings.redis.documentupdater - unless client_configs instanceof Array - client_configs.primary = true - client_configs = [client_configs] - clients = client_configs.map (config) -> - if config.cluster? - Redis = require("ioredis") - rclient = new Redis.Cluster(config.cluster) - driver = "ioredis" - else - redis_config = {} - for key in ["host", "port", "password", "endpoints", "masterName"] - if config[key]? - redis_config[key] = config[key] - rclient = require("redis-sharelatex").createClient(redis_config) - driver = "noderedis" - return { - rclient: rclient - key_schema: config.key_schema - primary: config.primary - driver: driver - } - return new Client(clients) \ No newline at end of file diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee deleted file mode 100644 index adde3ee1c9..0000000000 --- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee +++ /dev/null @@ -1,44 +0,0 @@ -# The default key schema looks like: -# doclines:foo -# DocVersion:foo -# but if we use redis cluster, we want all 'foo' keys to map to the same -# node, so we must use: -# doclines:{foo} -# DocVersion:{foo} -# since redis hashes on the contents of {...}. 
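# (For context on the {...} notation in the file being deleted here: Redis
# Cluster maps keys that share a hash tag to the same slot, so
# "doclines:{foo}" and "DocVersion:{foo}" can be used together in one
# multi/exec or eval, while "doclines:foo" and "DocVersion:foo" may land
# on different nodes and cannot.)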
-# -# To transparently support different key schemas for different clients -# (potential writing/reading to both a cluster and single instance -# while we migrate), instead of keys, we now pass around functions which -# will build the key when passed a schema. -# -# E.g. -# key_schema = Settings.redis.keys -# key_schema == { docLines: ({doc_id}) -> "doclines:#{doc_id}", ... } -# key_builder = RedisKeyBuilder.docLines({doc_id: "foo"}) -# key_builder == (key_schema) -> key_schema.docLines({doc_id: "foo"}) -# key = key_builder(key_schema) -# key == "doclines:foo" -module.exports = RedisKeyBuilder = - blockingKey: ({doc_id}) -> - return (key_schema) -> key_schema.blockingKey({doc_id}) - docLines: ({doc_id}) -> - return (key_schema) -> key_schema.docLines({doc_id}) - docOps: ({doc_id}) -> - return (key_schema) -> key_schema.docOps({doc_id}) - docVersion: ({doc_id}) -> - return (key_schema) -> key_schema.docVersion({doc_id}) - docHash: ({doc_id}) -> - return (key_schema) -> key_schema.docHash({doc_id}) - projectKey: ({doc_id}) -> - return (key_schema) -> key_schema.projectKey({doc_id}) - uncompressedHistoryOp: ({doc_id}) -> - return (key_schema) -> key_schema.uncompressedHistoryOp({doc_id}) - pendingUpdates: ({doc_id}) -> - return (key_schema) -> key_schema.pendingUpdates({doc_id}) - ranges: ({doc_id}) -> - return (key_schema) -> key_schema.ranges({doc_id}) - docsInProject: ({project_id}) -> - return (key_schema) -> key_schema.docsInProject({project_id}) - docsWithHistoryOps: ({project_id}) -> - return (key_schema) -> key_schema.docsWithHistoryOps({project_id}) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index cf8249dbd7..3359a36231 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -1,8 +1,7 @@ Settings = require('settings-sharelatex') async = require('async') -rclient = require("./RedisBackend").createClient() +rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) _ = require('underscore') -keys = require('./RedisKeyBuilder') logger = require('logger-sharelatex') metrics = require('./Metrics') Errors = require "./Errors" @@ -25,6 +24,8 @@ logHashWriteErrors = logHashErrors?.write MEGABYTES = 1024 * 1024 MAX_RANGES_SIZE = 3 * MEGABYTES +keys = Settings.redis.documentupdater.key_schema + module.exports = RedisManager = rclient: rclient diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index ae0f9fe681..d638329622 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -20,11 +20,10 @@ module.exports = port:"6379" host:"localhost" password:"" - documentupdater: [{ - primary: true - port:"6379" - host:"localhost" - password:"" + documentupdater: + port: "6379" + host: "localhost" + password: "" key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" docLines: ({doc_id}) -> "doclines:#{doc_id}" @@ -34,20 +33,19 @@ module.exports = projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" - # }, { - # cluster: [{ - # port: "7000" - # host: "localhost" - # }] - # key_schema: - # blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - # docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - # docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - # docVersion: ({doc_id}) -> 
"DocVersion:{#{doc_id}}" - # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" - # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" - # ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" - }] + # cluster: [{ + # port: "7000" + # host: "localhost" + # }] + # key_schema: + # blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + # docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + # docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + # docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + # docHash: ({doc_id}) -> "DocHash:{#{doc_id}}" + # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + # ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 94e8881810..fecda6f936 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -14,7 +14,7 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", - "redis-sharelatex": "0.0.9", + "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.0", "request": "2.25.0", "sandboxed-module": "~0.2.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index bdfe89b990..b0cca1d18b 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -4,7 +4,9 @@ chai.should() expect = chai.expect async = require "async" Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.web) +rclient_web = require("redis-sharelatex").createClient(Settings.redis.web) +rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +Keys = Settings.redis.documentupdater.key_schema MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" @@ -47,10 +49,10 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => throw error if error? JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => throw error if error? 
result.should.equal 1 done() @@ -80,9 +82,9 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => result.should.equal 1 done() @@ -125,17 +127,17 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => updates = (JSON.parse(u) for u in updates) for appliedUpdate, i in @updates appliedUpdate.op.should.deep.equal updates[i].op - rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => result.should.equal 1 done() it "should store the doc ops in the correct order", (done) -> - rclient.lrange "DocOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_du.lrange Keys.docOps({doc_id: @doc_id}), 0, -1, (error, updates) => updates = (JSON.parse(u) for u in updates) for appliedUpdate, i in @updates appliedUpdate.op.should.deep.equal updates[i].op diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 1a5d790be8..97fae5cf14 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -3,7 +3,8 @@ chai = require("chai") chai.should() expect = require("chai").expect Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.web) +rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +Keys = Settings.redis.documentupdater.key_schema MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" @@ -65,7 +66,7 @@ describe "Setting a document", -> done() it "should leave the document in redis", (done) -> - rclient.get "doclines:#{@doc_id}", (error, lines) => + rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) => throw error if error? expect(JSON.parse(lines)).to.deep.equal @newLines done() @@ -90,7 +91,7 @@ describe "Setting a document", -> MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true it "should remove the document from redis", (done) -> - rclient.get "doclines:#{@doc_id}", (error, lines) => + rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) => throw error if error? 
expect(lines).to.not.exist done() diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee deleted file mode 100644 index 4a136baae1..0000000000 --- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee +++ /dev/null @@ -1,504 +0,0 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisBackend.js" -SandboxedModule = require('sandboxed-module') -RedisKeyBuilder = require "../../../../app/js/RedisKeyBuilder" - -describe "RedisBackend", -> - beforeEach -> - @Settings = - redis: - documentupdater: [{ - primary: true - port: "6379" - host: "localhost" - password: "single-password" - key_schema: - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - docLines: ({doc_id}) -> "doclines:#{doc_id}" - docOps: ({doc_id}) -> "DocOps:#{doc_id}" - docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" - docHash: ({doc_id}) -> "DocHash:#{doc_id}" - projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - }, { - cluster: [{ - port: "7000" - host: "localhost" - }] - password: "cluster-password" - key_schema: - blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" - docHash: ({doc_id}) -> "DocHash:{#{doc_id}}" - projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" - docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" - }] - - test_context = @ - class Cluster - constructor: (@config) -> - test_context.rclient_ioredis = @ - - nodes: sinon.stub() - - @timer = timer = sinon.stub() - class Timer - constructor: (args...) -> timer(args...) 
- done: () -> - - @RedisBackend = SandboxedModule.require modulePath, requires: - "settings-sharelatex": @Settings - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - "redis-sharelatex": @redis = - createClient: sinon.stub().returns @rclient_redis = {} - activeHealthCheck: sinon.stub() - "ioredis": @ioredis = - Cluster: Cluster - "metrics-sharelatex": - @Metrics = - inc: sinon.stub() - Timer: Timer - - @client = @RedisBackend.createClient() - - @doc_id = "mock-doc-id" - @project_id = "mock-project-id" - - it "should create a redis client", -> - @redis.createClient - .calledWith({ - port: "6379" - host: "localhost" - password: "single-password" - }) - .should.equal true - - it "should create an ioredis cluster client", -> - @rclient_ioredis.config.should.deep.equal [{ - port: "7000" - host: "localhost" - }] - - describe "individual commands", -> - describe "with the same results", -> - beforeEach (done) -> - @content = "bar" - @rclient_redis.get = sinon.stub() - @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, @content) - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, @content) - @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => - setTimeout () -> # Let all background requests complete - done(error) - - it "should return the result", -> - @result.should.equal @content - - it "should have called the redis client with the appropriate key", -> - @rclient_redis.get - .calledWith("doclines:#{@doc_id}") - .should.equal true - - it "should have called the ioredis cluster client with the appropriate key", -> - @rclient_ioredis.get - .calledWith("doclines:{#{@doc_id}}") - .should.equal true - - it "should send a metric", -> - @Metrics.inc - .calledWith("backend-match") - .should.equal true - - it "should time the commands", -> - @timer - .calledWith("redis.ioredis.get") - .should.equal true - @timer - .calledWith("redis.noderedis.get") - .should.equal true - - describe "with different results", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, "primary-result") - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, "secondary-result") - @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => - setTimeout () -> # Let all background requests complete - done(error) - - it "should return the primary result", -> - @result.should.equal "primary-result" - - it "should send a metric", -> - @Metrics.inc - .calledWith("backend-conflict") - .should.equal true - - describe "with differently ordered results from smembers", -> - beforeEach (done) -> - @rclient_redis.smembers = sinon.stub() - @rclient_redis.smembers.withArgs("DocsIn:#{@project_id}").yields(null, ["one", "two"]) - @rclient_ioredis.smembers = sinon.stub() - @rclient_ioredis.smembers.withArgs("DocsIn:{#{@project_id}}").yields(null, ["two", "one"]) - @client.smembers RedisKeyBuilder.docsInProject({project_id: @project_id}), (error, @result) => - setTimeout () -> # Let all background requests complete - done(error) - - it "should return the primary result", -> - @result.should.deep.equal ["one", "two"] - - it "should send a metric indicating a match", -> - @Metrics.inc - .calledWith("backend-match") - .should.equal true - - describe "when the secondary errors", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - 
@rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, "primary-result") - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(@error = new Error("oops")) - @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => - setTimeout () -> # Let all background requests complete - done(error) - - it "should return the primary result", -> - @result.should.equal "primary-result" - - it "should log out the secondary error", -> - @logger.error - .calledWith({ - err: @error - }, "error in redis backend") - .should.equal true - - describe "when the primary errors", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(@error = new Error("oops")) - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, "secondary-result") - @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (@returned_error, @result) => - setTimeout () -> # Let all background requests complete - done() - - it "should return the error", -> - @returned_error.should.equal @error - - it "should log out the error", -> - @logger.error - .calledWith({ - err: @error - }, "error in redis backend") - .should.equal true - - describe "when the command has the key in a non-zero argument index", -> - beforeEach (done) -> - @script = "mock-script" - @key_count = 1 - @value = "mock-value" - @rclient_redis.eval = sinon.stub() - @rclient_redis.eval.withArgs(@script, @key_count, "Blocking:#{@doc_id}", @value).yields(null) - @rclient_ioredis.eval = sinon.stub() - @rclient_ioredis.eval.withArgs(@script, @key_count, "Blocking:{#{@doc_id}}", @value).yields(null, @content) - @client.eval @script, @key_count, RedisKeyBuilder.blockingKey({doc_id: @doc_id}), @value, (error) => - setTimeout () -> # Let all background requests complete - done(error) - - it "should have called the redis client with the appropriate key", -> - @rclient_redis.eval - .calledWith(@script, @key_count, "Blocking:#{@doc_id}", @value) - .should.equal true - - it "should have called the ioredis cluster client with the appropriate key", -> - @rclient_ioredis.eval - .calledWith(@script, @key_count, "Blocking:{#{@doc_id}}", @value) - .should.equal true - - describe "when the secondary takes longer than SECONDARY_TIMEOUT", -> - beforeEach (done) -> - @client.SECONDARY_TIMEOUT = 10 - @content = "bar" - @rclient_redis.get = (key, cb) => - key.should.equal "doclines:#{@doc_id}" - setTimeout () => - cb(null, @content) - , @client.SECONDARY_TIMEOUT * 3 # If the secondary errors first, don't affect the primary result - @rclient_ioredis.get = (key, cb) => - key.should.equal "doclines:{#{@doc_id}}" - setTimeout () => - cb(null, @content) - , @client.SECONDARY_TIMEOUT * 2 - @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => - done(error) - - it "should log out an error for the backend", -> - @logger.error - .calledWith({err: new Error("backend timed out")}, "backend timed out") - .should.equal true - - it "should return the primary result", -> - @result.should.equal @content - - describe "when the primary takes longer than SECONDARY_TIMEOUT", -> - beforeEach (done) -> - @client.SECONDARY_TIMEOUT = 10 - @content = "bar" - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, @content) - @rclient_redis.get = (key, cb) => - key.should.equal "doclines:#{@doc_id}" - setTimeout () => - cb(null, @content) - , @client.SECONDARY_TIMEOUT * 2 
- @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) => - done(error) - - it "should not consider this an error", -> - @logger.error - .called - .should.equal false - - describe "multi commands", -> - beforeEach -> - # We will test with: - # rclient.multi() - # .get("doclines:foo") - # .get("DocVersion:foo") - # .exec (...) -> - @doclines = "mock-doclines" - @version = "42" - @rclient_redis.multi = sinon.stub().returns @rclient_redis - @rclient_ioredis.multi = sinon.stub().returns @rclient_ioredis - - describe "with the same results", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - @rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version]) - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.exec = sinon.stub().yields(null, [ [null, @doclines], [null, @version] ]) - - multi = @client.multi() - multi.get RedisKeyBuilder.docLines({doc_id: @doc_id}) - multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id}) - multi.exec (error, @result) => - setTimeout () -> - done(error) - - it "should return the result", -> - @result.should.deep.equal [@doclines, @version] - - it "should have called the redis client with the appropriate keys", -> - @rclient_redis.get - .calledWith("doclines:#{@doc_id}") - .should.equal true - @rclient_redis.get - .calledWith("DocVersion:#{@doc_id}") - .should.equal true - @rclient_ioredis.exec - .called - .should.equal true - - it "should have called the ioredis cluster client with the appropriate keys", -> - @rclient_ioredis.get - .calledWith("doclines:{#{@doc_id}}") - .should.equal true - @rclient_ioredis.get - .calledWith("DocVersion:{#{@doc_id}}") - .should.equal true - @rclient_ioredis.exec - .called - .should.equal true - - it "should send a metric", -> - @Metrics.inc - .calledWith("backend-match") - .should.equal true - - it "should time the exec", -> - @timer - .calledWith("redis.ioredis.exec") - .should.equal true - @timer - .calledWith("redis.noderedis.exec") - .should.equal true - - describe "with different results", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - @rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version]) - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.exec = sinon.stub().yields(null, [ [null, "different-doc-lines"], [null, @version] ]) - - multi = @client.multi() - multi.get RedisKeyBuilder.docLines({doc_id: @doc_id}) - multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id}) - multi.exec (error, @result) => - setTimeout () -> - done(error) - - it "should return the primary result", -> - @result.should.deep.equal [@doclines, @version] - - it "should send a metric", -> - @Metrics.inc - .calledWith("backend-conflict") - .should.equal true - - describe "when the secondary errors", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - @rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version]) - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.exec = sinon.stub().yields(@error = new Error("oops")) - - multi = @client.multi() - multi.get RedisKeyBuilder.docLines({doc_id: @doc_id}) - multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id}) - multi.exec (error, @result) => - setTimeout () -> - done(error) - - it "should return the primary result", -> - @result.should.deep.equal [@doclines, @version] - - describe "when the primary errors", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - @rclient_redis.exec = sinon.stub().yields(@error = new Error("oops")) - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.exec = 
sinon.stub().yields([ [null, @doclines], [null, @version] ]) - - multi = @client.multi() - multi.get RedisKeyBuilder.docLines({doc_id: @doc_id}) - multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id}) - multi.exec (@returned_error) => - setTimeout () -> done() - - it "should return the error", -> - @returned_error.should.equal @error - - describe "when the secondary takes longer than SECONDARY_TIMEOUT", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - @rclient_redis.exec = (cb) => - setTimeout () => - cb(null, [@doclines, @version]) - , 30 # If secondary errors first, don't affect the primary result - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.exec = (cb) => - setTimeout () => - cb(null, [ [null, @doclines], [null, @version] ]) - , 20 - - multi = @client.multi() - multi.SECONDARY_TIMEOUT = 10 - multi.get RedisKeyBuilder.docLines({doc_id: @doc_id}) - multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id}) - multi.exec (error, @result) => - done(error) - - it "should log out an error for the backend", -> - @logger.error - .calledWith({err: new Error("backend timed out")}, "backend timed out") - .should.equal true - - it "should return the primary result", -> - @result.should.deep.equal [@doclines, @version] - - describe "when the primary takes longer than SECONDARY_TIMEOUT", -> - beforeEach (done) -> - @rclient_redis.get = sinon.stub() - @rclient_redis.exec = (cb) => - setTimeout () => - cb(null, [@doclines, @version]) - , 20 - @rclient_ioredis.get = sinon.stub() - @rclient_ioredis.exec = sinon.stub().yields(null, [ [null, @doclines], [null, @version] ]) - - multi = @client.multi() - multi.SECONDARY_TIMEOUT = 10 - multi.get RedisKeyBuilder.docLines({doc_id: @doc_id}) - multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id}) - multi.exec (error, @result) => - done(error) - - it "should not consider this an error", -> - @logger.error - .called - .should.equal false - - describe "_healthCheckNodeRedisClient", -> - beforeEach -> - @redis.activeHealthCheckRedis = sinon.stub().returns @healthCheck = { - isAlive: sinon.stub() - } - - describe "successfully", -> - beforeEach (done) -> - @healthCheck.isAlive.returns true - @redis_client = {} - @client._healthCheckNodeRedisClient(@redis_client, done) - - it "should check the status of the node redis client", -> - @healthCheck.isAlive.called.should.equal true - - it "should only create one health check when called multiple times", (done) -> - @client._healthCheckNodeRedisClient @redis_client, () => - @redis.activeHealthCheckRedis.calledOnce.should.equal true - @healthCheck.isAlive.calledTwice.should.equal true - done() - - describe "when failing", -> - beforeEach -> - @healthCheck.isAlive.returns false - @redis_client = {} - - it "should return an error", (done) -> - @client._healthCheckNodeRedisClient @redis_client, (error) -> - error.message.should.equal "node-redis client failed health check" - done() - - describe "_healthCheckClusterClient", -> - beforeEach -> - @client.HEARTBEAT_TIMEOUT = 10 - @nodes = [{ - options: key: "node-0" - stream: destroy: sinon.stub() - }, { - options: key: "node-1" - stream: destroy: sinon.stub() - }] - @rclient_ioredis.nodes = sinon.stub().returns(@nodes) - - describe "when both clients are successful", -> - beforeEach (done) -> - @nodes[0].ping = sinon.stub().yields() - @nodes[1].ping = sinon.stub().yields() - @client._healthCheckClusterClient({ rclient: @rclient_ioredis }, done) - - it "should get all cluster nodes", -> - @rclient_ioredis.nodes - .calledWith("all") - .should.equal true - 
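For reference, the ping-with-timeout pattern these cluster health-check tests exercise looks roughly like the following minimal CoffeeScript sketch. The names healthCheckClusterClient and HEARTBEAT_TIMEOUT mirror the tests, but the body is illustrative, not the shipped implementation, and the timeout value here is an assumed one (the tests use 10ms):

async = require "async"

HEARTBEAT_TIMEOUT = 2000 # ms to wait for a ping reply; assumed value for illustration

healthCheckClusterClient = (client, callback = (error) ->) ->
  # Ask the ioredis cluster client for every node, as in the tests' nodes("all")
  nodes = client.rclient.nodes("all")
  pingNode = (node, cb) ->
    timedOut = false
    timer = setTimeout () ->
      timedOut = true
      cb(new Error("ioredis node ping check timed out"))
    , HEARTBEAT_TIMEOUT
    node.ping () ->
      # Ignore replies that arrive after the timeout has already fired
      return if timedOut
      clearTimeout timer
      cb()
  # Fail the whole health check if any single node does not answer in time
  async.each nodes, pingNode, callback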
- it "should ping each cluster node", -> - for node in @nodes - node.ping.called.should.equal true - - describe "when ping fails to a node", -> - beforeEach -> - @nodes[0].ping = (cb) -> cb() - @nodes[1].ping = (cb) -> # Just hang - - it "should return an error", (done) -> - @client._healthCheckClusterClient { rclient: @rclient_ioredis }, (error) -> - error.message.should.equal "ioredis node ping check timed out" - done() diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 258603be9b..070abd859a 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -14,20 +14,24 @@ describe "RedisManager", -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: - "./RedisBackend": - createClient: () => @rclient - "./RedisKeyBuilder": - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - docLines: ({doc_id}) -> "doclines:#{doc_id}" - docOps: ({doc_id}) -> "DocOps:#{doc_id}" - docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" - docHash: ({doc_id}) -> "DocHash:#{doc_id}" - projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - ranges: ({doc_id}) -> "Ranges:#{doc_id}" "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - "settings-sharelatex": {documentupdater: {logHashErrors: {write:true, read:true}}} + "settings-sharelatex": { + documentupdater: {logHashErrors: {write:true, read:true}} + redis: + documentupdater: + key_schema: + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + docLines: ({doc_id}) -> "doclines:#{doc_id}" + docOps: ({doc_id}) -> "DocOps:#{doc_id}" + docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + docHash: ({doc_id}) -> "DocHash:#{doc_id}" + projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + ranges: ({doc_id}) -> "Ranges:#{doc_id}" + } + "redis-sharelatex": + createClient: () => @rclient "./Metrics": @metrics = inc: sinon.stub() Timer: class Timer diff --git a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee index f3f0d8afdc..a0f88b33f1 100644 --- a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee @@ -14,6 +14,7 @@ describe "WebRedisManager", -> @WebRedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex": createClient: () => @rclient "settings-sharelatex": redis: web: @settings = {"mock": "settings"} + "logger-sharelatex": { log: () -> } @doc_id = "doc-id-123" @project_id = "project-id-123" @callback = sinon.stub() From c5449ae282efccb32220b03161647617d7370c08 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 13 Apr 2017 17:00:42 +0100 Subject: [PATCH 243/769] Split out redis config for real-time and track-changes into separate cluster-compatible configs --- .../app/coffee/HistoryManager.coffee | 4 +- .../app/coffee/HistoryRedisManager.coffee | 20 ++++++ .../app/coffee/WebRedisManager.coffee | 24 ++----- .../config/settings.defaults.coffee | 11 +++ 
.../HistoryManager/HistoryManagerTests.coffee | 18 ++--- .../HistoryRedisManagerTests.coffee | 69 +++++++++++++++++++ .../WebRedisManagerTests.coffee | 50 ++------------ 7 files changed, 121 insertions(+), 75 deletions(-) create mode 100644 services/document-updater/app/coffee/HistoryRedisManager.coffee create mode 100644 services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 637fd2cb5f..512fd5e68b 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -2,7 +2,7 @@ settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" async = require "async" -WebRedisManager = require "./WebRedisManager" +HistoryRedisManager = require "./HistoryRedisManager" module.exports = HistoryManager = flushDocChanges: (project_id, doc_id, callback = (error) ->) -> @@ -25,7 +25,7 @@ module.exports = HistoryManager = pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> if ops.length == 0 return callback() - WebRedisManager.pushUncompressedHistoryOps project_id, doc_id, ops, (error, length) -> + HistoryRedisManager.pushUncompressedHistoryOps project_id, doc_id, ops, (error, length) -> return callback(error) if error? # We want to flush every 50 ops, i.e. 50, 100, 150, etc # Find out which 'block' (i.e. 0-49, 50-99) we were in before and after pushing these diff --git a/services/document-updater/app/coffee/HistoryRedisManager.coffee b/services/document-updater/app/coffee/HistoryRedisManager.coffee new file mode 100644 index 0000000000..315d9daabf --- /dev/null +++ b/services/document-updater/app/coffee/HistoryRedisManager.coffee @@ -0,0 +1,20 @@ +Settings = require('settings-sharelatex') +rclient = require("redis-sharelatex").createClient(Settings.redis.history) +Keys = Settings.redis.history.key_schema +async = require "async" +logger = require('logger-sharelatex') + +module.exports = HistoryRedisManager = + pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> + if ops.length == 0 + return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush + opVersions = ops.map (op) -> op?.v + logger.log project_id: project_id, doc_id: doc_id, op_versions: opVersions, "pushing uncompressed history ops" + jsonOps = ops.map (op) -> JSON.stringify op + async.parallel [ + (cb) -> rclient.rpush Keys.uncompressedHistoryOps({doc_id}), jsonOps..., cb + (cb) -> rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, cb + ], (error, results) -> + return callback(error) if error? 
+ [length, _] = results + callback(error, length) \ No newline at end of file diff --git a/services/document-updater/app/coffee/WebRedisManager.coffee b/services/document-updater/app/coffee/WebRedisManager.coffee index f500c62daf..6b7326d73a 100644 --- a/services/document-updater/app/coffee/WebRedisManager.coffee +++ b/services/document-updater/app/coffee/WebRedisManager.coffee @@ -1,13 +1,13 @@ Settings = require('settings-sharelatex') rclient = require("redis-sharelatex").createClient(Settings.redis.web) -async = require "async" +Keys = Settings.redis.web.key_schema logger = require('logger-sharelatex') module.exports = WebRedisManager = getPendingUpdatesForDoc : (doc_id, callback)-> multi = rclient.multi() - multi.lrange "PendingUpdates:#{doc_id}", 0 , -1 - multi.del "PendingUpdates:#{doc_id}" + multi.lrange Keys.pendingUpdates({doc_id}), 0 , -1 + multi.del Keys.pendingUpdates({doc_id}) multi.exec (error, replys) -> return callback(error) if error? jsonUpdates = replys[0] @@ -21,21 +21,7 @@ module.exports = WebRedisManager = callback error, updates getUpdatesLength: (doc_id, callback)-> - rclient.llen "PendingUpdates:#{doc_id}", callback + rclient.llen Keys.pendingUpdates({doc_id}), callback - pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> - if ops.length == 0 - return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush - opVersions = ops.map (op) -> op?.v - logger.log project_id: project_id, doc_id: doc_id, op_versions: opVersions, "pushing uncompressed history ops" - jsonOps = ops.map (op) -> JSON.stringify op - async.parallel [ - (cb) -> rclient.rpush "UncompressedHistoryOps:#{doc_id}", jsonOps..., cb - (cb) -> rclient.sadd "DocsWithHistoryOps:#{project_id}", doc_id, cb - ], (error, results) -> - return callback(error) if error? 
- [length, _] = results - callback(error, length) - sendData: (data) -> - rclient.publish "applied-ops", JSON.stringify(data) \ No newline at end of file + rclient.publish "applied-ops", JSON.stringify(data) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index d638329622..973cfca970 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -20,6 +20,8 @@ module.exports = port:"6379" host:"localhost" password:"" + key_schema: + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" documentupdater: port: "6379" host: "localhost" @@ -33,6 +35,15 @@ module.exports = projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" + history: + port:"6379" + host:"localhost" + password:"" + key_schema: + uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" + docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" + + # cluster: [{ # port: "7000" # host: "localhost" diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index c33a18d4e6..66ffd98e80 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -9,7 +9,7 @@ describe "HistoryManager", -> "request": @request = {} "settings-sharelatex": @Settings = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./WebRedisManager": @WebRedisManager = {} + "./HistoryRedisManager": @HistoryRedisManager = {} @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -47,11 +47,11 @@ describe "HistoryManager", -> describe "pushing the op", -> beforeEach -> - @WebRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) + @HistoryRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback it "should push the ops into redis", -> - @WebRedisManager.pushUncompressedHistoryOps + @HistoryRedisManager.pushUncompressedHistoryOps .calledWith(@project_id, @doc_id, @ops) .should.equal true @@ -63,7 +63,7 @@ describe "HistoryManager", -> describe "when we hit a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> - @WebRedisManager.pushUncompressedHistoryOps = + @HistoryRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback @@ -75,7 +75,7 @@ describe "HistoryManager", -> describe "when we go over a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> @ops = ["op1", "op2", "op3"] - @WebRedisManager.pushUncompressedHistoryOps = + @HistoryRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS + 1) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback @@ -86,7 +86,7 @@ describe "HistoryManager", -> describe "when HistoryManager errors", -> beforeEach -> - @WebRedisManager.pushUncompressedHistoryOps = + @HistoryRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS) 
@HistoryManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback @@ -103,10 +103,10 @@ describe "HistoryManager", -> describe "with no ops", -> beforeEach -> - @WebRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) + @HistoryRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, [], @callback - it "should not call WebRedisManager.pushUncompressedHistoryOps", -> - @WebRedisManager.pushUncompressedHistoryOps.called.should.equal false + it "should not call HistoryRedisManager.pushUncompressedHistoryOps", -> + @HistoryRedisManager.pushUncompressedHistoryOps.called.should.equal false diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee new file mode 100644 index 0000000000..f51942c1e1 --- /dev/null +++ b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee @@ -0,0 +1,69 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/HistoryRedisManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors" + +describe "HistoryRedisManager", -> + beforeEach -> + @rclient = + auth: () -> + exec: sinon.stub() + @rclient.multi = () => @rclient + @HistoryRedisManager = SandboxedModule.require modulePath, requires: + "redis-sharelatex": createClient: () => @rclient + "settings-sharelatex": + redis: + history: @settings = + key_schema: + uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" + docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" + "logger-sharelatex": { log: () -> } + @doc_id = "doc-id-123" + @project_id = "project-id-123" + @callback = sinon.stub() + + describe "pushUncompressedHistoryOps", -> + beforeEach -> + @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }] + @rclient.rpush = sinon.stub().yields(null, @length = 42) + @rclient.sadd = sinon.stub().yields() + + describe "with ops", -> + beforeEach (done) -> + @HistoryRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, (args...) => + @callback(args...) + done() + + it "should push the doc op into the doc ops list as JSON", -> + @rclient.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) + .should.equal true + + it "should add the doc_id to the set of which records the project docs", -> + @rclient.sadd + .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) + .should.equal true + + it "should call the callback with the length", -> + @callback.calledWith(null, @length).should.equal true + + describe "with no ops", -> + beforeEach (done) -> + @HistoryRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, [], (args...) => + @callback(args...) 
+ done() + + it "should not push the doc op into the doc ops list as JSON", -> + @rclient.rpush + .called + .should.equal false + + it "should not add the doc_id to the set of which records the project docs", -> + @rclient.sadd + .called + .should.equal false + + it "should call the callback with an error", -> + @callback.calledWith(new Error("cannot push no ops")).should.equal true diff --git a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee index a0f88b33f1..e1fb89eaed 100644 --- a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee @@ -13,7 +13,11 @@ describe "WebRedisManager", -> @rclient.multi = () => @rclient @WebRedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex": createClient: () => @rclient - "settings-sharelatex": redis: web: @settings = {"mock": "settings"} + "settings-sharelatex": + redis: + web: @settings = + key_schema: + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" "logger-sharelatex": { log: () -> } @doc_id = "doc-id-123" @project_id = "project-id-123" @@ -70,47 +74,3 @@ describe "WebRedisManager", -> it "should return the length", -> @callback.calledWith(null, @length).should.equal true - - describe "pushUncompressedHistoryOps", -> - beforeEach -> - @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }] - @rclient.rpush = sinon.stub().yields(null, @length = 42) - @rclient.sadd = sinon.stub().yields() - - describe "with ops", -> - beforeEach (done) -> - @WebRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, (args...) => - @callback(args...) - done() - - it "should push the doc op into the doc ops list as JSON", -> - @rclient.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) - .should.equal true - - it "should add the doc_id to the set of which records the project docs", -> - @rclient.sadd - .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) - .should.equal true - - it "should call the callback with the length", -> - @callback.calledWith(null, @length).should.equal true - - describe "with no ops", -> - beforeEach (done) -> - @WebRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, [], (args...) => - @callback(args...) 
- done() - - it "should not push the doc op into the doc ops list as JSON", -> - @rclient.rpush - .called - .should.equal false - - it "should not add the doc_id to the set of which records the project docs", -> - @rclient.sadd - .called - .should.equal false - - it "should call the callback with an error", -> - @callback.calledWith(new Error("cannot push no ops")).should.equal true From dc77bc207d92e90639ad5bceaf998775e240488f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 24 Apr 2017 16:31:23 +0100 Subject: [PATCH 244/769] change acceptance test to use redis history client --- .../coffee/ApplyingUpdatesToADocTests.coffee | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index b0cca1d18b..abb10cc95a 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -4,7 +4,7 @@ chai.should() expect = chai.expect async = require "async" Settings = require('settings-sharelatex') -rclient_web = require("redis-sharelatex").createClient(Settings.redis.web) +rclient_history = require("redis-sharelatex").createClient(Settings.redis.history) rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) Keys = Settings.redis.documentupdater.key_schema @@ -49,10 +49,10 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_history.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => throw error if error? JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_history.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => throw error if error? 
result.should.equal 1 done() @@ -82,9 +82,9 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_history.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_history.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => result.should.equal 1 done() @@ -127,12 +127,12 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_history.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => updates = (JSON.parse(u) for u in updates) for appliedUpdate, i in @updates appliedUpdate.op.should.deep.equal updates[i].op - rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_history.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => result.should.equal 1 done() From 64aef0b55a7884e0e12d17ba010759f15b3a1fc4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 27 Apr 2017 10:42:43 +0100 Subject: [PATCH 245/769] fix acceptance test to work with redis cluster too --- .../coffee/ApplyingUpdatesToADocTests.coffee | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index abb10cc95a..d06119c690 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -7,6 +7,7 @@ Settings = require('settings-sharelatex') rclient_history = require("redis-sharelatex").createClient(Settings.redis.history) rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) Keys = Settings.redis.documentupdater.key_schema +HistoryKeys = Settings.redis.history.key_schema MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" @@ -49,10 +50,10 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient_history.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => throw error if error? JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient_history.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) => throw error if error? 
result.should.equal 1 done() @@ -82,9 +83,9 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient_history.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient_history.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) => result.should.equal 1 done() @@ -127,12 +128,12 @@ describe "Applying updates to a doc", -> done() it "should push the applied updates to the track changes api", (done) -> - rclient_history.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) => + rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => updates = (JSON.parse(u) for u in updates) for appliedUpdate, i in @updates appliedUpdate.op.should.deep.equal updates[i].op - rclient_history.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) => + rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) => result.should.equal 1 done() From ed493d8ad3ec6b355d0a1c1ecb4c9bb25f548ea6 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 2 May 2017 15:38:33 +0100 Subject: [PATCH 246/769] Rename web -> realtime for consistency with realtime --- services/document-updater/app.coffee | 2 +- .../app/coffee/DocumentManager.coffee | 2 +- ...ager.coffee => RealTimeRedisManager.coffee} | 6 +++--- .../app/coffee/ShareJsUpdateManager.coffee | 4 ++-- .../app/coffee/UpdateManager.coffee | 8 ++++---- .../config/settings.defaults.coffee | 2 +- .../DocumentManagerTests.coffee | 2 +- .../RealTimeRedisManagerTests.coffee} | 14 +++++++------- .../ShareJsUpdateManagerTests.coffee | 6 +++--- .../UpdateManager/UpdateManagerTests.coffee | 18 +++++++++--------- 10 files changed, 32 insertions(+), 32 deletions(-) rename services/document-updater/app/coffee/{WebRedisManager.coffee => RealTimeRedisManager.coffee} (88%) rename services/document-updater/test/unit/coffee/{WebRedisManager/WebRedisManagerTests.coffee => RealTimeRedisManager/RealTimeRedisManagerTests.coffee} (83%) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 31e8ebb3b3..eb0ea771aa 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -62,7 +62,7 @@ app.get '/status', (req, res)-> else res.send('document updater is alive') -webRedisClient = require("redis-sharelatex").createClient(Settings.redis.web) +webRedisClient = require("redis-sharelatex").createClient(Settings.redis.realtime) app.get "/health_check/redis", (req, res, next) -> webRedisClient.healthCheck (error) -> if error? 
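The renamed namespace is consumed exactly like the old one; a minimal sketch of the client-side change, built only from pieces that appear in these patches (the doc id is a placeholder):

Settings = require "settings-sharelatex"
redis = require "redis-sharelatex"

# Before this patch: rclient = redis.createClient(Settings.redis.web)
rclient = redis.createClient(Settings.redis.realtime)
Keys = Settings.redis.realtime.key_schema

# Key names come from the key_schema, so a cluster config can switch to
# hash-tagged keys like "PendingUpdates:{doc_id}" without code changes
doc_id = "doc-id-123"
rclient.llen Keys.pendingUpdates({doc_id}), (error, length) ->
  throw error if error?
  console.log "#{length} pending updates for #{doc_id}"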
diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index be47ec4c8c..c155de58fe 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -4,7 +4,7 @@ DiffCodec = require "./DiffCodec" logger = require "logger-sharelatex" Metrics = require "./Metrics" HistoryManager = require "./HistoryManager" -WebRedisManager = require "./WebRedisManager" +RealTimeRedisManager = require "./RealTimeRedisManager" Errors = require "./Errors" RangesManager = require "./RangesManager" diff --git a/services/document-updater/app/coffee/WebRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee similarity index 88% rename from services/document-updater/app/coffee/WebRedisManager.coffee rename to services/document-updater/app/coffee/RealTimeRedisManager.coffee index 6b7326d73a..197a4708c1 100644 --- a/services/document-updater/app/coffee/WebRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -1,9 +1,9 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.web) -Keys = Settings.redis.web.key_schema +rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) +Keys = Settings.redis.realtime.key_schema logger = require('logger-sharelatex') -module.exports = WebRedisManager = +module.exports = RealTimeRedisManager = getPendingUpdatesForDoc : (doc_id, callback)-> multi = rclient.multi() multi.lrange Keys.pendingUpdates({doc_id}), 0 , -1 diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index f175796467..3ec90e4f62 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -6,7 +6,7 @@ Settings = require('settings-sharelatex') Keys = require "./UpdateKeys" {EventEmitter} = require "events" util = require "util" -WebRedisManager = require "./WebRedisManager" +RealTimeRedisManager = require "./RealTimeRedisManager" ShareJsModel:: = {} util.inherits ShareJsModel, EventEmitter @@ -52,5 +52,5 @@ module.exports = ShareJsUpdateManager = ShareJsUpdateManager._sendOp(project_id, doc_id, opData) _sendOp: (project_id, doc_id, op) -> - WebRedisManager.sendData {project_id, doc_id, op} + RealTimeRedisManager.sendData {project_id, doc_id, op} diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index b6a5f98c4c..5022f6bb38 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -1,6 +1,6 @@ LockManager = require "./LockManager" RedisManager = require "./RedisManager" -WebRedisManager = require "./WebRedisManager" +RealTimeRedisManager = require "./RealTimeRedisManager" ShareJsUpdateManager = require "./ShareJsUpdateManager" HistoryManager = require "./HistoryManager" Settings = require('settings-sharelatex') @@ -30,7 +30,7 @@ module.exports = UpdateManager = UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback continueProcessingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> - WebRedisManager.getUpdatesLength doc_id, (error, length) => + RealTimeRedisManager.getUpdatesLength doc_id, (error, length) => return callback(error) if error? 
if length > 0 UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, callback @@ -38,7 +38,7 @@ module.exports = UpdateManager = callback() fetchAndApplyUpdates: (project_id, doc_id, callback = (error) ->) -> - WebRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => + RealTimeRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => return callback(error) if error? if updates.length == 0 return callback() @@ -49,7 +49,7 @@ module.exports = UpdateManager = applyUpdate: (project_id, doc_id, update, _callback = (error) ->) -> callback = (error) -> if error? - WebRedisManager.sendData {project_id, doc_id, error: error.message || error} + RealTimeRedisManager.sendData {project_id, doc_id, error: error.message || error} _callback(error) UpdateManager._sanitizeUpdate update diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 973cfca970..ae5ff0522b 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -16,7 +16,7 @@ module.exports = url: "http://localhost:3015" redis: - web: + realtime: port:"6379" host:"localhost" password:"" diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 47fbde021b..e781546ec6 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -16,7 +16,7 @@ describe "DocumentManager", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() - "./WebRedisManager": @WebRedisManager = {} + "./RealTimeRedisManager": @RealTimeRedisManager = {} "./DiffCodec": @DiffCodec = {} "./UpdateManager": @UpdateManager = {} "./RangesManager": @RangesManager = {} diff --git a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee similarity index 83% rename from services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee rename to services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index e1fb89eaed..b6aa35ac72 100644 --- a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -1,21 +1,21 @@ sinon = require('sinon') chai = require('chai') should = chai.should() -modulePath = "../../../../app/js/WebRedisManager.js" +modulePath = "../../../../app/js/RealTimeRedisManager.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors" -describe "WebRedisManager", -> +describe "RealTimeRedisManager", -> beforeEach -> @rclient = auth: () -> exec: sinon.stub() @rclient.multi = () => @rclient - @WebRedisManager = SandboxedModule.require modulePath, requires: + @RealTimeRedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex": createClient: () => @rclient "settings-sharelatex": redis: - web: @settings = + realtime: @settings = key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" "logger-sharelatex": { log: () -> } @@ -36,7 +36,7 @@ describe "WebRedisManager", -> ] @jsonUpdates = @updates.map (update) -> JSON.stringify update 
@rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) - @WebRedisManager.getPendingUpdatesForDoc @doc_id, @callback + @RealTimeRedisManager.getPendingUpdatesForDoc @doc_id, @callback it "should get the pending updates", -> @rclient.lrange @@ -58,7 +58,7 @@ describe "WebRedisManager", -> "broken json" ] @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) - @WebRedisManager.getPendingUpdatesForDoc @doc_id, @callback + @RealTimeRedisManager.getPendingUpdatesForDoc @doc_id, @callback it "should return an error to the callback", -> @callback.calledWith(new Error("JSON parse error")).should.equal true @@ -67,7 +67,7 @@ describe "WebRedisManager", -> describe "getUpdatesLength", -> beforeEach -> @rclient.llen = sinon.stub().yields(null, @length = 3) - @WebRedisManager.getUpdatesLength @doc_id, @callback + @RealTimeRedisManager.getUpdatesLength @doc_id, @callback it "should look up the length", -> @rclient.llen.calledWith("PendingUpdates:#{@doc_id}").should.equal true diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index 42ba3f331b..b7364b00a4 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -17,7 +17,7 @@ describe "ShareJsUpdateManager", -> "./ShareJsDB" : @ShareJsDB = { mockDB: true } "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": @logger = { log: sinon.stub() } - "./WebRedisManager": @WebRedisManager = {} + "./RealTimeRedisManager": @RealTimeRedisManager = {} globals: clearTimeout: @clearTimeout = sinon.stub() @@ -105,11 +105,11 @@ describe "ShareJsUpdateManager", -> @opData = op: {t: "foo", p: 1} meta: source: "bar" - @WebRedisManager.sendData = sinon.stub() + @RealTimeRedisManager.sendData = sinon.stub() @callback("#{@project_id}:#{@doc_id}", @opData) it "should publish the op to redis", -> - @WebRedisManager.sendData + @RealTimeRedisManager.sendData .calledWith({project_id: @project_id, doc_id: @doc_id, op: @opData}) .should.equal true diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index fbf9b21ddc..3e659ee078 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -12,7 +12,7 @@ describe "UpdateManager", -> @UpdateManager = SandboxedModule.require modulePath, requires: "./LockManager" : @LockManager = {} "./RedisManager" : @RedisManager = {} - "./WebRedisManager" : @WebRedisManager = {} + "./RealTimeRedisManager" : @RealTimeRedisManager = {} "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} "./HistoryManager" : @HistoryManager = {} "logger-sharelatex": @logger = { log: sinon.stub() } @@ -94,7 +94,7 @@ describe "UpdateManager", -> describe "continueProcessingUpdatesWithLock", -> describe "when there are outstanding updates", -> beforeEach -> - @WebRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 3) + @RealTimeRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 3) @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) @UpdateManager.continueProcessingUpdatesWithLock @project_id, @doc_id, 
@callback @@ -106,7 +106,7 @@ describe "UpdateManager", -> describe "when there are no outstanding updates", -> beforeEach -> - @WebRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 0) + @RealTimeRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 0) @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) @UpdateManager.continueProcessingUpdatesWithLock @project_id, @doc_id, @callback @@ -122,12 +122,12 @@ describe "UpdateManager", -> @updates = [{p: 1, t: "foo"}] @updatedDocLines = ["updated", "lines"] @version = 34 - @WebRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) + @RealTimeRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback it "should get the pending updates", -> - @WebRedisManager.getPendingUpdatesForDoc.calledWith(@doc_id).should.equal true + @RealTimeRedisManager.getPendingUpdatesForDoc.calledWith(@doc_id).should.equal true it "should apply the updates", -> for update in @updates @@ -141,7 +141,7 @@ describe "UpdateManager", -> describe "when there are no updates", -> beforeEach -> @updates = [] - @WebRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) + @RealTimeRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) @UpdateManager.applyUpdate = sinon.stub() @RedisManager.setDocument = sinon.stub() @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback @@ -165,7 +165,7 @@ describe "UpdateManager", -> @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields() - @WebRedisManager.sendData = sinon.stub() + @RealTimeRedisManager.sendData = sinon.stub() @HistoryManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) describe "normally", -> @@ -214,8 +214,8 @@ describe "UpdateManager", -> @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(@error) @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback - it "should call WebRedisManager.sendData with the error", -> - @WebRedisManager.sendData + it "should call RealTimeRedisManager.sendData with the error", -> + @RealTimeRedisManager.sendData .calledWith({ project_id: @project_id, doc_id: @doc_id, From 4104ca48894d0b064939ff285e101a435448c027 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 3 May 2017 16:27:32 +0100 Subject: [PATCH 247/769] Add in separate redis config for the lock and fix a few web -> realtime --- services/document-updater/app.coffee | 4 -- .../app/coffee/DispatchManager.coffee | 2 +- .../app/coffee/LockManager.coffee | 6 +-- .../config/settings.defaults.coffee | 41 +++++++++++++++---- .../coffee/helpers/DocUpdaterClient.coffee | 7 ++-- .../DispatchManagerTests.coffee | 2 +- 6 files changed, 40 insertions(+), 22 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index eb0ea771aa..d036beda55 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -11,10 +11,6 @@ DispatchManager = require('./app/js/DispatchManager') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" -redis = require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.web) - - 
Path = require "path" Metrics = require "metrics-sharelatex" Metrics.initialize("doc-updater") diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 28397185dc..b7e50291b0 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -8,7 +8,7 @@ Metrics = require('./Metrics') module.exports = DispatchManager = createDispatcher: () -> - client = redis.createClient(Settings.redis.web) + client = redis.createClient(Settings.redis.realtime) worker = { client: client _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 289075bca9..d237b51feb 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -1,7 +1,8 @@ metrics = require('./Metrics') Settings = require('settings-sharelatex') redis = require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.web) +rclient = redis.createClient(Settings.redis.lock) +keys = Settings.redis.lock.key_schema logger = require "logger-sharelatex" os = require "os" crypto = require "crypto" @@ -11,9 +12,6 @@ PID = process.pid RND = crypto.randomBytes(4).toString('hex') COUNT = 0 -keys = - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - module.exports = LockManager = LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock MAX_LOCK_WAIT_TIME: 10000 # 10s maximum time to spend trying to get the lock diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index ae5ff0522b..b06b9d8bf9 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -22,6 +22,12 @@ module.exports = password:"" key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + # cluster: [{ + # port: "7000" + # host: "localhost" + # }] + # key_schema: + # pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" documentupdater: port: "6379" host: "localhost" @@ -35,15 +41,6 @@ module.exports = projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" - history: - port:"6379" - host:"localhost" - password:"" - key_schema: - uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" - docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" - - # cluster: [{ # port: "7000" # host: "localhost" @@ -57,6 +54,32 @@ module.exports = # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" # ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" + history: + port:"6379" + host:"localhost" + password:"" + key_schema: + uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" + docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" + # cluster: [{ + # port: "7000" + # host: "localhost" + # }] + # key_schema: + # uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" + # docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" + lock: + port:"6379" + host:"localhost" + password:"" + key_schema: + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + # cluster: [{ + # port: "7000" + # host: "localhost" + # }] + # key_schema: + 
# blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 7755b656f1..4b57e0659f 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -1,9 +1,10 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.web) +rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) +keys = Settings.redis.realtime.key_schema request = require("request").defaults(jar: false) async = require "async" -rclient_sub = require("redis-sharelatex").createClient(Settings.redis.web) +rclient_sub = require("redis-sharelatex").createClient(Settings.redis.realtime) rclient_sub.subscribe "applied-ops" rclient_sub.setMaxListeners(0) @@ -17,7 +18,7 @@ module.exports = DocUpdaterClient = rclient_sub.on "message", callback sendUpdate: (project_id, doc_id, update, callback = (error) ->) -> - rclient.rpush "PendingUpdates:#{doc_id}", JSON.stringify(update), (error)-> + rclient.rpush keys.pendingUpdates({doc_id}), JSON.stringify(update), (error)-> return callback(error) if error? doc_key = "#{project_id}:#{doc_id}" rclient.sadd "DocsWithPendingUpdates", doc_key, (error) -> diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index eddb1eaddb..a82a40af04 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -11,7 +11,7 @@ describe "DispatchManager", -> "logger-sharelatex": @logger = { log: sinon.stub() } "settings-sharelatex": @settings = redis: - web: {} + realtime: {} "redis-sharelatex": @redis = {} @callback = sinon.stub() From 5f93640077ad1ffcf8c334b9e0f97a91883fbaf2 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 4 May 2017 11:14:17 +0100 Subject: [PATCH 248/769] Add scripts for testing cluster failover scenarios --- .../coffee/test_blpop_failover.coffee | 41 +++++++++++++++++++ .../coffee/test_pubsub_failover.coffee | 33 +++++++++++++++ 2 files changed, 74 insertions(+) create mode 100644 services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee create mode 100644 services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee new file mode 100644 index 0000000000..72a11164a4 --- /dev/null +++ b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee @@ -0,0 +1,41 @@ +redis = require "redis-sharelatex" +rclient1 = redis.createClient(cluster: [{ + port: "7000" + host: "localhost" +}]) + +rclient2 = redis.createClient(cluster: [{ + port: "7000" + host: "localhost" +}]) + +counter = 0 +sendPing = (cb = () ->) -> + rclient1.rpush "test-blpop", counter, (error) -> + console.error "[SENDING ERROR]", error.message if error? + if !error? + counter += 1 + cb() + +previous = null +listenForPing = (cb) -> + rclient2.blpop "test-blpop", 200, (error, result) -> + return cb(error) if error? 
+ [key, value] = result + value = parseInt(value, 10) + if value % 10 == 0 + console.log "." + if previous? and value != previous + 1 + error = new Error("Counter not in order. Got #{value}, expected #{previous + 1}") + previous = value + return cb(error, value) + +PING_DELAY = 100 +do sendPings = () -> + sendPing () -> + setTimeout sendPings, PING_DELAY + +do listenInBackground = (cb = () ->) -> + listenForPing (error, value) -> + console.error "[RECEIVING ERROR]", error.message if error + setTimeout listenInBackground \ No newline at end of file diff --git a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee new file mode 100644 index 0000000000..31bddb5bca --- /dev/null +++ b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee @@ -0,0 +1,33 @@ +redis = require "redis-sharelatex" +rclient1 = redis.createClient(cluster: [{ + port: "7000" + host: "localhost" +}]) + +rclient2 = redis.createClient(cluster: [{ + port: "7000" + host: "localhost" +}]) + +counter = 0 +sendPing = (cb = () ->) -> + rclient1.publish "test-pubsub", counter, (error) -> + console.error "[SENDING ERROR]", error.message if error? + if !error? + counter += 1 + cb() + +previous = null +rclient2.subscribe "test-pubsub" +rclient2.on "message", (channel, value) -> + value = parseInt(value, 10) + if value % 10 == 0 + console.log "." + if previous? and value != previous + 1 + console.error "[RECEIVING ERROR]", "Counter not in order. Got #{value}, expected #{previous + 1}" + previous = value + +PING_DELAY = 100 +do sendPings = () -> + sendPing () -> + setTimeout sendPings, PING_DELAY From d530ee9501eba8e51cf6a30cefa2f3b5811f73c7 Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Thu, 4 May 2017 15:32:54 +0100 Subject: [PATCH 249/769] Add methods to bulk accept changes. 
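This adds a bulk variant of the accept-change endpoint that takes an array of change ids in the request body. A minimal sketch of a caller, assuming the service's local URL (the host and port are placeholders, not taken from this patch; the route, body shape, and 204 response are):

request = require "request"

project_id = "project-id-123"
doc_id = "doc-id-123"

request.post {
  url: "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/change/accept"
  json:
    change_ids: ["change-id-1", "change-id-2"]
}, (error, res, body) ->
  throw error if error?
  # The endpoint replies 204 No Content on success
  console.log "bulk accept returned status", res.statusCode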
--- services/document-updater/app.coffee | 15 +++++++------- .../app/coffee/DocumentManager.coffee | 20 +++++++++++++++++++ .../app/coffee/HttpController.coffee | 12 +++++++++++ .../app/coffee/RangesManager.coffee | 9 +++++++++ 4 files changed, 49 insertions(+), 7 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 36c0cb3a72..1672d1b3b5 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -42,14 +42,15 @@ app.param 'doc_id', (req, res, next, doc_id) -> else next new Error("invalid doc id") -app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc -app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc -app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded -app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc -app.delete '/project/:project_id', HttpController.deleteProject -app.post '/project/:project_id/flush', HttpController.flushProject +app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc +app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc +app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded +app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc +app.delete '/project/:project_id', HttpController.deleteProject +app.post '/project/:project_id/flush', HttpController.flushProject app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChange -app.del '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment +app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.bulkAcceptChanges +app.del '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment app.get '/total', (req, res)-> timer = new Metrics.Timer("http.allDocList") diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index be47ec4c8c..a327f04af4 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -139,6 +139,22 @@ module.exports = DocumentManager = RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> return callback(error) if error? callback() + + bulkAcceptChanges: (project_id, doc_id, change_ids, _callback = (error) ->) -> + timer = new Metrics.Timer("docManager.bulkAcceptChanges") + callback = (args...) -> + timer.done() + _callback(args...) + + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> + return callback(error) if error? + if !lines? or !version? + return callback(new Errors.NotFoundError("document not found: #{doc_id}")) + RangesManager.bulkAcceptChanges change_ids, ranges, (error, new_ranges) -> + return callback(error) if error? + RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> + return callback(error) if error? 
+ callback() deleteComment: (project_id, doc_id, comment_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.deleteComment") @@ -180,6 +196,10 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.acceptChange, project_id, doc_id, change_id, callback + bulkAcceptChangesWithLock: (project_id, doc_id, change_ids, callback = (error) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.bulkAcceptChanges, project_id, doc_id, change_ids, callback + deleteCommentWithLock: (project_id, doc_id, thread_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.deleteComment, project_id, doc_id, thread_id, callback diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index aae2b51f8e..075d1f8e40 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -105,6 +105,18 @@ module.exports = HttpController = return next(error) if error? logger.log {project_id, doc_id, change_id}, "accepted change via http" res.send 204 # No Content + + bulkAcceptChanges: (req, res, next = (error) ->) -> + {project_id, doc_id} = req.params + {change_ids} = req.body + change_ids ?= [] + logger.log {project_id, doc_id}, "accepting #{ change_ids.length } changes via http" + timer = new Metrics.Timer("http.bulkAcceptChanges") + DocumentManager.bulkAcceptChangesWithLock project_id, doc_id, change_ids, (error) -> + timer.done() + return next(error) if error? + logger.log {project_id, doc_id}, "accepted #{ change_ids.length } changes via http" + res.send 204 # No Content deleteComment: (req, res, next = (error) ->) -> {project_id, doc_id, comment_id} = req.params diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index ebef566424..559cd18da4 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -41,6 +41,15 @@ module.exports = RangesManager = response = RangesManager._getRanges(rangesTracker) callback null, response + bulkAcceptChanges: (change_ids, ranges, callback = (error, ranges) ->) -> + {changes, comments} = ranges + logger.log {change_id}, "accepting #{ change_ids.length } changes in ranges" + rangesTracker = new RangesTracker(changes, comments) + for change_id in change_ids + rangesTracker.removeChangeId(change_id) + response = RangesManager._getRanges(rangesTracker) + callback null, response + deleteComment: (comment_id, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges logger.log {comment_id}, "deleting comment in ranges" From 7456238a71a64fbfdefc1244f1337deecd36c92b Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 4 May 2017 15:42:10 +0100 Subject: [PATCH 250/769] Bump redis-sharelatex version --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fecda6f936..fd566f7b74 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -14,7 +14,7 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", "metrics-sharelatex": 
"git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", - "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.0", + "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.1", "request": "2.25.0", "sandboxed-module": "~0.2.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", From 87a5e30a0b3aefbf7b470001c142e8e5cb219eee Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 5 May 2017 13:37:58 +0100 Subject: [PATCH 251/769] Bump redis-sharelatex version --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fd566f7b74..07deac38bb 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -14,7 +14,7 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", - "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.1", + "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.2", "request": "2.25.0", "sandboxed-module": "~0.2.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", From 9b8a2c3030fa0354d1a56fc7ccb6e9e3f9fd8462 Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Fri, 5 May 2017 15:12:06 +0100 Subject: [PATCH 252/769] Consolidate methods to accept changes. --- services/document-updater/app.coffee | 4 +-- .../app/coffee/DocumentManager.coffee | 32 ++++--------------- .../app/coffee/HttpController.coffee | 11 ++++--- .../app/coffee/RangesManager.coffee | 10 +----- 4 files changed, 15 insertions(+), 42 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 1672d1b3b5..9e96d2b859 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -48,8 +48,8 @@ app.post '/project/:project_id/doc/:doc_id/flush', HttpCont app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject app.post '/project/:project_id/flush', HttpController.flushProject -app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChange -app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.bulkAcceptChanges +app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges +app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges app.del '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment app.get '/total', (req, res)-> diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index a327f04af4..a608331003 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -123,9 +123,9 @@ module.exports = DocumentManager = RedisManager.removeDocFromMemory project_id, doc_id, (error) -> return callback(error) if error? 
callback null - - acceptChange: (project_id, doc_id, change_id, _callback = (error) ->) -> - timer = new Metrics.Timer("docManager.acceptChange") + + acceptChanges: (project_id, doc_id, change_ids = [], _callback = (error) ->) -> + timer = new Metrics.Timer("docManager.acceptChanges") callback = (args...) -> timer.done() _callback(args...) @@ -134,23 +134,7 @@ module.exports = DocumentManager = return callback(error) if error? if !lines? or !version? return callback(new Errors.NotFoundError("document not found: #{doc_id}")) - RangesManager.acceptChange change_id, ranges, (error, new_ranges) -> - return callback(error) if error? - RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> - return callback(error) if error? - callback() - - bulkAcceptChanges: (project_id, doc_id, change_ids, _callback = (error) ->) -> - timer = new Metrics.Timer("docManager.bulkAcceptChanges") - callback = (args...) -> - timer.done() - _callback(args...) - - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> - return callback(error) if error? - if !lines? or !version? - return callback(new Errors.NotFoundError("document not found: #{doc_id}")) - RangesManager.bulkAcceptChanges change_ids, ranges, (error, new_ranges) -> + RangesManager.acceptChanges change_ids, ranges, (error, new_ranges) -> return callback(error) if error? RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> return callback(error) if error? @@ -192,13 +176,9 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, callback - acceptChangeWithLock: (project_id, doc_id, change_id, callback = (error) ->) -> + acceptChangesWithLock: (project_id, doc_id, change_ids, callback = (error) ->) -> UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.acceptChange, project_id, doc_id, change_id, callback - - bulkAcceptChangesWithLock: (project_id, doc_id, change_ids, callback = (error) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.bulkAcceptChanges, project_id, doc_id, change_ids, callback + UpdateManager.lockUpdatesAndDo DocumentManager.acceptChanges, project_id, doc_id, change_ids, callback deleteCommentWithLock: (project_id, doc_id, thread_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 075d1f8e40..8267df38f5 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -100,19 +100,20 @@ module.exports = HttpController = {project_id, doc_id, change_id} = req.params logger.log {project_id, doc_id, change_id}, "accepting change via http" timer = new Metrics.Timer("http.acceptChange") - DocumentManager.acceptChangeWithLock project_id, doc_id, change_id, (error) -> + DocumentManager.acceptChangesWithLock project_id, doc_id, [ change_id ], (error) -> timer.done() return next(error) if error? logger.log {project_id, doc_id, change_id}, "accepted change via http" res.send 204 # No Content - bulkAcceptChanges: (req, res, next = (error) ->) -> + acceptChanges: (req, res, next = (error) ->) -> {project_id, doc_id} = req.params {change_ids} = req.body - change_ids ?= [] + if !change_ids? 
+ change_ids = [ req.params.change_id ] logger.log {project_id, doc_id}, "accepting #{ change_ids.length } changes via http" - timer = new Metrics.Timer("http.bulkAcceptChanges") - DocumentManager.bulkAcceptChangesWithLock project_id, doc_id, change_ids, (error) -> + timer = new Metrics.Timer("http.acceptChanges") + DocumentManager.acceptChangesWithLock project_id, doc_id, change_ids, (error) -> timer.done() return next(error) if error? logger.log {project_id, doc_id}, "accepted #{ change_ids.length } changes via http" diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 559cd18da4..977790f825 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -33,15 +33,7 @@ module.exports = RangesManager = logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges" callback null, response - acceptChange: (change_id, ranges, callback = (error, ranges) ->) -> - {changes, comments} = ranges - logger.log {change_id}, "accepting change in ranges" - rangesTracker = new RangesTracker(changes, comments) - rangesTracker.removeChangeId(change_id) - response = RangesManager._getRanges(rangesTracker) - callback null, response - - bulkAcceptChanges: (change_ids, ranges, callback = (error, ranges) ->) -> + acceptChanges: (change_ids, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges logger.log {change_id}, "accepting #{ change_ids.length } changes in ranges" rangesTracker = new RangesTracker(changes, comments) From e96e6d6a2903385c6ebeadbf69035c0a3181dd9d Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Fri, 5 May 2017 15:17:19 +0100 Subject: [PATCH 253/769] Remove unused method. --- .../document-updater/app/coffee/HttpController.coffee | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 8267df38f5..cac024a802 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -95,16 +95,6 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, "deleted project via http" res.send 204 # No Content - - acceptChange: (req, res, next = (error) ->) -> - {project_id, doc_id, change_id} = req.params - logger.log {project_id, doc_id, change_id}, "accepting change via http" - timer = new Metrics.Timer("http.acceptChange") - DocumentManager.acceptChangesWithLock project_id, doc_id, [ change_id ], (error) -> - timer.done() - return next(error) if error? 
- logger.log {project_id, doc_id, change_id}, "accepted change via http" - res.send 204 # No Content acceptChanges: (req, res, next = (error) ->) -> {project_id, doc_id} = req.params From 79d8fced496f366a586fd3228d7db3a0069973ee Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 8 May 2017 15:56:02 +0100 Subject: [PATCH 254/769] make history update more atomic --- .../app/coffee/HistoryRedisManager.coffee | 14 +++++--------- .../app/coffee/RedisManager.coffee | 9 ++++++--- .../HistoryRedisManagerTests.coffee | 12 +----------- .../coffee/RedisManager/RedisManagerTests.coffee | 6 +++++- 4 files changed, 17 insertions(+), 24 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryRedisManager.coffee b/services/document-updater/app/coffee/HistoryRedisManager.coffee index 315d9daabf..6d9a482ced 100644 --- a/services/document-updater/app/coffee/HistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/HistoryRedisManager.coffee @@ -8,13 +8,9 @@ module.exports = HistoryRedisManager = pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> if ops.length == 0 return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush - opVersions = ops.map (op) -> op?.v - logger.log project_id: project_id, doc_id: doc_id, op_versions: opVersions, "pushing uncompressed history ops" - jsonOps = ops.map (op) -> JSON.stringify op - async.parallel [ - (cb) -> rclient.rpush Keys.uncompressedHistoryOps({doc_id}), jsonOps..., cb - (cb) -> rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, cb - ], (error, results) -> + logger.log project_id: project_id, doc_id: doc_id, "marking doc in project for history ops" + rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) -> return callback(error) if error? - [length, _] = results - callback(error, length) \ No newline at end of file + rclient.llen Keys.uncompressedHistoryOps({doc_id}), (error, length) -> + return callback(error) if error? + callback(null, length) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 3359a36231..1942b86dc3 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -25,6 +25,7 @@ MEGABYTES = 1024 * 1024 MAX_RANGES_SIZE = 3 * MEGABYTES keys = Settings.redis.documentupdater.key_schema +historyKeys = Settings.redis.history.key_schema module.exports = RedisManager = rclient: rclient @@ -167,9 +168,10 @@ module.exports = RedisManager = logger.error err: error, doc_id: doc_id, newDocLines: newDocLines, error.message return callback(error) newHash = RedisManager._computeHash(newDocLines) - - logger.log doc_id: doc_id, version: newVersion, hash: newHash, "updating doc in redis" - + + opVersions = appliedOps.map (op) -> op?.v + logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis" + RedisManager._serializeRanges ranges, (error, ranges) -> if error? logger.error {err: error, doc_id}, error.message @@ -180,6 +182,7 @@ module.exports = RedisManager = multi.set keys.docHash(doc_id:doc_id), newHash if jsonOps.length > 0 multi.rpush keys.docOps(doc_id: doc_id), jsonOps... + multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... 
multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 if ranges? diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee index f51942c1e1..0137c2128a 100644 --- a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee @@ -27,7 +27,7 @@ describe "HistoryRedisManager", -> describe "pushUncompressedHistoryOps", -> beforeEach -> @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }] - @rclient.rpush = sinon.stub().yields(null, @length = 42) + @rclient.llen = sinon.stub().yields(null, @length = 42) @rclient.sadd = sinon.stub().yields() describe "with ops", -> @@ -36,11 +36,6 @@ describe "HistoryRedisManager", -> @callback(args...) done() - it "should push the doc op into the doc ops list as JSON", -> - @rclient.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) - .should.equal true - it "should add the doc_id to the set of which records the project docs", -> @rclient.sadd .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) @@ -55,11 +50,6 @@ describe "HistoryRedisManager", -> @callback(args...) done() - it "should not push the doc op into the doc ops list as JSON", -> - @rclient.rpush - .called - .should.equal false - it "should not add the doc_id to the set of which records the project docs", -> @rclient.sadd .called diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 070abd859a..26eaaf0892 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -19,7 +19,7 @@ describe "RedisManager", -> documentupdater: {logHashErrors: {write:true, read:true}} redis: documentupdater: - key_schema: + key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" docLines: ({doc_id}) -> "doclines:#{doc_id}" docOps: ({doc_id}) -> "DocOps:#{doc_id}" @@ -29,6 +29,10 @@ describe "RedisManager", -> pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" + history: + key_schema: + uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" + docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" } "redis-sharelatex": createClient: () => @rclient From e2f70aca1a19e628983f5652cba04d3ae3845ff8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 8 May 2017 16:02:40 +0100 Subject: [PATCH 255/769] fix tests for redis cluster --- .../test/unit/coffee/LockManager/ReleasingTheLock.coffee | 6 ++++++ .../test/unit/coffee/LockManager/tryLockTests.coffee | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 5c6b6a6381..36c458cb71 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -19,6 +19,12 @@ describe 
'LockManager - releasing the lock', ()-> error:-> "redis-sharelatex": createClient : () => @client + "settings-sharelatex": { + redis: + lock: + key_schema: + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + } "./Metrics": {inc: () ->} @LockManager = SandboxedModule.require(modulePath, requires: mocks) @lockValue = "lock-value-stub" diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index 33c3eb3d51..b3ff7cdd7e 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -13,6 +13,12 @@ describe 'LockManager - trying the lock', -> auth:-> set: @set = sinon.stub() "./Metrics": {inc: () ->} + "settings-sharelatex": { + redis: + lock: + key_schema: + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + } @callback = sinon.stub() @doc_id = "doc-id-123" From 70341e4e8045eb8aed4abb037538e637c872de8d Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Mon, 8 May 2017 16:08:34 +0100 Subject: [PATCH 256/769] Add bulk method from range tracker. --- .../app/coffee/RangesTracker.coffee | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 14193f628d..f589098440 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -100,6 +100,21 @@ load = () -> change = @getChange(change_id) return if !change? @_removeChange(change) + + removeChangeIds: (change_to_remove_ids) -> + return if !change_to_remove_ids?.length > 0 + i = @changes.length + remove_change_id = {} + for change_id in change_to_remove_ids + remove_change_id[change_id] = true + + while (i--) + if remove_change_id[@changes[i].id] + delete remove_change_id[@changes[i].id] + removed_change = @changes.splice(i, 1)[0] + @_markAsDirty removed_change, "change", "removed" + if Object.keys(remove_change_id).length == 0 + break validate: (text) -> for change in @changes From d7ac57d054f7ff51d92ee0080d0500dfcf4ef7d6 Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Mon, 8 May 2017 16:08:42 +0100 Subject: [PATCH 257/769] Use new bulk method. --- services/document-updater/app/coffee/RangesManager.coffee | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 977790f825..1fa56559b3 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -37,8 +37,7 @@ module.exports = RangesManager = {changes, comments} = ranges logger.log {change_id}, "accepting #{ change_ids.length } changes in ranges" rangesTracker = new RangesTracker(changes, comments) - for change_id in change_ids - rangesTracker.removeChangeId(change_id) + rangesTracker.removeChangeIds(change_ids) response = RangesManager._getRanges(rangesTracker) callback null, response From 144804e516dbc9e83493cb6ba37c7ba6e2f62c3a Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Mon, 8 May 2017 16:34:17 +0100 Subject: [PATCH 258/769] Update tests with new methods. 
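The tests now follow the consolidated acceptChanges path: HttpController reads change_ids with req.body?.change_ids so a missing body falls through to the single :change_id route parameter, and the RangesManager log call drops the change_id variable that no longer exists in its scope. The stubbing pattern the updated tests lean on, sketched in isolation (the callback of acceptChangesWithLock sits at argument index 3, which is what callsArgWith(3) invokes); illustrative only, not part of the commit:

    # acceptChangesWithLock(project_id, doc_id, change_ids, callback) - callback is arg 3
    sinon = require "sinon"
    DocumentManager = { acceptChangesWithLock: -> }
    sinon.stub(DocumentManager, "acceptChangesWithLock").callsArgWith(3, null)
    DocumentManager.acceptChangesWithLock "project-id-123", "doc-id-123", ["change-id-1"], (error) ->
        console.log "stubbed callback invoked, error =", error  # null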
--- .../app/coffee/HttpController.coffee | 2 +- .../app/coffee/RangesManager.coffee | 2 +- .../DocumentManager/DocumentManagerTests.coffee | 12 ++++++------ .../HttpController/HttpControllerTests.coffee | 16 ++++++++-------- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index cac024a802..13e618e734 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -98,7 +98,7 @@ module.exports = HttpController = acceptChanges: (req, res, next = (error) ->) -> {project_id, doc_id} = req.params - {change_ids} = req.body + change_ids = req.body?.change_ids if !change_ids? change_ids = [ req.params.change_id ] logger.log {project_id, doc_id}, "accepting #{ change_ids.length } changes via http" diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index 1fa56559b3..d0653bb6a2 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -35,7 +35,7 @@ module.exports = RangesManager = acceptChanges: (change_ids, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges - logger.log {change_id}, "accepting #{ change_ids.length } changes in ranges" + logger.log "accepting #{ change_ids.length } changes in ranges" rangesTracker = new RangesTracker(changes, comments) rangesTracker.removeChangeIds(change_ids) response = RangesManager._getRanges(rangesTracker) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 47fbde021b..7050f0f370 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -283,12 +283,12 @@ describe "DocumentManager", -> @ranges = { entries: "mock", comments: "mock" } @updated_ranges = { entries: "updated", comments: "updated" } @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) - @RangesManager.acceptChange = sinon.stub().yields(null, @updated_ranges) + @RangesManager.acceptChanges = sinon.stub().yields(null, @updated_ranges) @RedisManager.updateDocument = sinon.stub().yields() describe "successfully", -> beforeEach -> - @DocumentManager.acceptChange @project_id, @doc_id, @change_id, @callback + @DocumentManager.acceptChanges @project_id, @doc_id, [ @change_id ], @callback it "should get the document's current ranges", -> @DocumentManager.getDoc @@ -296,8 +296,8 @@ describe "DocumentManager", -> .should.equal true it "should apply the accept change to the ranges", -> - @RangesManager.acceptChange - .calledWith(@change_id, @ranges) + @RangesManager.acceptChanges + .calledWith([ @change_id ], @ranges) .should.equal true it "should save the updated ranges", -> @@ -311,7 +311,7 @@ describe "DocumentManager", -> describe "when the doc is not found", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().yields(null, null, null, null) - @DocumentManager.acceptChange @project_id, @doc_id, @change_id, @callback + @DocumentManager.acceptChanges @project_id, @doc_id, [ @change_id ], @callback it "should not save anything", -> @RedisManager.updateDocument.called.should.equal false @@ -356,7 +356,7 @@ describe 
"DocumentManager", -> describe "when the doc is not found", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().yields(null, null, null, null) - @DocumentManager.acceptChange @project_id, @doc_id, @comment_id, @callback + @DocumentManager.acceptChanges @project_id, @doc_id, [ @comment_id ], @callback it "should not save anything", -> @RedisManager.updateDocument.called.should.equal false diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 69b40c85d2..ec910c9519 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -335,7 +335,7 @@ describe "HttpController", -> .calledWith(new Error("oops")) .should.equal true - describe "acceptChange", -> + describe "acceptChanges", -> beforeEach -> @req = params: @@ -345,12 +345,12 @@ describe "HttpController", -> describe "successfully", -> beforeEach -> - @DocumentManager.acceptChangeWithLock = sinon.stub().callsArgWith(3) - @HttpController.acceptChange(@req, @res, @next) + @DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3) + @HttpController.acceptChanges(@req, @res, @next) it "should accept the change", -> - @DocumentManager.acceptChangeWithLock - .calledWith(@project_id, @doc_id, @change_id) + @DocumentManager.acceptChangesWithLock + .calledWith(@project_id, @doc_id, [ @change_id ]) .should.equal true it "should return a successful No Content response", -> @@ -360,7 +360,7 @@ describe "HttpController", -> it "should log the request", -> @logger.log - .calledWith({@project_id, @doc_id, @change_id}, "accepting change via http") + .calledWith({@project_id, @doc_id}, "accepting 1 changes via http") .should.equal true it "should time the request", -> @@ -368,8 +368,8 @@ describe "HttpController", -> describe "when an errors occurs", -> beforeEach -> - @DocumentManager.acceptChangeWithLock = sinon.stub().callsArgWith(3, new Error("oops")) - @HttpController.acceptChange(@req, @res, @next) + @DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3, new Error("oops")) + @HttpController.acceptChanges(@req, @res, @next) it "should call next with the error", -> @next From 2d158b03d747204e463976f6c3cf6a324f394678 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 9 May 2017 09:32:56 +0100 Subject: [PATCH 259/769] rename pushUncompressedHistoryOps --- .../app/coffee/HistoryManager.coffee | 4 ++-- .../app/coffee/HistoryRedisManager.coffee | 2 +- .../HistoryManager/HistoryManagerTests.coffee | 16 ++++++++-------- .../HistoryRedisManagerTests.coffee | 6 +++--- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 512fd5e68b..6c47c24c1c 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -25,7 +25,7 @@ module.exports = HistoryManager = pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> if ops.length == 0 return callback() - HistoryRedisManager.pushUncompressedHistoryOps project_id, doc_id, ops, (error, length) -> + HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error, length) -> return callback(error) if error? # We want to flush every 50 ops, i.e. 
50, 100, 150, etc # Find out which 'block' (i.e. 0-49, 50-99) we were in before and after pushing these @@ -41,4 +41,4 @@ module.exports = HistoryManager = HistoryManager.flushDocChanges project_id, doc_id, (error) -> if error? logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api" - callback() \ No newline at end of file + callback() diff --git a/services/document-updater/app/coffee/HistoryRedisManager.coffee b/services/document-updater/app/coffee/HistoryRedisManager.coffee index 6d9a482ced..2329b6f433 100644 --- a/services/document-updater/app/coffee/HistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/HistoryRedisManager.coffee @@ -5,7 +5,7 @@ async = require "async" logger = require('logger-sharelatex') module.exports = HistoryRedisManager = - pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> + recordDocHasHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> if ops.length == 0 return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush logger.log project_id: project_id, doc_id: doc_id, "marking doc in project for history ops" diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 66ffd98e80..bff896cdd8 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -47,11 +47,11 @@ describe "HistoryManager", -> describe "pushing the op", -> beforeEach -> - @HistoryRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) + @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null, 1) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback it "should push the ops into redis", -> - @HistoryRedisManager.pushUncompressedHistoryOps + @HistoryRedisManager.recordDocHasHistoryOps .calledWith(@project_id, @doc_id, @ops) .should.equal true @@ -63,7 +63,7 @@ describe "HistoryManager", -> describe "when we hit a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> - @HistoryRedisManager.pushUncompressedHistoryOps = + @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback @@ -75,7 +75,7 @@ describe "HistoryManager", -> describe "when we go over a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> @ops = ["op1", "op2", "op3"] - @HistoryRedisManager.pushUncompressedHistoryOps = + @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS + 1) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback @@ -86,7 +86,7 @@ describe "HistoryManager", -> describe "when HistoryManager errors", -> beforeEach -> - @HistoryRedisManager.pushUncompressedHistoryOps = + @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS) @HistoryManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback @@ -103,10 +103,10 @@ describe "HistoryManager", -> describe "with no ops", 
-> beforeEach -> - @HistoryRedisManager.pushUncompressedHistoryOps = sinon.stub().callsArgWith(3, null, 1) + @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null, 1) @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, [], @callback - it "should not call HistoryRedisManager.pushUncompressedHistoryOps", -> - @HistoryRedisManager.pushUncompressedHistoryOps.called.should.equal false + it "should not call HistoryRedisManager.recordDocHasHistoryOps", -> + @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee index 0137c2128a..f14e6da27f 100644 --- a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee @@ -24,7 +24,7 @@ describe "HistoryRedisManager", -> @project_id = "project-id-123" @callback = sinon.stub() - describe "pushUncompressedHistoryOps", -> + describe "recordDocHasHistoryOps", -> beforeEach -> @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }] @rclient.llen = sinon.stub().yields(null, @length = 42) @@ -32,7 +32,7 @@ describe "HistoryRedisManager", -> describe "with ops", -> beforeEach (done) -> - @HistoryRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, (args...) => + @HistoryRedisManager.recordDocHasHistoryOps @project_id, @doc_id, @ops, (args...) => @callback(args...) done() @@ -46,7 +46,7 @@ describe "HistoryRedisManager", -> describe "with no ops", -> beforeEach (done) -> - @HistoryRedisManager.pushUncompressedHistoryOps @project_id, @doc_id, [], (args...) => + @HistoryRedisManager.recordDocHasHistoryOps @project_id, @doc_id, [], (args...) => @callback(args...) done() From fdf5e8e0b8ab8af120140ecee2a94206adbe37ec Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 9 May 2017 10:34:31 +0100 Subject: [PATCH 260/769] get history ops length directly from redis update --- .../app/coffee/HistoryManager.coffee | 5 ++-- .../app/coffee/HistoryRedisManager.coffee | 6 ++--- .../app/coffee/RedisManager.coffee | 25 +++++++++++-------- .../app/coffee/UpdateManager.coffee | 4 +-- .../HistoryManager/HistoryManagerTests.coffee | 20 +++++++-------- .../HistoryRedisManagerTests.coffee | 4 --- .../UpdateManager/UpdateManagerTests.coffee | 2 +- 7 files changed, 32 insertions(+), 34 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 6c47c24c1c..c6aa7797cf 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -22,11 +22,12 @@ module.exports = HistoryManager = return callback(error) FLUSH_EVERY_N_OPS: 50 - pushUncompressedHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> + pushUncompressedHistoryOps: (project_id, doc_id, ops = [], length, callback = (error) ->) -> if ops.length == 0 return callback() - HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error, length) -> + HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) -> return callback(error) if error? + return callback() if not length? # don't flush unless we know the length # We want to flush every 50 ops, i.e. 50, 100, 150, etc # Find out which 'block' (i.e. 
0-49, 50-99) we were in before and after pushing these # ops. If we've changed, then we've gone over a multiple of 50 and should flush. diff --git a/services/document-updater/app/coffee/HistoryRedisManager.coffee b/services/document-updater/app/coffee/HistoryRedisManager.coffee index 2329b6f433..0ac8723359 100644 --- a/services/document-updater/app/coffee/HistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/HistoryRedisManager.coffee @@ -5,12 +5,10 @@ async = require "async" logger = require('logger-sharelatex') module.exports = HistoryRedisManager = - recordDocHasHistoryOps: (project_id, doc_id, ops = [], callback = (error, length) ->) -> + recordDocHasHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> if ops.length == 0 return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush logger.log project_id: project_id, doc_id: doc_id, "marking doc in project for history ops" rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) -> return callback(error) if error? - rclient.llen Keys.uncompressedHistoryOps({doc_id}), (error, length) -> - return callback(error) if error? - callback(null, length) + callback() diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 1942b86dc3..4aad7ec109 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -177,25 +177,28 @@ module.exports = RedisManager = logger.error {err: error, doc_id}, error.message return callback(error) multi = rclient.multi() - multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines - multi.set keys.docVersion(doc_id:doc_id), newVersion - multi.set keys.docHash(doc_id:doc_id), newHash - if jsonOps.length > 0 - multi.rpush keys.docOps(doc_id: doc_id), jsonOps... - multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... - multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL - multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 + multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines # index 0 + multi.set keys.docVersion(doc_id:doc_id), newVersion # index 1 + multi.set keys.docHash(doc_id:doc_id), newHash # index 2 + multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL # index 3 + multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 # index 4 if ranges? - multi.set keys.ranges(doc_id:doc_id), ranges + multi.set keys.ranges(doc_id:doc_id), ranges # index 5 else - multi.del keys.ranges(doc_id:doc_id) + multi.del keys.ranges(doc_id:doc_id) # also index 5 + # push the ops last so we can get the lengths at fixed index positions 6 and 7 + if jsonOps.length > 0 + multi.rpush keys.docOps(doc_id: doc_id), jsonOps... # index 6 + multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... # index 7 multi.exec (error, result) -> return callback(error) if error? # check the hash computed on the redis server writeHash = result?[0] if logHashWriteErrors and writeHash? 
and writeHash isnt newHash logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, docLines:newDocLines, "hash mismatch on updateDocument" - return callback() + # return length of uncompressedHistoryOps queue (index 7) + uncompressedHistoryOpsLength = result?[7] + return callback(null, uncompressedHistoryOpsLength) getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 5022f6bb38..b903f6615f 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -61,9 +61,9 @@ module.exports = UpdateManager = return callback(error) if error? RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> return callback(error) if error? - RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_ranges, (error) -> + RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, historyOpsLength) -> return callback(error) if error? - HistoryManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, callback + HistoryManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, historyOpsLength, callback lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) -> LockManager.getLock doc_id, (error, lockValue) -> diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index bff896cdd8..b41c9b9f7a 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -47,8 +47,8 @@ describe "HistoryManager", -> describe "pushing the op", -> beforeEach -> - @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null, 1) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null) + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, 1, @callback it "should push the ops into redis", -> @HistoryRedisManager.recordDocHasHistoryOps @@ -64,8 +64,8 @@ describe "HistoryManager", -> describe "when we hit a multiple of FLUSH_EVERY_N_OPS ops", -> beforeEach -> @HistoryRedisManager.recordDocHasHistoryOps = - sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + sinon.stub().callsArgWith(3, null) + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS,@callback it "should tell the track changes api to flush", -> @HistoryManager.flushDocChanges @@ -76,8 +76,8 @@ describe "HistoryManager", -> beforeEach -> @ops = ["op1", "op2", "op3"] @HistoryRedisManager.recordDocHasHistoryOps = - sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS + 1) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + sinon.stub().callsArgWith(3, null) + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS + 1, @callback it "should tell the track changes api to flush", -> 
@HistoryManager.flushDocChanges @@ -87,9 +87,9 @@ describe "HistoryManager", -> describe "when HistoryManager errors", -> beforeEach -> @HistoryRedisManager.recordDocHasHistoryOps = - sinon.stub().callsArgWith(3, null, 2 * @HistoryManager.FLUSH_EVERY_N_OPS) + sinon.stub().callsArgWith(3, null) @HistoryManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, @callback + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS, @callback it "should log out the error", -> @logger.error @@ -103,8 +103,8 @@ describe "HistoryManager", -> describe "with no ops", -> beforeEach -> - @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null, 1) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, [], @callback + @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null) + @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, [], 1, @callback it "should not call HistoryRedisManager.recordDocHasHistoryOps", -> @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee index f14e6da27f..ca3937d4c5 100644 --- a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee @@ -27,7 +27,6 @@ describe "HistoryRedisManager", -> describe "recordDocHasHistoryOps", -> beforeEach -> @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }] - @rclient.llen = sinon.stub().yields(null, @length = 42) @rclient.sadd = sinon.stub().yields() describe "with ops", -> @@ -41,9 +40,6 @@ describe "HistoryRedisManager", -> .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) .should.equal true - it "should call the callback with the length", -> - @callback.calledWith(null, @length).should.equal true - describe "with no ops", -> beforeEach (done) -> @HistoryRedisManager.recordDocHasHistoryOps @project_id, @doc_id, [], (args...) 
=> diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 3e659ee078..57bd9166d1 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -166,7 +166,7 @@ describe "UpdateManager", -> @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields() @RealTimeRedisManager.sendData = sinon.stub() - @HistoryManager.pushUncompressedHistoryOps = sinon.stub().callsArg(3) + @HistoryManager.pushUncompressedHistoryOps = sinon.stub().callsArg(4) describe "normally", -> beforeEach -> From 7ce6285e3d39956a27a0c31f2fc1bf7de584ba1e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 9 May 2017 10:44:26 +0100 Subject: [PATCH 261/769] increase flush threshold to 100 ops --- .../document-updater/app/coffee/HistoryManager.coffee | 10 +++++----- .../coffee/ApplyingUpdatesToADocTests.coffee | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index c6aa7797cf..17b77a00e6 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -21,17 +21,17 @@ module.exports = HistoryManager = error = new Error("track changes api returned a failure status code: #{res.statusCode}") return callback(error) - FLUSH_EVERY_N_OPS: 50 + FLUSH_EVERY_N_OPS: 100 pushUncompressedHistoryOps: (project_id, doc_id, ops = [], length, callback = (error) ->) -> if ops.length == 0 return callback() HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) -> return callback(error) if error? return callback() if not length? # don't flush unless we know the length - # We want to flush every 50 ops, i.e. 50, 100, 150, etc - # Find out which 'block' (i.e. 0-49, 50-99) we were in before and after pushing these - # ops. If we've changed, then we've gone over a multiple of 50 and should flush. - # (Most of the time, we will only hit 50 and then flushing will put us back to 0) + # We want to flush every 100 ops, i.e. 100, 200, 300, etc + # Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these + # ops. If we've changed, then we've gone over a multiple of 100 and should flush. 
+ # (Most of the time, we will only hit 100 and then flushing will put us back to 0) previousLength = length - ops.length prevBlock = Math.floor(previousLength / HistoryManager.FLUSH_EVERY_N_OPS) newBlock = Math.floor(length / HistoryManager.FLUSH_EVERY_N_OPS) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index d06119c690..a2eba4c063 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -207,7 +207,7 @@ describe "Applying updates to a doc", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] updates = [] - for v in [0..99] # Should flush after 50 ops + for v in [0..199] # Should flush after 100 ops updates.push doc_id: @doc_id, op: [i: v.toString(), p: 0] @@ -219,7 +219,7 @@ describe "Applying updates to a doc", -> # Send updates in chunks to causes multiple flushes actions = [] - for i in [0..9] + for i in [0..19] do (i) => actions.push (cb) => DocUpdaterClient.sendUpdates @project_id, @doc_id, updates.slice(i*10, (i+1)*10), cb From 36407ac726d0aa3a8e3917426b5f77f40b32f815 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 9 May 2017 12:02:27 +0100 Subject: [PATCH 262/769] rename HistoryManager pushUncompressedHistoryOps --- .../app/coffee/HistoryManager.coffee | 2 +- .../document-updater/app/coffee/UpdateManager.coffee | 2 +- .../coffee/HistoryManager/HistoryManagerTests.coffee | 12 ++++++------ .../coffee/UpdateManager/UpdateManagerTests.coffee | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 17b77a00e6..9f78b5af4b 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -22,7 +22,7 @@ module.exports = HistoryManager = return callback(error) FLUSH_EVERY_N_OPS: 100 - pushUncompressedHistoryOps: (project_id, doc_id, ops = [], length, callback = (error) ->) -> + recordAndFlushHistoryOps: (project_id, doc_id, ops = [], length, callback = (error) ->) -> if ops.length == 0 return callback() HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) -> diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index b903f6615f..269b16ee67 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -63,7 +63,7 @@ module.exports = UpdateManager = return callback(error) if error? RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, historyOpsLength) -> return callback(error) if error? 
- HistoryManager.pushUncompressedHistoryOps project_id, doc_id, appliedOps, historyOpsLength, callback + HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, historyOpsLength, callback lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) -> LockManager.getLock doc_id, (error, lockValue) -> diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index b41c9b9f7a..37e35ca285 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -40,7 +40,7 @@ describe "HistoryManager", -> it "should return the callback with an error", -> @callback.calledWith(new Error("track changes api return non-success code: 500")).should.equal true - describe "pushUncompressedHistoryOps", -> + describe "recordAndFlushHistoryOps", -> beforeEach -> @ops = ["mock-ops"] @HistoryManager.flushDocChanges = sinon.stub().callsArg(2) @@ -48,7 +48,7 @@ describe "HistoryManager", -> describe "pushing the op", -> beforeEach -> @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, 1, @callback + @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 1, @callback it "should push the ops into redis", -> @HistoryRedisManager.recordDocHasHistoryOps @@ -65,7 +65,7 @@ describe "HistoryManager", -> beforeEach -> @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS,@callback + @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS,@callback it "should tell the track changes api to flush", -> @HistoryManager.flushDocChanges @@ -77,7 +77,7 @@ describe "HistoryManager", -> @ops = ["op1", "op2", "op3"] @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS + 1, @callback + @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS + 1, @callback it "should tell the track changes api to flush", -> @HistoryManager.flushDocChanges @@ -89,7 +89,7 @@ describe "HistoryManager", -> @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null) @HistoryManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops")) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS, @callback + @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS, @callback it "should log out the error", -> @logger.error @@ -104,7 +104,7 @@ describe "HistoryManager", -> describe "with no ops", -> beforeEach -> @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null) - @HistoryManager.pushUncompressedHistoryOps @project_id, @doc_id, [], 1, @callback + @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, [], 1, @callback it "should not call HistoryRedisManager.recordDocHasHistoryOps", -> @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false diff --git 
a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 57bd9166d1..2de6e93e44 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -166,7 +166,7 @@ describe "UpdateManager", -> @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields() @RealTimeRedisManager.sendData = sinon.stub() - @HistoryManager.pushUncompressedHistoryOps = sinon.stub().callsArg(4) + @HistoryManager.recordAndFlushHistoryOps = sinon.stub().callsArg(4) describe "normally", -> beforeEach -> @@ -188,7 +188,7 @@ describe "UpdateManager", -> .should.equal true it "should push the applied ops into the history queue", -> - @HistoryManager.pushUncompressedHistoryOps + @HistoryManager.recordAndFlushHistoryOps .calledWith(@project_id, @doc_id, @appliedOps) .should.equal true From f690ef078aa823ea09c1e344fc6c8f254662240d Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Tue, 9 May 2017 16:16:25 +0100 Subject: [PATCH 263/769] Update tests; add new ones for multiple changes. --- .../DocumentManagerTests.coffee | 12 ++++++++- .../HttpController/HttpControllerTests.coffee | 20 +++++++++++++- .../RangesManager/RangesManagerTests.coffee | 27 +++++++++++++++++-- 3 files changed, 55 insertions(+), 4 deletions(-) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 7050f0f370..4a29f54321 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -278,6 +278,7 @@ describe "DocumentManager", -> describe "acceptChanges", -> beforeEach -> @change_id = "mock-change-id" + @change_ids = [ "mock-change-id-1", "mock-change-id-2", "mock-change-id-3", "mock-change-id-4" ] @version = 34 @lines = ["original", "lines"] @ranges = { entries: "mock", comments: "mock" } @@ -286,7 +287,7 @@ describe "DocumentManager", -> @RangesManager.acceptChanges = sinon.stub().yields(null, @updated_ranges) @RedisManager.updateDocument = sinon.stub().yields() - describe "successfully", -> + describe "successfully with a single change", -> beforeEach -> @DocumentManager.acceptChanges @project_id, @doc_id, [ @change_id ], @callback @@ -308,6 +309,15 @@ describe "DocumentManager", -> it "should call the callback", -> @callback.called.should.equal true + describe "successfully with multiple changes", -> + beforeEach -> + @DocumentManager.acceptChanges @project_id, @doc_id, @change_ids, @callback + + it "should apply the accept change to the ranges", -> + @RangesManager.acceptChanges + .calledWith(@change_ids, @ranges) + .should.equal true + describe "when the doc is not found", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().yields(null, null, null, null) diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index ec910c9519..617146e787 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee 
@@ -343,7 +343,7 @@ describe "HttpController", -> doc_id: @doc_id change_id: @change_id = "mock-change-od-1" - describe "successfully", -> + describe "successfully with a single change", -> beforeEach -> @DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3) @HttpController.acceptChanges(@req, @res, @next) @@ -366,6 +366,24 @@ describe "HttpController", -> it "should time the request", -> @Metrics.Timer::done.called.should.equal true + describe "successfully with multiple changes", -> + beforeEach -> + @change_ids = [ "mock-change-od-1", "mock-change-od-2", "mock-change-od-3", "mock-change-od-4" ] + @req.body = + change_ids: @change_ids + @DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3) + @HttpController.acceptChanges(@req, @res, @next) + + it "should accept the changes in the body payload", -> + @DocumentManager.acceptChangesWithLock + .calledWith(@project_id, @doc_id, @change_ids) + .should.equal true + + it "should log the request with the correct number of changes", -> + @logger.log + .calledWith({@project_id, @doc_id}, "accepting #{ @change_ids.length } changes via http") + .should.equal true + describe "when an errors occurs", -> beforeEach -> @DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3, new Error("oops")) diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee index fd3bc8faec..bdb075ebe6 100644 --- a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee @@ -10,7 +10,6 @@ describe "RangesManager", -> @RangesManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - @doc_id = "doc-id-123" @project_id = "project-id-123" @user_id = "user-id-123" @@ -178,4 +177,28 @@ describe "RangesManager", -> @callback.called.should.equal true [error, entries] = @callback.args[0] expect(error).to.not.be.null - expect(error.message).to.equal("Change ({\"op\":{\"i\":\"five\",\"p\":15},\"metadata\":{\"user_id\":\"user-id-123\"}}) doesn't match text (\"our \")") \ No newline at end of file + expect(error.message).to.equal("Change ({\"op\":{\"i\":\"five\",\"p\":15},\"metadata\":{\"user_id\":\"user-id-123\"}}) doesn't match text (\"our \")") + + describe "acceptChanges", -> + beforeEach -> + @ranges = { entries: "mock", comments: "mock" } + + describe "successfully with a single change", -> + beforeEach -> + @change_id = "mock-change-id" + @RangesManager.acceptChanges [ @change_id ], @ranges + + it "should log the call with the correct number of changes", -> + @logger.log + .calledWith("accepting 1 changes in ranges") + .should.equal true + + describe "successfully with multiple changes", -> + beforeEach -> + @change_ids = [ "mock-change-id-1", "mock-change-id-2", "mock-change-id-3", "mock-change-id-4" ] + @RangesManager.acceptChanges @change_ids, @ranges + + it "should log the call with the correct number of changes", -> + @logger.log + .calledWith("accepting #{ @change_ids.length } changes in ranges") + .should.equal true From 5677c7ad9af7668ad6b9d5f8e0892df9d0198032 Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Thu, 11 May 2017 11:31:19 +0100 Subject: [PATCH 264/769] Update range tracker.
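This replaces the reverse while (i--) splice loop in removeChangeIds with a single forward pass that rebuilds the changes array, which avoids index bookkeeping while the array is being mutated. A minimal sketch of the approach, assuming changes is an array of objects with an id and toRemove maps ids to remove (both names are illustrative, not taken from the diff below):

removeByIds = (changes, toRemove) ->
  remaining = []
  for change in changes
    if toRemove[change.id]
      delete toRemove[change.id]  # consume each id at most once
    else
      remaining.push change
  remaining

# removeByIds [{id: "a"}, {id: "b"}], {a: true}  yields [{id: "b"}]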
--- .../app/coffee/RangesTracker.coffee | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index f589098440..4174dcdab0 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -108,13 +108,16 @@ load = () -> for change_id in change_to_remove_ids remove_change_id[change_id] = true - while (i--) - if remove_change_id[@changes[i].id] - delete remove_change_id[@changes[i].id] - removed_change = @changes.splice(i, 1)[0] - @_markAsDirty removed_change, "change", "removed" - if Object.keys(remove_change_id).length == 0 - break + remaining_changes = [] + + for change in @changes + if remove_change_id[change.id] + delete remove_change_id[change.id] + @_markAsDirty change, "change", "removed" + else + remaining_changes.push change + + @changes = remaining_changes validate: (text) -> for change in @changes From d53266ea2d947aa5a8e5e295021cf1b1322a7e20 Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Thu, 11 May 2017 12:00:34 +0100 Subject: [PATCH 265/769] Update web API stub. --- .../test/acceptance/coffee/FlushingDocsTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index 47dddcd19a..f732d69bb6 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -63,7 +63,7 @@ describe "Flushing a doc to Mongo", -> lines: @lines version: @version } - sinon.stub MockWebApi, "setDocument", (project_id, doc_id, lines, version, callback = (error) ->) -> + sinon.stub MockWebApi, "setDocument", (project_id, doc_id, lines, version, ranges, callback = (error) ->) -> setTimeout callback, 30000 DocUpdaterClient.preloadDoc @project_id, @doc_id, done From d4a8d88750058aa0818f2a20cbf7b39bef88b073 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 12 May 2017 13:11:04 +0100 Subject: [PATCH 266/769] put a limit on the number of ops per iteration --- .../app/coffee/RealTimeRedisManager.coffee | 6 ++++-- .../RealTimeRedisManager/RealTimeRedisManagerTests.coffee | 8 ++++---- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index 197a4708c1..7da7ca1f64 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -3,11 +3,13 @@ rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) Keys = Settings.redis.realtime.key_schema logger = require('logger-sharelatex') +MAX_OPS_PER_ITERATION = 8 # process a limited number of ops for safety + module.exports = RealTimeRedisManager = getPendingUpdatesForDoc : (doc_id, callback)-> multi = rclient.multi() - multi.lrange Keys.pendingUpdates({doc_id}), 0 , -1 - multi.del Keys.pendingUpdates({doc_id}) + multi.lrange Keys.pendingUpdates({doc_id}), 0, (MAX_OPS_PER_ITERATION-1) + multi.ltrim Keys.pendingUpdates({doc_id}), MAX_OPS_PER_ITERATION, -1 multi.exec (error, replys) -> return callback(error) if error? 
jsonUpdates = replys[0] diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index b6aa35ac72..a04da996dc 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -26,7 +26,7 @@ describe "RealTimeRedisManager", -> describe "getPendingUpdatesForDoc", -> beforeEach -> @rclient.lrange = sinon.stub() - @rclient.del = sinon.stub() + @rclient.ltrim = sinon.stub() describe "successfully", -> beforeEach -> @@ -40,12 +40,12 @@ describe "RealTimeRedisManager", -> it "should get the pending updates", -> @rclient.lrange - .calledWith("PendingUpdates:#{@doc_id}", 0, -1) + .calledWith("PendingUpdates:#{@doc_id}", 0, 7) .should.equal true it "should delete the pending updates", -> - @rclient.del - .calledWith("PendingUpdates:#{@doc_id}") + @rclient.ltrim + .calledWith("PendingUpdates:#{@doc_id}", 8, -1) .should.equal true it "should call the callback with the updates", -> From be96548199b8831580a9c6630506fd301ade4ab7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 12 May 2017 14:07:59 +0100 Subject: [PATCH 267/769] log number of updates for future debugging --- services/document-updater/app/coffee/UpdateManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 269b16ee67..12c5c7e95b 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -40,6 +40,7 @@ module.exports = UpdateManager = fetchAndApplyUpdates: (project_id, doc_id, callback = (error) ->) -> RealTimeRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => return callback(error) if error? + logger.log {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates" if updates.length == 0 return callback() async.eachSeries updates, From 2ee40d0748b1a73f199cf4aeffc55ce4ad6527b6 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 11 May 2017 16:54:41 +0100 Subject: [PATCH 268/769] ensure document is in redis before consuming ops --- .../app/coffee/DocumentManager.coffee | 7 +++ .../app/coffee/RedisManager.coffee | 6 +++ .../app/coffee/UpdateManager.coffee | 10 +++-- .../DocumentManagerTests.coffee | 45 ++++++++++++++++++- .../UpdateManager/UpdateManagerTests.coffee | 5 ++- 5 files changed, 67 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index c155de58fe..fa5284adab 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -28,6 +28,13 @@ module.exports = DocumentManager = else callback null, lines, version, ranges, true + ensureDocIsLoaded: (project_id, doc_id, callback = (error) ->) -> + RedisManager.checkDocInMemory project_id, doc_id, (error) -> + if error? + DocumentManager.getDoc project_id, doc_id, callback + else + callback() + getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, ranges) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") callback = (args...) 
-> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 4aad7ec109..857e19c88c 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -30,6 +30,12 @@ historyKeys = Settings.redis.history.key_schema module.exports = RedisManager = rclient: rclient + checkDocInMemory: (project_id, doc_id, callback) -> + rclient.exists keys.docLines(doc_id:doc_id), (error, result) -> + return callback(error) if error? + return callback new Error("doc not in memory") if result isnt 1 + callback() + putDocInMemory : (project_id, doc_id, docLines, version, ranges, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 269b16ee67..257bc71601 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -14,10 +14,14 @@ RangesManager = require "./RangesManager" module.exports = UpdateManager = processOutstandingUpdates: (project_id, doc_id, callback = (error) ->) -> timer = new Metrics.Timer("updateManager.processOutstandingUpdates") - UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) -> - timer.done() + DocumentManager.ensureDocIsLoaded project_id, doc_id, (error) -> + # an error at this point is safe, because we haven't consumed any ops yet return callback(error) if error? - callback() + UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) -> + timer.done() + # now we have taken ops off the queue so errors here will cause data loss + return callback(error) if error? 
+ callback() processOutstandingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> LockManager.tryLock doc_id, (error, gotLock, lockValue) => diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index e781546ec6..0844c208eb 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -188,7 +188,48 @@ describe "DocumentManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - + + + describe "ensureDocIsLoaded", -> + describe "when the doc exists in Redis", -> + beforeEach -> + @RedisManager.checkDocInMemory = sinon.stub().callsArgWith(2) + @DocumentManager.ensureDocIsLoaded @project_id, @doc_id, @callback + + it "should check the doc in Redis", -> + @RedisManager.checkDocInMemory + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + describe "when the doc does not exist in Redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) + @RedisManager.putDocInMemory = sinon.stub().yields() + @RedisManager.checkDocInMemory = sinon.stub().callsArgWith(2, new Error("doc is not loaded")) + @DocumentManager.ensureDocIsLoaded @project_id, @doc_id, @callback + + it "should try to get the doc from Redis", -> + @RedisManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should get the doc from the PersistenceManager", -> + @PersistenceManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should set the doc in Redis", -> + @RedisManager.putDocInMemory + .calledWith(@project_id, @doc_id, @lines, @version, @ranges) + .should.equal true + + it "should call the callback", -> + @callback.calledWith(null).should.equal true + describe "setDoc", -> describe "with plain tex lines", -> beforeEach -> @@ -363,4 +404,4 @@ describe "DocumentManager", -> it "should call the callback with a not found error", -> error = new Errors.NotFoundError("document not found: #{@doc_id}") - @callback.calledWith(error).should.equal true \ No newline at end of file + @callback.calledWith(error).should.equal true diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 2de6e93e44..f458f5d0b4 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -25,6 +25,7 @@ describe "UpdateManager", -> describe "processOutstandingUpdates", -> beforeEach -> + @DocumentManager.ensureDocIsLoaded = sinon.stub().callsArg(2) @UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2) @UpdateManager.processOutstandingUpdates @project_id, @doc_id, @callback @@ -42,6 +43,7 @@ describe "UpdateManager", -> beforeEach -> @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, @lockValue = "mock-lock-value") @LockManager.releaseLock = sinon.stub().callsArg(2) + @RedisManager.checkDocInMemory = sinon.stub().callsArg(2) @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub().callsArg(2) 
@UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) @@ -90,7 +92,8 @@ describe "UpdateManager", -> it "should not process the updates", -> @UpdateManager.processOutstandingUpdates.called.should.equal false - + + describe "continueProcessingUpdatesWithLock", -> describe "when there are outstanding updates", -> beforeEach -> From 36d16a76e0e30b19e7d95a81f9387519482ac19e Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Fri, 12 May 2017 14:42:40 +0100 Subject: [PATCH 269/769] Unit test accept changes in the ranges manager. --- .../RangesManager/RangesManagerTests.coffee | 95 +++++++++++++++++-- 1 file changed, 88 insertions(+), 7 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee index bdb075ebe6..b11c73489e 100644 --- a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee @@ -10,6 +10,7 @@ describe "RangesManager", -> @RangesManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + @doc_id = "doc-id-123" @project_id = "project-id-123" @user_id = "user-id-123" @@ -181,24 +182,104 @@ describe "RangesManager", -> describe "acceptChanges", -> beforeEach -> - @ranges = { entries: "mock", comments: "mock" } + @RangesManager = SandboxedModule.require modulePath, + requires: + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + "./RangesTracker":@RangesTracker = SandboxedModule.require "../../../../app/js/RangesTracker.js" + + @ranges = { + comments: [] + changes: [{ + id: "a1" + op: + i: "lorem" + p: 0 + }, { + id: "a2" + op: + i: "ipsum" + p: 10 + }, { + id: "a3" + op: + i: "dolor" + p: 20 + }, { + id: "a4" + op: + i: "sit" + p: 30 + }, { + id: "a5" + op: + i: "amet" + p: 40 + }] + } + @removeChangeIdsSpy = sinon.spy @RangesTracker.prototype, "removeChangeIds" describe "successfully with a single change", -> - beforeEach -> - @change_id = "mock-change-id" - @RangesManager.acceptChanges [ @change_id ], @ranges + beforeEach (done) -> + @change_ids = [ @ranges.changes[1].id ] + @RangesManager.acceptChanges @change_ids, @ranges, (err, ranges) => + @rangesResponse = ranges + done() it "should log the call with the correct number of changes", -> @logger.log .calledWith("accepting 1 changes in ranges") .should.equal true + it "should delegate the change removal to the ranges tracker", -> + @removeChangeIdsSpy + .calledWith(@change_ids) + .should.equal true + + it "should remove the change", -> + expect(@rangesResponse.changes + .find((change) => change.id == @ranges.changes[1].id)) + .to.be.undefined + + it "should return the original number of changes minus 1", -> + @rangesResponse.changes.length + .should.equal @ranges.changes.length - 1 + + it "should not touch other changes", -> + for i in [ 0, 2, 3, 4] + expect(@rangesResponse.changes + .find((change) => change.id == @ranges.changes[i].id)) + .to.deep.equal @ranges.changes[i] + describe "successfully with multiple changes", -> - beforeEach -> - @change_ids = [ "mock-change-id-1", "mock-change-id-2", "mock-change-id-3", "mock-change-id-4" ] - @RangesManager.acceptChanges @change_ids, @ranges + beforeEach (done) -> + @change_ids = [ @ranges.changes[1].id, @ranges.changes[3].id, @ranges.changes[4].id ] + @RangesManager.acceptChanges @change_ids, 
@ranges, (err, ranges) => + @rangesResponse = ranges + done() it "should log the call with the correct number of changes", -> @logger.log .calledWith("accepting #{ @change_ids.length } changes in ranges") .should.equal true + + it "should delegate the change removal to the ranges tracker", -> + @removeChangeIdsSpy + .calledWith(@change_ids) + .should.equal true + + it "should remove the changes", -> + for i in [ 1, 3, 4] + expect(@rangesResponse.changes + .find((change) => change.id == @ranges.changes[i].id)) + .to.be.undefined + + it "should return the original number of changes minus the number of accepted changes", -> + @rangesResponse.changes.length + .should.equal @ranges.changes.length - 3 + + it "should not touch other changes", -> + for i in [ 0, 2 ] + expect(@rangesResponse.changes + .find((change) => change.id == @ranges.changes[i].id)) + .to.deep.equal @ranges.changes[i] + From ef7fa5925747fc131b3013ff342c282348764d9d Mon Sep 17 00:00:00 2001 From: Paulo Reis Date: Mon, 15 May 2017 11:11:14 +0100 Subject: [PATCH 270/769] Update ranges tracker. --- .../app/coffee/RangesTracker.coffee | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.coffee index 4174dcdab0..869d63159b 100644 --- a/services/document-updater/app/coffee/RangesTracker.coffee +++ b/services/document-updater/app/coffee/RangesTracker.coffee @@ -96,6 +96,20 @@ load = () -> break return change + getChanges: (change_ids) -> + changes_response = [] + ids_map = {} + + for change_id in change_ids + ids_map[change_id] = true + + for change in @changes + if ids_map[change.id] + delete ids_map[change.id] + changes_response.push change + + return changes_response + removeChangeId: (change_id) -> change = @getChange(change_id) return if !change? From b15d2ef796e89b4ccac20bd59cfb96b6af478b72 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 15 May 2017 16:18:40 +0100 Subject: [PATCH 271/769] Revert "fail safely if doc cannot be loaded" --- .../app/coffee/DocumentManager.coffee | 7 --- .../app/coffee/RedisManager.coffee | 6 --- .../app/coffee/UpdateManager.coffee | 10 ++--- .../DocumentManagerTests.coffee | 45 +------------------ .../UpdateManager/UpdateManagerTests.coffee | 5 +-- 5 files changed, 6 insertions(+), 67 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 489b79bd56..054baca47e 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -28,13 +28,6 @@ module.exports = DocumentManager = else callback null, lines, version, ranges, true - ensureDocIsLoaded: (project_id, doc_id, callback = (error) ->) -> - RedisManager.checkDocInMemory project_id, doc_id, (error) -> - if error? - DocumentManager.getDoc project_id, doc_id, callback - else - callback() - getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, ranges) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") callback = (args...)
-> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 857e19c88c..4aad7ec109 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -30,12 +30,6 @@ historyKeys = Settings.redis.history.key_schema module.exports = RedisManager = rclient: rclient - checkDocInMemory: (project_id, doc_id, callback) -> - rclient.exists keys.docLines(doc_id:doc_id), (error, result) -> - return callback(error) if error? - return callback new Error("doc not in memory") if result isnt 1 - callback() - putDocInMemory : (project_id, doc_id, docLines, version, ranges, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 257bc71601..269b16ee67 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -14,14 +14,10 @@ RangesManager = require "./RangesManager" module.exports = UpdateManager = processOutstandingUpdates: (project_id, doc_id, callback = (error) ->) -> timer = new Metrics.Timer("updateManager.processOutstandingUpdates") - DocumentManager.ensureDocIsLoaded project_id, doc_id, (error) -> - # an error at this point is safe, because we haven't consumed any ops yet + UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) -> + timer.done() return callback(error) if error? - UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) -> - timer.done() - # now we have taken ops off the queue so errors here will cause data loss - return callback(error) if error? - callback() + callback() processOutstandingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> LockManager.tryLock doc_id, (error, gotLock, lockValue) => diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index d5607220f0..b2eaa321d6 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -188,48 +188,7 @@ describe "DocumentManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - - - describe "ensureDocIsLoaded", -> - describe "when the doc exists in Redis", -> - beforeEach -> - @RedisManager.checkDocInMemory = sinon.stub().callsArgWith(2) - @DocumentManager.ensureDocIsLoaded @project_id, @doc_id, @callback - - it "should check the doc in Redis", -> - @RedisManager.checkDocInMemory - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - - describe "when the doc does not exist in Redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) - @RedisManager.putDocInMemory = sinon.stub().yields() - @RedisManager.checkDocInMemory = sinon.stub().callsArgWith(2, new Error("doc is not loaded")) - @DocumentManager.ensureDocIsLoaded @project_id, @doc_id, @callback - - it "should try to get the doc from Redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should get the doc 
from the PersistenceManager", -> - @PersistenceManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should set the doc in Redis", -> - @RedisManager.putDocInMemory - .calledWith(@project_id, @doc_id, @lines, @version, @ranges) - .should.equal true - - it "should call the callback", -> - @callback.calledWith(null).should.equal true - + describe "setDoc", -> describe "with plain tex lines", -> beforeEach -> @@ -414,4 +373,4 @@ describe "DocumentManager", -> it "should call the callback with a not found error", -> error = new Errors.NotFoundError("document not found: #{@doc_id}") - @callback.calledWith(error).should.equal true + @callback.calledWith(error).should.equal true \ No newline at end of file diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index f458f5d0b4..2de6e93e44 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -25,7 +25,6 @@ describe "UpdateManager", -> describe "processOutstandingUpdates", -> beforeEach -> - @DocumentManager.ensureDocIsLoaded = sinon.stub().callsArg(2) @UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2) @UpdateManager.processOutstandingUpdates @project_id, @doc_id, @callback @@ -43,7 +42,6 @@ describe "UpdateManager", -> beforeEach -> @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, @lockValue = "mock-lock-value") @LockManager.releaseLock = sinon.stub().callsArg(2) - @RedisManager.checkDocInMemory = sinon.stub().callsArg(2) @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub().callsArg(2) @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) @@ -92,8 +90,7 @@ describe "UpdateManager", -> it "should not process the updates", -> @UpdateManager.processOutstandingUpdates.called.should.equal false - - + describe "continueProcessingUpdatesWithLock", -> describe "when there are outstanding updates", -> beforeEach -> From c7d8fbbb8ab29788385b510c97248cf2fbc2cf2d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 9 May 2017 14:06:09 +0100 Subject: [PATCH 272/769] exponential backoff for lock up to maximum time --- services/document-updater/app/coffee/LockManager.coffee | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index d237b51feb..0321a68841 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -14,6 +14,7 @@ COUNT = 0 module.exports = LockManager = LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock + MAX_TEST_INTERVAL: 1000 # back off to 1s between each test of the lock MAX_LOCK_WAIT_TIME: 10000 # 10s maximum time to spend trying to get the lock LOCK_TTL: 30 # seconds. Time until lock auto expires in redis. 
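# A sketch (not part of the patch) of the retry cadence these constants aim
# for: the delay between lock attempts doubles from LOCK_TEST_INTERVAL up to
# a cap of MAX_TEST_INTERVAL, while the total wait stays bounded by
# MAX_LOCK_WAIT_TIME. (The follow-up patch corrects Math.max to Math.min in
# the hunk below.)
backoffDelays = (first, max, attempts) ->
  delay = first
  for attempt in [1..attempts]
    current = delay
    delay = Math.min(delay * 2, max)
    current
# backoffDelays(50, 1000, 7) yields [50, 100, 200, 400, 800, 1000, 1000]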
@@ -41,6 +42,7 @@ module.exports = LockManager = getLock: (doc_id, callback = (error, lockValue) ->) -> startTime = Date.now() + testInterval = LockManager.LOCK_TEST_INTERVAL do attempt = () -> if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME e = new Error("Timeout") @@ -52,7 +54,9 @@ module.exports = LockManager = if gotLock callback(null, lockValue) else - setTimeout attempt, LockManager.LOCK_TEST_INTERVAL + setTimeout attempt, testInterval + # back off when the lock is taken to avoid overloading + testInterval = Math.max(testInterval * 2, LockManager.MAX_TEST_INTERVAL) checkLock: (doc_id, callback = (err, isFree)->)-> key = keys.blockingKey(doc_id:doc_id) From dff6e2c3da4eeeeaf0bef1bd6f0c6dbb831a33b1 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 16 May 2017 16:31:28 +0100 Subject: [PATCH 273/769] bug fix in backoff --- services/document-updater/app/coffee/LockManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 0321a68841..37cbe49914 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -56,7 +56,7 @@ module.exports = LockManager = else setTimeout attempt, testInterval # back off when the lock is taken to avoid overloading - testInterval = Math.max(testInterval * 2, LockManager.MAX_TEST_INTERVAL) + testInterval = Math.min(testInterval * 2, LockManager.MAX_TEST_INTERVAL) checkLock: (doc_id, callback = (err, isFree)->)-> key = keys.blockingKey(doc_id:doc_id) From 387ecac6cb9e4362e8527f7c6add3689cd391975 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 17 May 2017 16:11:48 +0100 Subject: [PATCH 274/769] log for each get/try/release lock --- .../document-updater/app/coffee/LockManager.coffee | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 37cbe49914..1f710b448d 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -30,14 +30,16 @@ module.exports = LockManager = tryLock : (doc_id, callback = (err, isFree)->)-> lockValue = LockManager.randomLock() key = keys.blockingKey(doc_id:doc_id) + logger.log {doc_id: doc_id, key: key}, "tryLock" rclient.set key, lockValue, "EX", @LOCK_TTL, "NX", (err, gotLock)-> return callback(err) if err? if gotLock == "OK" metrics.inc "doc-not-blocking" + logger.log {doc_id: doc_id, key: key, lockValue: lockValue}, "got lock" callback err, true, lockValue else metrics.inc "doc-blocking" - logger.log {doc_id}, "doc is locked" + logger.log {doc_id: doc_id, key: key}, "doc is locked" callback err, false getLock: (doc_id, callback = (error, lockValue) ->) -> @@ -54,6 +56,7 @@ module.exports = LockManager = if gotLock callback(null, lockValue) else + logger.log {doc_id: doc_id, delay: testInterval}, "will retry lock" setTimeout attempt, testInterval # back off when the lock is taken to avoid overloading testInterval = Math.min(testInterval * 2, LockManager.MAX_TEST_INTERVAL) @@ -75,7 +78,9 @@ module.exports = LockManager = rclient.eval LockManager.unlockScript, 1, key, lockValue, (err, result) -> if err? return callback(err) - if result? 
and result isnt 1 # successful unlock should release exactly one key - logger.error {doc_id:doc_id, lockValue:lockValue, redis_err:err, redis_result:result}, "unlocking error" + else if result? and result isnt 1 # successful unlock should release exactly one key + logger.error {doc_id:doc_id, key:key, lockValue:lockValue, redis_err:err, redis_result:result}, "unlocking error" return callback(new Error("tried to release timed out lock")) - callback(err,result) + else + logger.log {doc_id:doc_id, key:key, lockValue:lockValue}, "released lock" + callback(err,result) From b6efb051e145367caab6e10b8282c3e9b16af74d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 18 May 2017 11:00:07 +0100 Subject: [PATCH 275/769] add profiling --- .../app/coffee/Profiler.coffee | 28 +++++++++++++++++++ .../app/coffee/UpdateManager.coffee | 23 +++++++++++++-- 2 files changed, 48 insertions(+), 3 deletions(-) create mode 100644 services/document-updater/app/coffee/Profiler.coffee diff --git a/services/document-updater/app/coffee/Profiler.coffee b/services/document-updater/app/coffee/Profiler.coffee new file mode 100644 index 0000000000..8aacf2a0a8 --- /dev/null +++ b/services/document-updater/app/coffee/Profiler.coffee @@ -0,0 +1,28 @@ +Settings = require('settings-sharelatex') +logger = require('logger-sharelatex') + +deltaMs = (ta, tb) -> + return Math.floor(((ta[0]-tb[0])*1e9 + (ta[1]-tb[1]))*1e-6) + +module.exports = class Profiler + LOG_CUTOFF_TIME: 100 + + constructor: (@name, @args) -> + @t0 = @t = process.hrtime() + @updateTimes = [] + + log: (label) -> + t1 = process.hrtime() + dtMilliSec = deltaMs(t1, @t) + @t = t1 + @updateTimes.push [label, dtMilliSec] # timings in ms + return @ # make it chainable + + end: (message) -> + totalTime = deltaMs(@t, @t0) + return if totalTime < @LOG_CUTOFF_TIME # skip anything less than cutoff + args = {} + for k,v of @args + args[k] = v + args.updateTimes = @updateTimes + logger.log args, @name diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 269b16ee67..70caaf1e03 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -10,6 +10,7 @@ Metrics = require "./Metrics" Errors = require "./Errors" DocumentManager = require "./DocumentManager" RangesManager = require "./RangesManager" +Profiler = require "./Profiler" module.exports = UpdateManager = processOutstandingUpdates: (project_id, doc_id, callback = (error) ->) -> @@ -20,13 +21,17 @@ module.exports = UpdateManager = callback() processOutstandingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> + profile = new Profiler("processOutstandingUpdatesWithLock", {project_id, doc_id}) LockManager.tryLock doc_id, (error, gotLock, lockValue) => return callback(error) if error? return callback() if !gotLock + profile.log("tryLock") UpdateManager.processOutstandingUpdates project_id, doc_id, (error) -> return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? + profile.log("processOutstandingUpdates") LockManager.releaseLock doc_id, lockValue, (error) => return callback(error) if error? 
+ profile.log("releaseLock").end() UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback continueProcessingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> @@ -38,13 +43,20 @@ module.exports = UpdateManager = callback() fetchAndApplyUpdates: (project_id, doc_id, callback = (error) ->) -> + profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id}) RealTimeRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => return callback(error) if error? if updates.length == 0 return callback() - async.eachSeries updates, - (update, cb) -> UpdateManager.applyUpdate project_id, doc_id, update, cb - callback + profile.log("getPendingUpdatesForDoc") + doUpdate = (update, cb)-> + UpdateManager.applyUpdate project_id, doc_id, update, (err) -> + profile.log("applyUpdate") + cb(err) + finalCallback = (err) -> + profile.log("async done").end() + callback(err) + async.eachSeries updates, doUpdate, finalCallback applyUpdate: (project_id, doc_id, update, _callback = (error) ->) -> callback = (error) -> @@ -66,14 +78,19 @@ module.exports = UpdateManager = HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, historyOpsLength, callback lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) -> + profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id}) LockManager.getLock doc_id, (error, lockValue) -> + profile.log("getLock") return callback(error) if error? UpdateManager.processOutstandingUpdates project_id, doc_id, (error) -> return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? + profile.log("processOutStandingUpdates") method project_id, doc_id, args..., (error, response_args...) -> return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? + profile.log("method") LockManager.releaseLock doc_id, lockValue, (error) -> return callback(error) if error? + profile.log("releaseLock").end() callback null, response_args... 
# We held the lock for a while so updates might have queued up UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id From e11f64f83a1f918afa7998b817e551e0d7311a09 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 18 May 2017 15:02:08 +0100 Subject: [PATCH 276/769] increase profiling cutoff to 1s --- services/document-updater/app/coffee/Profiler.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/Profiler.coffee b/services/document-updater/app/coffee/Profiler.coffee index 8aacf2a0a8..f4ee219afb 100644 --- a/services/document-updater/app/coffee/Profiler.coffee +++ b/services/document-updater/app/coffee/Profiler.coffee @@ -5,7 +5,7 @@ deltaMs = (ta, tb) -> return Math.floor(((ta[0]-tb[0])*1e9 + (ta[1]-tb[1]))*1e-6) module.exports = class Profiler - LOG_CUTOFF_TIME: 100 + LOG_CUTOFF_TIME: 1000 constructor: (@name, @args) -> @t0 = @t = process.hrtime() From e00f4dde8e16bd433d241c519b04718a3fe722fa Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 18 May 2017 15:02:44 +0100 Subject: [PATCH 277/769] log profilers start and end times for reference --- services/document-updater/app/coffee/Profiler.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/app/coffee/Profiler.coffee b/services/document-updater/app/coffee/Profiler.coffee index f4ee219afb..70a0813b67 100644 --- a/services/document-updater/app/coffee/Profiler.coffee +++ b/services/document-updater/app/coffee/Profiler.coffee @@ -9,6 +9,7 @@ module.exports = class Profiler constructor: (@name, @args) -> @t0 = @t = process.hrtime() + @start = new Date() @updateTimes = [] log: (label) -> @@ -25,4 +26,6 @@ module.exports = class Profiler for k,v of @args args[k] = v args.updateTimes = @updateTimes + args.start = @start + args.end = new Date() logger.log args, @name From 95596061d44be949e52b53aea1a52a406c57611d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 18 May 2017 15:02:54 +0100 Subject: [PATCH 278/769] clarify calculation of process.hrtime --- services/document-updater/app/coffee/Profiler.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/Profiler.coffee b/services/document-updater/app/coffee/Profiler.coffee index 70a0813b67..dc88345334 100644 --- a/services/document-updater/app/coffee/Profiler.coffee +++ b/services/document-updater/app/coffee/Profiler.coffee @@ -2,7 +2,9 @@ Settings = require('settings-sharelatex') logger = require('logger-sharelatex') deltaMs = (ta, tb) -> - return Math.floor(((ta[0]-tb[0])*1e9 + (ta[1]-tb[1]))*1e-6) + nanoSeconds = (ta[0]-tb[0])*1e9 + (ta[1]-tb[1]) + milliSeconds = Math.floor(nanoSeconds*1e-6) + return milliSeconds module.exports = class Profiler LOG_CUTOFF_TIME: 1000 From a5500780711507ebea0d55800c06cb55813ae5ff Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 18 May 2017 15:04:12 +0100 Subject: [PATCH 279/769] replace verbose logging by slow query log --- .../app/coffee/LockManager.coffee | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 1f710b448d..d918dc4332 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -7,6 +7,8 @@ logger = require "logger-sharelatex" os = require "os" crypto = require "crypto" +Profiler = require "./Profiler" + HOST = os.hostname() PID = process.pid RND = 
crypto.randomBytes(4).toString('hex') @@ -30,33 +32,36 @@ module.exports = LockManager = tryLock : (doc_id, callback = (err, isFree)->)-> lockValue = LockManager.randomLock() key = keys.blockingKey(doc_id:doc_id) - logger.log {doc_id: doc_id, key: key}, "tryLock" + profile = new Profiler("tryLock", {doc_id, key, lockValue}) rclient.set key, lockValue, "EX", @LOCK_TTL, "NX", (err, gotLock)-> return callback(err) if err? if gotLock == "OK" metrics.inc "doc-not-blocking" - logger.log {doc_id: doc_id, key: key, lockValue: lockValue}, "got lock" + profile.log("got lock").end() callback err, true, lockValue else metrics.inc "doc-blocking" - logger.log {doc_id: doc_id, key: key}, "doc is locked" + profile.log("doc is locked").end() callback err, false getLock: (doc_id, callback = (error, lockValue) ->) -> startTime = Date.now() testInterval = LockManager.LOCK_TEST_INTERVAL + profile = new Profiler("getLock", {doc_id}) do attempt = () -> if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME e = new Error("Timeout") e.doc_id = doc_id + profile.log("timeout").end() return callback(e) LockManager.tryLock doc_id, (error, gotLock, lockValue) -> return callback(error) if error? + profile.log("tryLock") if gotLock + profile.end() callback(null, lockValue) else - logger.log {doc_id: doc_id, delay: testInterval}, "will retry lock" setTimeout attempt, testInterval # back off when the lock is taken to avoid overloading testInterval = Math.min(testInterval * 2, LockManager.MAX_TEST_INTERVAL) @@ -75,12 +80,14 @@ module.exports = LockManager = releaseLock: (doc_id, lockValue, callback)-> key = keys.blockingKey(doc_id:doc_id) + profile = new Profiler("releaseLock", {doc_id, key, lockValue}) rclient.eval LockManager.unlockScript, 1, key, lockValue, (err, result) -> if err? return callback(err) else if result? and result isnt 1 # successful unlock should release exactly one key + profile.log("unlockScript:expired-lock").end() logger.error {doc_id:doc_id, key:key, lockValue:lockValue, redis_err:err, redis_result:result}, "unlocking error" return callback(new Error("tried to release timed out lock")) else - logger.log {doc_id:doc_id, key:key, lockValue:lockValue}, "released lock" + profile.log("unlockScript:ok").end() callback(err,result) From 78828ebbc5e3a844015b2c1f42ba546f9e50d2a4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 19 May 2017 16:00:16 +0100 Subject: [PATCH 280/769] fine grained logging for cpu usage in applyUpdate --- .../app/coffee/UpdateManager.coffee | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 70caaf1e03..3a69761977 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -62,20 +62,31 @@ module.exports = UpdateManager = callback = (error) -> if error? RealTimeRedisManager.sendData {project_id, doc_id, error: error.message || error} + profile.log("sendData") + profile.end() _callback(error) - + + profile = new Profiler("applyUpdate", {project_id, doc_id}) UpdateManager._sanitizeUpdate update + profile.log("sanitizeUpdate") DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> + profile.log("getDoc") return callback(error) if error? if !lines? or !version? 
return callback(new Errors.NotFoundError("document not found: #{doc_id}")) ShareJsUpdateManager.applyUpdate project_id, doc_id, update, lines, version, (error, updatedDocLines, version, appliedOps) -> + profile.log("sharejs.applyUpdate") return callback(error) if error? RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> + profile.log("RangesManager.applyUpdate") return callback(error) if error? RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, historyOpsLength) -> + profile.log("RedisManager.updateDocument") return callback(error) if error? - HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, historyOpsLength, callback + HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, historyOpsLength, (error) -> + profile.log("recordAndFlushHistoryOps") + return callback(error) if error? + callback() lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) -> profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id}) @@ -84,7 +95,7 @@ module.exports = UpdateManager = return callback(error) if error? UpdateManager.processOutstandingUpdates project_id, doc_id, (error) -> return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? - profile.log("processOutStandingUpdates") + profile.log("processOutstandingUpdates") method project_id, doc_id, args..., (error, response_args...) -> return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? profile.log("method") From a13f055d62cb5f3912141765c55f3ef8e403bc53 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 May 2017 14:58:38 +0100 Subject: [PATCH 281/769] add null byte check for ops --- services/document-updater/app/coffee/RedisManager.coffee | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 4aad7ec109..8be17c647d 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -162,6 +162,11 @@ module.exports = RedisManager = return callback(error) jsonOps = appliedOps.map (op) -> JSON.stringify op + if jsonOps.indexOf("\u0000") != -1 + error = new Error("null bytes found in jsonOps") + logger.error err: error, doc_id: doc_id, jsonOps: jsonOps, error.message + return callback(error) + newDocLines = JSON.stringify(docLines) if newDocLines.indexOf("\u0000") != -1 error = new Error("null bytes found in doc lines") From 54ad45c870e6e7d1bc08da24368d657acb888d27 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 May 2017 14:58:51 +0100 Subject: [PATCH 282/769] add null byte check for ranges --- services/document-updater/app/coffee/RedisManager.coffee | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 8be17c647d..e3bae7d56f 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -181,6 +181,10 @@ module.exports = RedisManager = if error? logger.error {err: error, doc_id}, error.message return callback(error) + if ranges? 
and ranges.indexOf("\u0000") != -1 + error = new Error("null bytes found in ranges") + logger.error err: error, doc_id: doc_id, ranges: ranges, error.message + return callback(error) multi = rclient.multi() multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines # index 0 multi.set keys.docVersion(doc_id:doc_id), newVersion # index 1 From a3947c587fd4aa9a44ea4657f09e8e3145a3dea8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 May 2017 15:20:28 +0100 Subject: [PATCH 283/769] fix bug, jsonOps is array not string --- services/document-updater/app/coffee/RedisManager.coffee | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index e3bae7d56f..5e2cc5c84b 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -162,10 +162,11 @@ module.exports = RedisManager = return callback(error) jsonOps = appliedOps.map (op) -> JSON.stringify op - if jsonOps.indexOf("\u0000") != -1 - error = new Error("null bytes found in jsonOps") - logger.error err: error, doc_id: doc_id, jsonOps: jsonOps, error.message - return callback(error) + for op in jsonOps + if op.indexOf("\u0000") != -1 + error = new Error("null bytes found in jsonOps") + logger.error err: error, doc_id: doc_id, jsonOps: jsonOps, error.message + return callback(error) newDocLines = JSON.stringify(docLines) if newDocLines.indexOf("\u0000") != -1 From c43f97e8e3c60b54cc21934c963f5f77e4535d70 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 24 May 2017 11:47:06 +0100 Subject: [PATCH 284/769] add metric for active workers --- services/document-updater/app/coffee/DispatchManager.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index b7e50291b0..4444c0183b 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -20,7 +20,9 @@ module.exports = DispatchManager = [list_name, doc_key] = result [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) # Dispatch this in the background + Metrics.gauge "processingUpdates", "+1" # increments/decrements gauge with +/- sign UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> + Metrics.gauge "processingUpdates", "-1" logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? 
callback() @@ -39,4 +41,4 @@ module.exports = DispatchManager = createAndStartDispatchers: (number) -> for i in [1..number] worker = DispatchManager.createDispatcher() - worker.run() \ No newline at end of file + worker.run() From 891ffda3bfc19f473c70360e8ece9ac7afeb34e4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 26 May 2017 12:10:57 +0100 Subject: [PATCH 285/769] put a rate limit on worker calls --- .../app/coffee/DispatchManager.coffee | 16 ++-- .../app/coffee/RateLimitManager.coffee | 36 ++++++++ .../DispatchManagerTests.coffee | 3 +- .../RateLimitManager/RateLimitManager.coffee | 90 +++++++++++++++++++ 4 files changed, 137 insertions(+), 8 deletions(-) create mode 100644 services/document-updater/app/coffee/RateLimitManager.coffee create mode 100644 services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 4444c0183b..e60d226660 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -5,9 +5,10 @@ redis = require("redis-sharelatex") UpdateManager = require('./UpdateManager') Metrics = require('./Metrics') +RateLimitManager = require('./RateLimitManager') module.exports = DispatchManager = - createDispatcher: () -> + createDispatcher: (RateLimiter) -> client = redis.createClient(Settings.redis.realtime) worker = { client: client @@ -20,11 +21,11 @@ module.exports = DispatchManager = [list_name, doc_key] = result [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) # Dispatch this in the background - Metrics.gauge "processingUpdates", "+1" # increments/decrements gauge with +/- sign - UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> - Metrics.gauge "processingUpdates", "-1" - logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? - callback() + backgroundTask = (cb) -> + UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> + logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? 
+ cb() + RateLimiter.run backgroundTask, callback run: () -> return if Settings.shuttingDown @@ -39,6 +40,7 @@ module.exports = DispatchManager = return worker createAndStartDispatchers: (number) -> + RateLimiter = new RateLimitManager(number) for i in [1..number] - worker = DispatchManager.createDispatcher() + worker = DispatchManager.createDispatcher(RateLimiter) worker.run() diff --git a/services/document-updater/app/coffee/RateLimitManager.coffee b/services/document-updater/app/coffee/RateLimitManager.coffee new file mode 100644 index 0000000000..ce61232af3 --- /dev/null +++ b/services/document-updater/app/coffee/RateLimitManager.coffee @@ -0,0 +1,36 @@ +Settings = require('settings-sharelatex') +logger = require('logger-sharelatex') +Metrics = require('./Metrics') + +module.exports = class RateLimiter + + constructor: (number = 10) -> + @ActiveWorkerCount = 0 + @CurrentWorkerLimit = number + @BaseWorkerCount = number + + _adjustLimitUp: () -> + @CurrentWorkerLimit += 0.1 # allow target worker limit to increase gradually + + _adjustLimitDown: () -> + @CurrentWorkerLimit = Math.max @BaseWorkerCount, (@CurrentWorkerLimit * 0.9) + logger.log {currentLimit: Math.ceil(@CurrentWorkerLimit)}, "reducing rate limit" + + _trackAndRun: (task, callback = () ->) -> + @ActiveWorkerCount++ + Metrics.gauge "processingUpdates", "+1" # increments/decrements gauge with +/- sign + task (err) => + @ActiveWorkerCount-- + Metrics.gauge "processingUpdates", "-1" + callback(err) + + run: (task, callback) -> + if @ActiveWorkerCount < @CurrentWorkerLimit + @_trackAndRun task # below the limit, just put the task in the background + callback() # return immediately + if @CurrentWorkerLimit > @BaseWorkerCount + @_adjustLimitDown() + else + logger.log {active: @ActiveWorkerCount, currentLimit: Math.ceil(@CurrentWorkerLimit)}, "hit rate limit" + @_trackAndRun task, callback # only return after task completes + @_adjustLimitUp() diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index a82a40af04..dcd643fcfe 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -14,6 +14,7 @@ describe "DispatchManager", -> realtime: {} "redis-sharelatex": @redis = {} @callback = sinon.stub() + @RateLimiter = { run: (task,cb) -> task(cb) } # run task without rate limit describe "each worker", -> beforeEach -> @@ -21,7 +22,7 @@ describe "DispatchManager", -> auth: sinon.stub() @redis.createClient = sinon.stub().returns @client - @worker = @DispatchManager.createDispatcher() + @worker = @DispatchManager.createDispatcher(@RateLimiter) it "should create a new redis client", -> @redis.createClient.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee new file mode 100644 index 0000000000..866532a4da --- /dev/null +++ b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee @@ -0,0 +1,90 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../../app/js/RateLimitManager.js" +SandboxedModule = require('sandboxed-module') + +describe "RateLimitManager", -> + beforeEach -> + @RateLimitManager = SandboxedModule.require 
modulePath, requires: + "logger-sharelatex": @logger = { log: sinon.stub() } + "settings-sharelatex": @settings = + redis: + realtime: {} + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + gauge: sinon.stub() + @callback = sinon.stub() + @RateLimiter = new @RateLimitManager(1) + + describe "for a single task", -> + beforeEach -> + @task = sinon.stub() + @RateLimiter.run @task, @callback + + it "should execute the task in the background", -> + @task.called.should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + it "should finish with a worker count of one", -> + # because it's in the background + expect(@RateLimiter.ActiveWorkerCount).to.equal 1 + + describe "for multiple tasks", -> + beforeEach (done) -> + @task = sinon.stub() + @finalTask = sinon.stub() + task = (cb) => + @task() + setTimeout cb, 100 + finalTask = (cb) => + @finalTask() + setTimeout cb, 100 + @RateLimiter.run task, @callback + @RateLimiter.run task, @callback + @RateLimiter.run task, @callback + @RateLimiter.run finalTask, (err) => + @callback(err) + done() + + it "should execute the first three tasks", -> + @task.calledThrice.should.equal true + + it "should execute the final task", -> + @finalTask.called.should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + it "should finish with worker count of zero", -> + expect(@RateLimiter.ActiveWorkerCount).to.equal 0 + + describe "for a mixture of long-running tasks", -> + beforeEach (done) -> + @task = sinon.stub() + @finalTask = sinon.stub() + finalTask = (cb) => + @finalTask() + setTimeout cb, 100 + @RateLimiter.run @task, @callback + @RateLimiter.run @task, @callback + @RateLimiter.run @task, @callback + @RateLimiter.run finalTask, (err) => + @callback(err) + done() + + it "should execute the first three tasks", -> + @task.calledThrice.should.equal true + + it "should execute the final task", -> + @finalTask.called.should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + it "should finish with worker count of three", -> + expect(@RateLimiter.ActiveWorkerCount).to.equal 3 From fa089dc28d4e78f051d89fd6671e7cf8958b45cc Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 30 May 2017 14:36:51 +0100 Subject: [PATCH 286/769] avoid graphite drift in active worker count --- .../document-updater/app/coffee/RateLimitManager.coffee | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RateLimitManager.coffee b/services/document-updater/app/coffee/RateLimitManager.coffee index ce61232af3..118323f6f1 100644 --- a/services/document-updater/app/coffee/RateLimitManager.coffee +++ b/services/document-updater/app/coffee/RateLimitManager.coffee @@ -11,17 +11,19 @@ module.exports = class RateLimiter _adjustLimitUp: () -> @CurrentWorkerLimit += 0.1 # allow target worker limit to increase gradually + Metrics.gauge "currentLimit", Math.ceil(@CurrentWorkerLimit) _adjustLimitDown: () -> @CurrentWorkerLimit = Math.max @BaseWorkerCount, (@CurrentWorkerLimit * 0.9) logger.log {currentLimit: Math.ceil(@CurrentWorkerLimit)}, "reducing rate limit" + Metrics.gauge "currentLimit", Math.ceil(@CurrentWorkerLimit) _trackAndRun: (task, callback = () ->) -> @ActiveWorkerCount++ - Metrics.gauge "processingUpdates", "+1" # increments/decrements gauge with +/- sign + Metrics.gauge "processingUpdates", @ActiveWorkerCount task (err) => @ActiveWorkerCount-- - Metrics.gauge "processingUpdates", "-1" + Metrics.gauge 
"processingUpdates", @ActiveWorkerCount callback(err) run: (task, callback) -> From bed64d26c89d430efb0a6f2792bb4fd19f08b50e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 31 May 2017 15:33:59 +0100 Subject: [PATCH 287/769] check if doc is missing from DocsIn set --- .../document-updater/app/coffee/RedisManager.coffee | 12 +++++++++++- .../coffee/RedisManager/RedisManagerTests.coffee | 1 + 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 5e2cc5c84b..365156ebf6 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -113,7 +113,17 @@ module.exports = RedisManager = if doc_project_id? and doc_project_id isnt project_id logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc not in project" return callback(new Errors.NotFoundError("document not found")) - callback null, docLines, version, ranges + + # doc is not in redis, bail out + if !lines? + return callback null, docLines, version, ranges + + # doc should be in project set, check if missing (workaround for missing docs from putDoc) + rclient.sadd keys.docsInProject(project_id:project_id), doc_id, (error, result) -> + return callback(error) if error? + if result isnt 0 # doc should already be in set + logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" + callback null, docLines, version, ranges getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 26eaaf0892..b97bdd5a0d 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -58,6 +58,7 @@ describe "RedisManager", -> @json_ranges = JSON.stringify @ranges @rclient.get = sinon.stub() @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges]) + @rclient.sadd = sinon.stub().yields() describe "successfully", -> beforeEach -> From 12e8eaa9b6a8b240c27940a9acf90695c06833a6 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 31 May 2017 16:08:33 +0100 Subject: [PATCH 288/769] fix bug in doclines check --- services/document-updater/app/coffee/RedisManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 365156ebf6..935c569d94 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -115,7 +115,7 @@ module.exports = RedisManager = return callback(new Errors.NotFoundError("document not found")) # doc is not in redis, bail out - if !lines? + if !docLines? 
return callback null, docLines, version, ranges # doc should be in project set, check if missing (workaround for missing docs from putDoc) From 729216c9b147a66498073a519481a31eef281782 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 31 May 2017 16:08:45 +0100 Subject: [PATCH 289/769] add unit tests for DocsIn check --- .../RedisManager/RedisManagerTests.coffee | 41 ++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index b97bdd5a0d..d57f507865 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -58,7 +58,7 @@ describe "RedisManager", -> @json_ranges = JSON.stringify @ranges @rclient.get = sinon.stub() @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges]) - @rclient.sadd = sinon.stub().yields() + @rclient.sadd = sinon.stub().yields(null, 0) describe "successfully", -> beforeEach -> @@ -84,6 +84,11 @@ describe "RedisManager", -> .calledWith("Ranges:#{@doc_id}") .should.equal true + it "should check if the document is in the DocsIn set", -> + @rclient.sadd + .calledWith("DocsIn:#{@project_id}") + .should.equal true + it 'should return the document', -> @callback .calledWith(null, @lines, @version, @ranges) @@ -93,6 +98,40 @@ describe "RedisManager", -> @logger.error.calledWith() .should.equal false + describe "when the document is not present", -> + beforeEach -> + @rclient.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null]) + @rclient.sadd = sinon.stub().yields() + @RedisManager.getDoc @project_id, @doc_id, @callback + + it "should not check if the document is in the DocsIn set", -> + @rclient.sadd + .calledWith("DocsIn:#{@project_id}") + .should.equal false + + it 'should return an empty result', -> + @callback + .calledWith(null, null, 0, {}) + .should.equal true + + it 'should not log any errors', -> + @logger.error.calledWith() + .should.equal false + + describe "when the document is missing from the DocsIn set", -> + beforeEach -> + @rclient.sadd = sinon.stub().yields(null, 1) + @RedisManager.getDoc @project_id, @doc_id, @callback + + it 'should log an error', -> + @logger.error.calledWith() + .should.equal true + + it 'should return the document', -> + @callback + .calledWith(null, @lines, @version, @ranges) + .should.equal true + describe "with a corrupted document", -> beforeEach -> @badHash = "INVALID-HASH-VALUE" From 673f4228ced9c75c0ad21d1bf4d3e23bc4708fc5 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 5 Jun 2017 16:29:58 +0100 Subject: [PATCH 290/769] add metric for unlock errors sentry does not record them reliably, due to rate limiting --- services/document-updater/app/coffee/LockManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index d918dc4332..716cac2291 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -87,6 +87,7 @@ module.exports = LockManager = else if result? 
and result isnt 1 # successful unlock should release exactly one key profile.log("unlockScript:expired-lock").end() logger.error {doc_id:doc_id, key:key, lockValue:lockValue, redis_err:err, redis_result:result}, "unlocking error" + metrics.inc "unlock-error" return callback(new Error("tried to release timed out lock")) else profile.log("unlockScript:ok").end() From 1ef258c8788465ba3811f252e53f6f7806609fe3 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Jun 2017 11:34:42 +0100 Subject: [PATCH 291/769] add a timeout on the redis getDoc request --- .../app/coffee/RedisManager.coffee | 13 +++++++++-- .../RedisManager/RedisManagerTests.coffee | 22 +++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 935c569d94..523780883f 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -7,6 +7,12 @@ metrics = require('./Metrics') Errors = require "./Errors" crypto = require "crypto" +# Sometimes Redis calls take an unexpectedly long time. We have to be +# quick with Redis calls because we're holding a lock that expires +# after 30 seconds. We can't let any errors in the rest of the stack +# hold us up, and need to bail out quickly if there is a problem. +MAX_REDIS_REQUEST_LENGTH = 5000 # 5 seconds + # Make times easy to read minutes = 60 # seconds for Redis expire @@ -93,9 +99,12 @@ module.exports = RedisManager = multi.get keys.projectKey(doc_id:doc_id) multi.get keys.ranges(doc_id:doc_id) multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges])-> - timer.done() + timeSpan = timer.done() return callback(error) if error? - + # check if request took too long and bail out. only do this for + # get, because it is the first call in each update, so if this + # passes we'll assume others have a reasonable chance to succeed. + return callback(new Error("redis getDoc exceeded timeout")) if timeSpan > MAX_REDIS_REQUEST_LENGTH # check sha1 hash value if present if docLines? and storedHash? 
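# Note: the hash comparison entered here guards against truncated or
# corrupted doclines coming back from redis. _computeHash itself is outside
# this hunk; a sketch of what such a helper plausibly looks like (sha1 is
# per the comment above, the utf8 encoding is an assumption):
crypto = require "crypto"
computeHash = (docLines) ->
  crypto.createHash("sha1").update(docLines, "utf8").digest("hex")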
computedHash = RedisManager._computeHash(docLines) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index d57f507865..ecd6ecdf73 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -39,7 +39,11 @@ describe "RedisManager", -> "./Metrics": @metrics = inc: sinon.stub() Timer: class Timer + constructor: () -> + this.start = new Date() done: () -> + timeSpan = new Date - this.start + return timeSpan "./Errors": Errors globals: JSON: @JSON = JSON @@ -148,6 +152,24 @@ describe "RedisManager", -> .should.equal true + describe "with a slow request to redis", -> + beforeEach -> + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges]) + @clock = sinon.useFakeTimers(); + @rclient.exec = (cb) => + @clock.tick(6000); + cb(null, [@jsonlines, @version, @another_project_id, @json_ranges]) + + @RedisManager.getDoc @project_id, @doc_id, @callback + + afterEach -> + @clock.restore() + + it 'should return an error', -> + @callback + .calledWith(new Error("redis getDoc exceeded timeout")) + .should.equal true + describe "getDoc with an invalid project id", -> beforeEach -> @another_project_id = "project-id-456" From 2e5d57cd994b168cbf4c291a30ed776790dbd8df Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Jun 2017 11:47:28 +0100 Subject: [PATCH 292/769] don't increase rate limit when tasks are failing --- services/document-updater/app/coffee/RateLimitManager.coffee | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RateLimitManager.coffee b/services/document-updater/app/coffee/RateLimitManager.coffee index 118323f6f1..7128b5d988 100644 --- a/services/document-updater/app/coffee/RateLimitManager.coffee +++ b/services/document-updater/app/coffee/RateLimitManager.coffee @@ -34,5 +34,6 @@ module.exports = class RateLimiter @_adjustLimitDown() else logger.log {active: @ActiveWorkerCount, currentLimit: Math.ceil(@CurrentWorkerLimit)}, "hit rate limit" - @_trackAndRun task, callback # only return after task completes - @_adjustLimitUp() + @_trackAndRun task, (err) => + @_adjustLimitUp() if !err? 
# don't increment rate limit if there was an error + callback(err) # only return after task completes From a3d726061911f0fa721b31245ca86dcf58301360 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 8 Jun 2017 16:43:06 +0100 Subject: [PATCH 293/769] Ensure expires is called after the key is created --- .../document-updater/app/coffee/RedisManager.coffee | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 935c569d94..b399629a07 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -200,15 +200,16 @@ module.exports = RedisManager = multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines # index 0 multi.set keys.docVersion(doc_id:doc_id), newVersion # index 1 multi.set keys.docHash(doc_id:doc_id), newHash # index 2 - multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL # index 3 - multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 # index 4 + multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 # index 3 if ranges? - multi.set keys.ranges(doc_id:doc_id), ranges # index 5 + multi.set keys.ranges(doc_id:doc_id), ranges # index 4 else - multi.del keys.ranges(doc_id:doc_id) # also index 5 - # push the ops last so we can get the lengths at fixed index positions 6 and 7 + multi.del keys.ranges(doc_id:doc_id) # also index 4 + # push the ops last so we can get the lengths at fixed index position 7 if jsonOps.length > 0 - multi.rpush keys.docOps(doc_id: doc_id), jsonOps... # index 6 + multi.rpush keys.docOps(doc_id: doc_id), jsonOps... # index 5 + # expire must come after rpush since before it will be a no-op if the list is empty + multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL # index 6 multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... # index 7 multi.exec (error, result) -> return callback(error) if error? 
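[Note on the patch above: redis EXPIRE is a no-op on a key that does not
exist, while RPUSH creates the list on first use, so issuing the EXPIRE
before the first RPUSH left freshly created DocOps lists with no TTL at
all. A sketch of the safe ordering, assuming a node_redis-style client and
the DocOps key naming used elsewhere in this service:

    multi = rclient.multi()
    multi.rpush "DocOps:#{doc_id}", jsonOps...   # creates the list if it was empty
    multi.expire "DocOps:#{doc_id}", 60 * 60     # TTL sticks now that the key exists
    multi.exec (error) ->
      return callback(error) if error?
      callback()

The expire_docops script added two patches below cleans up lists that were
created without a TTL before this fix.]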
From 770b53f6afc474c2299d20b2c5452dd109f58917 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 8 Jun 2017 16:43:27 +0100 Subject: [PATCH 294/769] Add missing mocks to speed up tests and clean up output --- .../unit/coffee/DispatchManager/DispatchManagerTests.coffee | 1 + .../test/unit/coffee/LockManager/CheckingTheLock.coffee | 3 +++ .../test/unit/coffee/LockManager/ReleasingTheLock.coffee | 3 +++ .../test/unit/coffee/LockManager/getLockTests.coffee | 3 +++ .../test/unit/coffee/LockManager/tryLockTests.coffee | 3 +++ .../test/unit/coffee/UpdateManager/UpdateManagerTests.coffee | 3 +++ 6 files changed, 16 insertions(+) diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index dcd643fcfe..b749e83a5d 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -13,6 +13,7 @@ describe "DispatchManager", -> redis: realtime: {} "redis-sharelatex": @redis = {} + "./RateLimitManager": {} @callback = sinon.stub() @RateLimiter = { run: (task,cb) -> task(cb) } # run task without rate limit diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee index f6670c8b35..a080e563f1 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee @@ -19,6 +19,9 @@ describe 'LockManager - checking the lock', ()-> auth:-> exists: existsStub "./Metrics": {inc: () ->} + "./Profiler": class Profiler + log: sinon.stub().returns { end: sinon.stub() } + end: sinon.stub() LockManager = SandboxedModule.require(modulePath, requires: mocks) it 'should return true if the key does not exists', (done)-> diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 36c458cb71..06dd2aa6bf 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -26,6 +26,9 @@ describe 'LockManager - releasing the lock', ()-> blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" } "./Metrics": {inc: () ->} + "./Profiler": class Profiler + log: sinon.stub().returns { end: sinon.stub() } + end: sinon.stub() @LockManager = SandboxedModule.require(modulePath, requires: mocks) @lockValue = "lock-value-stub" diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee index 84cc3208a3..7093ab223a 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee @@ -12,6 +12,9 @@ describe 'LockManager - getting the lock', -> createClient : () => auth:-> "./Metrics": {inc: () ->} + "./Profiler": class Profiler + log: sinon.stub().returns { end: sinon.stub() } + end: sinon.stub() @callback = sinon.stub() @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index b3ff7cdd7e..7b52f416ab 100644 --- 
a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -19,6 +19,9 @@ describe 'LockManager - trying the lock', -> key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" } + "./Profiler": class Profiler + log: sinon.stub().returns { end: sinon.stub() } + end: sinon.stub() @callback = sinon.stub() @doc_id = "doc-id-123" diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 2de6e93e44..4f456992f0 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -22,6 +22,9 @@ describe "UpdateManager", -> "settings-sharelatex": Settings = {} "./DocumentManager": @DocumentManager = {} "./RangesManager": @RangesManager = {} + "./Profiler": class Profiler + log: sinon.stub().returns { end: sinon.stub() } + end: sinon.stub() describe "processOutstandingUpdates", -> beforeEach -> From 7b6966899bdb6b374d02dfc30ed3e4e93594c1a6 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 14 Jun 2017 12:09:34 +0100 Subject: [PATCH 295/769] Add script to expire existing DocOps lists --- .../document-updater/expire_docops.coffee | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 services/document-updater/expire_docops.coffee diff --git a/services/document-updater/expire_docops.coffee b/services/document-updater/expire_docops.coffee new file mode 100644 index 0000000000..1eb7d93c8f --- /dev/null +++ b/services/document-updater/expire_docops.coffee @@ -0,0 +1,44 @@ +Settings = require "settings-sharelatex" +rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +keys = Settings.redis.documentupdater.key_schema +async = require "async" +RedisManager = require "./app/js/RedisManager" + +getKeysFromNode = (node, pattern, callback) -> + cursor = 0 # redis iterator + keySet = {} # use hash to avoid duplicate results + # scan over all keys looking for pattern + doIteration = (cb) -> + node.scan cursor, "MATCH", pattern, "COUNT", 1000, (error, reply) -> + return callback(error) if error? + [cursor, keys] = reply + console.log "SCAN", keys.length + for key in keys + keySet[key] = true + if cursor == '0' # note redis returns string result not numeric + return callback(null, Object.keys(keySet)) + else + doIteration() + doIteration() + +getKeys = (pattern, callback) -> + nodes = rclient.nodes?('master') || [ rclient ]; + console.log "GOT NODES", nodes.length + doKeyLookupForNode = (node, cb) -> + getKeysFromNode node, pattern, cb + async.concatSeries nodes, doKeyLookupForNode, callback + +TTL = 60 * 60 # 1 hour +expireDocOps = (callback) -> + getKeys keys.docOps(doc_id: "*"), (error, keys) -> + async.mapSeries keys, + (key, cb) -> + console.log "EXPIRE #{key} #{RedisManager.DOC_OPS_TTL}" + rclient.expire key, RedisManager.DOC_OPS_TTL, cb + callback + +setTimeout () -> # Give redis a chance to connect + expireDocOps (error) -> + throw error if error? 
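# Note on getKeysFromNode above: SCAN returns the next cursor as a *string*,
# so the comparison against '0' is deliberate; COUNT is only a hint to
# redis, not a guaranteed page size; and SCAN can return the same key more
# than once while the keyspace is changing, which is why results are
# collected in the keySet hash rather than appended to a plain array.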
+ process.exit() +, 1000 \ No newline at end of file From 963e513057db1fe2243cb414c54434951468b4ec Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 23 Jun 2017 15:50:21 +0100 Subject: [PATCH 296/769] add a timeout for getPreviousDocOps it uses several redis operations and this makes it prone to subsequent timeouts if getDoc succeeds but is slow --- .../app/coffee/RedisManager.coffee | 3 ++ .../RedisManager/RedisManagerTests.coffee | 29 +++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 22ed27d7f8..edf56c3d50 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -141,6 +141,7 @@ module.exports = RedisManager = callback null, version getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) -> + timer = new metrics.Timer("redis.get-prev-docops") rclient.llen keys.docOps(doc_id: doc_id), (error, length) -> return callback(error) if error? rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -168,6 +169,8 @@ module.exports = RedisManager = ops = jsonOps.map (jsonOp) -> JSON.parse jsonOp catch e return callback(e) + timeSpan = timer.done() + return callback(new Error("redis getPreviousDocOps exceeded timeout")) if timeSpan > MAX_REDIS_REQUEST_LENGTH callback null, ops DOC_OPS_TTL: 60 * minutes diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index ecd6ecdf73..d0521fa87e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -265,6 +265,35 @@ describe "RedisManager", -> it "should log out the problem", -> @logger.warn.called.should.equal true + describe "with a slow request to redis", -> + beforeEach -> + @first_version_in_redis = 30 + @version = 70 + @length = @version - @first_version_in_redis + @start = 50 + @end = 60 + @ops = [ + { "mock": "op-1" }, + { "mock": "op-2" } + ] + @jsonOps = @ops.map (op) -> JSON.stringify op + @rclient.llen = sinon.stub().callsArgWith(1, null, @length) + @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) + @clock = sinon.useFakeTimers(); + @rclient.lrange = (key, start, end, cb) => + @clock.tick(6000); + cb(null, @jsonOps) + @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + + afterEach -> + @clock.restore() + + it 'should return an error', -> + @callback + .calledWith(new Error("redis getPreviousDocOps exceeded timeout")) + .should.equal true + + describe "updateDocument", -> beforeEach -> @rclient.set = sinon.stub() From 8e1e14c9ca62919b9c0c86280e766fe3d98d3ef8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 26 Jun 2017 13:49:44 +0100 Subject: [PATCH 297/769] update to redis-sharelatex 1.0.3 adds keepalive support --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 07deac38bb..c2976e2934 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -14,7 +14,7 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", "metrics-sharelatex": 
"git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", - "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.2", + "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.3", "request": "2.25.0", "sandboxed-module": "~0.2.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", From 59ace9d5c542459d60fbb336dfd50ebe192be126 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 26 Jun 2017 16:36:05 +0100 Subject: [PATCH 298/769] avoid long lines in timeout check --- services/document-updater/app/coffee/RedisManager.coffee | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index edf56c3d50..be5018297d 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -104,7 +104,9 @@ module.exports = RedisManager = # check if request took too long and bail out. only do this for # get, because it is the first call in each update, so if this # passes we'll assume others have a reasonable chance to succeed. - return callback(new Error("redis getDoc exceeded timeout")) if timeSpan > MAX_REDIS_REQUEST_LENGTH + if timeSpan > MAX_REDIS_REQUEST_LENGTH + error = new Error("redis getDoc exceeded timeout") + return callback(error) # check sha1 hash value if present if docLines? and storedHash? computedHash = RedisManager._computeHash(docLines) @@ -170,7 +172,9 @@ module.exports = RedisManager = catch e return callback(e) timeSpan = timer.done() - return callback(new Error("redis getPreviousDocOps exceeded timeout")) if timeSpan > MAX_REDIS_REQUEST_LENGTH + if timeSpan > MAX_REDIS_REQUEST_LENGTH + error = new Error("redis getPreviousDocOps exceeded timeout") + return callback(error) callback null, ops DOC_OPS_TTL: 60 * minutes From ffea31290cd5cd1db29772d0745c465e4461e04e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 11 Jul 2017 14:25:26 +0100 Subject: [PATCH 299/769] return total time from the profile .end() method --- .../document-updater/app/coffee/Profiler.coffee | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/services/document-updater/app/coffee/Profiler.coffee b/services/document-updater/app/coffee/Profiler.coffee index dc88345334..1d85f9bd98 100644 --- a/services/document-updater/app/coffee/Profiler.coffee +++ b/services/document-updater/app/coffee/Profiler.coffee @@ -23,11 +23,12 @@ module.exports = class Profiler end: (message) -> totalTime = deltaMs(@t, @t0) - return if totalTime < @LOG_CUTOFF_TIME # skip anything less than cutoff - args = {} - for k,v of @args - args[k] = v - args.updateTimes = @updateTimes - args.start = @start - args.end = new Date() - logger.log args, @name + if totalTime > @LOG_CUTOFF_TIME # log anything greater than cutoff + args = {} + for k,v of @args + args[k] = v + args.updateTimes = @updateTimes + args.start = @start + args.end = new Date() + logger.log args, @name + return totalTime From 54c0fc1180b20ce52d943459d3deadab561af304 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 11 Jul 2017 14:25:59 +0100 Subject: [PATCH 300/769] put a 5 second timeout in getting redis lock --- services/document-updater/app/coffee/LockManager.coffee | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee 
b/services/document-updater/app/coffee/LockManager.coffee index 716cac2291..cd6df46878 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -14,6 +14,8 @@ PID = process.pid RND = crypto.randomBytes(4).toString('hex') COUNT = 0 +MAX_REDIS_REQUEST_LENGTH = 5000 # 5 seconds + module.exports = LockManager = LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock MAX_TEST_INTERVAL: 1000 # back off to 1s between each test of the lock @@ -37,8 +39,11 @@ module.exports = LockManager = return callback(err) if err? if gotLock == "OK" metrics.inc "doc-not-blocking" - profile.log("got lock").end() - callback err, true, lockValue + timeTaken = profile.log("got lock").end() + if timeTaken > MAX_REDIS_REQUEST_LENGTH + callback err, false # took too long to get the lock, bail out + else + callback err, true, lockValue else metrics.inc "doc-blocking" profile.log("doc is locked").end() From bb0dc4b4d5a2885529b2c804ece5f1460cf78788 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 12 Jul 2017 10:45:23 +0100 Subject: [PATCH 301/769] release the lock if it took too long to acquire it --- services/document-updater/app/coffee/LockManager.coffee | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index cd6df46878..0aa1e3695e 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -41,7 +41,10 @@ module.exports = LockManager = metrics.inc "doc-not-blocking" timeTaken = profile.log("got lock").end() if timeTaken > MAX_REDIS_REQUEST_LENGTH - callback err, false # took too long to get the lock, bail out + # took too long, so try to free the lock + LockManager.releaseLock doc_id, lockValue, (err, result) -> + return callback(err) if err? 
# error freeing lock + callback null, false # tell caller they didn't get the lock else callback err, true, lockValue else From d2730c8d6b7da77acb408d22c573b3b97ddae1b1 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 12 Jul 2017 10:45:44 +0100 Subject: [PATCH 302/769] unit tests for locking timeouts --- .../coffee/LockManager/tryLockTests.coffee | 38 ++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee index 7b52f416ab..82de2f45b8 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee @@ -19,7 +19,7 @@ describe 'LockManager - trying the lock', -> key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" } - "./Profiler": class Profiler + "./Profiler": @Profiler = class Profiler log: sinon.stub().returns { end: sinon.stub() } end: sinon.stub() @@ -48,3 +48,39 @@ describe 'LockManager - trying the lock', -> it "should return the callback with false", -> @callback.calledWith(null, false).should.equal true + describe "when it takes a long time for redis to set the lock", -> + beforeEach -> + @Profiler.prototype.end = () -> 7000 # take a long time + @Profiler.prototype.log = sinon.stub().returns { end: @Profiler.prototype.end } + @lockValue = "mock-lock-value" + @LockManager.randomLock = sinon.stub().returns @lockValue + @LockManager.releaseLock = sinon.stub().callsArgWith(2,null) + @set.callsArgWith(5, null, "OK") + + describe "in all cases", -> + beforeEach -> + @LockManager.tryLock @doc_id, @callback + + it "should set the lock key with an expiry if it is not set", -> + @set.calledWith("Blocking:#{@doc_id}", @lockValue, "EX", 30, "NX") + .should.equal true + + it "should try to release the lock", -> + @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true + + describe "if the lock is released successfully", -> + beforeEach -> + @LockManager.releaseLock = sinon.stub().callsArgWith(2,null) + @LockManager.tryLock @doc_id, @callback + + it "should return the callback with false", -> + @callback.calledWith(null, false).should.equal true + + describe "if the lock has already timed out", -> + beforeEach -> + @LockManager.releaseLock = sinon.stub().callsArgWith(2, new Error("tried to release timed out lock")) + @LockManager.tryLock @doc_id, @callback + + it "should return the callback with an error", -> + e = new Error("tried to release timed out lock") + @callback.calledWith(e).should.equal true From c21cc013f3fe9fb9c60da7c6b91124c7bd0d1d2b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 12 Jul 2017 10:47:23 +0100 Subject: [PATCH 303/769] use null instead of err in callbacks on success --- .../document-updater/app/coffee/LockManager.coffee | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.coffee index 0aa1e3695e..8f62e46ccb 100644 --- a/services/document-updater/app/coffee/LockManager.coffee +++ b/services/document-updater/app/coffee/LockManager.coffee @@ -46,11 +46,11 @@ module.exports = LockManager = return callback(err) if err? 
# error freeing lock callback null, false # tell caller they didn't get the lock else - callback err, true, lockValue + callback null, true, lockValue else metrics.inc "doc-blocking" profile.log("doc is locked").end() - callback err, false + callback null, false getLock: (doc_id, callback = (error, lockValue) ->) -> startTime = Date.now() @@ -81,10 +81,10 @@ module.exports = LockManager = exists = parseInt exists if exists == 1 metrics.inc "doc-blocking" - callback err, false + callback null, false else metrics.inc "doc-not-blocking" - callback err, true + callback null, true releaseLock: (doc_id, lockValue, callback)-> key = keys.blockingKey(doc_id:doc_id) @@ -99,4 +99,4 @@ module.exports = LockManager = return callback(new Error("tried to release timed out lock")) else profile.log("unlockScript:ok").end() - callback(err,result) + callback(null,result) From 0d46c3e2b31a65dd5c69bf11c9e8572221da3c0f Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 17 Jul 2017 10:08:21 +0100 Subject: [PATCH 304/769] WIP: auto-retry web requests --- .../document-updater/app/coffee/PersistenceManager.coffee | 4 +++- services/document-updater/package.json | 1 + .../coffee/PersistenceManager/PersistenceManagerTests.coffee | 4 +++- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 457627982f..31298e31b6 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -1,4 +1,4 @@ -request = require "request" +request = require "requestretry" Settings = require "settings-sharelatex" Errors = require "./Errors" Metrics = require "./Metrics" @@ -28,6 +28,7 @@ module.exports = PersistenceManager = sendImmediately: true jar: false timeout: MAX_HTTP_REQUEST_LENGTH + maxAttempts: 2 # for requestretry }, (error, res, body) -> return callback(error) if error? if res.statusCode >= 200 and res.statusCode < 300 @@ -65,6 +66,7 @@ module.exports = PersistenceManager = sendImmediately: true jar: false timeout: MAX_HTTP_REQUEST_LENGTH + maxAttempts: 2 # for requestretry }, (error, res, body) -> return callback(error) if error? 
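# Note: the retry still fits inside the lock. Each attempt is capped by the
# 5s MAX_HTTP_REQUEST_LENGTH timeout above, so two attempts cost roughly
# 2 x 5s plus whatever delay requestretry inserts between them -- still
# under the 30-second lock expiry described at the top of this file.
# Back-of-envelope figure (illustrative only):
worstCaseMs = 2 * MAX_HTTP_REQUEST_LENGTH    # = 10000ms, before inter-attempt delay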
if res.statusCode >= 200 and res.statusCode < 300 diff --git a/services/document-updater/package.json b/services/document-updater/package.json index c2976e2934..f1787e4f41 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -16,6 +16,7 @@ "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.3", "request": "2.25.0", + "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", "sinon": "~1.5.2", diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index 19a3d547a2..cd44ce52b3 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -8,7 +8,7 @@ Errors = require "../../../../app/js/Errors" describe "PersistenceManager", -> beforeEach -> @PersistenceManager = SandboxedModule.require modulePath, requires: - "request": @request = sinon.stub() + "requestretry": @request = sinon.stub() "settings-sharelatex": @Settings = {} "./Metrics": @Metrics = Timer: class Timer @@ -50,6 +50,7 @@ describe "PersistenceManager", -> sendImmediately: true jar: false timeout: 5000 + maxAttempts: 2 }) .should.equal true @@ -129,6 +130,7 @@ describe "PersistenceManager", -> sendImmediately: true jar: false timeout: 5000 + maxAttempts: 2 }) .should.equal true From dc8d08c030fcb3a6436e9ca5bc7575b0fde4d181 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 17 Jul 2017 11:13:21 +0100 Subject: [PATCH 305/769] Update acceptance tests to reflect retry logic --- services/document-updater/Gruntfile.coffee | 2 +- .../test/acceptance/coffee/FlushingDocsTests.coffee | 13 +++++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee index 2e0e12dd66..698ee9c733 100644 --- a/services/document-updater/Gruntfile.coffee +++ b/services/document-updater/Gruntfile.coffee @@ -57,7 +57,7 @@ module.exports = (grunt) -> options: reporter: grunt.option('reporter') or 'spec' grep: grunt.option("grep") - timeout: 10000 + timeout: 50000 shell: fullAcceptanceTests: diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index f732d69bb6..4c9c893e44 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -55,25 +55,26 @@ describe "Flushing a doc to Mongo", -> it "should not flush the doc to the web api", -> MockWebApi.setDocument.called.should.equal false - describe "when the web api http request takes a long time", -> + describe "when the web api http request takes a long time on first request", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - @timeout = 10000 MockWebApi.insertDoc @project_id, @doc_id, { lines: @lines version: @version } + t = 30000 sinon.stub MockWebApi, "setDocument", (project_id, doc_id, lines, version, ranges, callback = (error) ->) -> - setTimeout callback, 30000 + setTimeout callback, t + t = 0 
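# Note: the closure over `t` above makes only the *first* setDocument call
# hang for 30s (well past the 5s request timeout); the automatic retry is
# answered immediately. That is why the assertion below now expects a 204
# success where the pre-retry version of this test expected a 500.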
DocUpdaterClient.preloadDoc @project_id, @doc_id, done after -> MockWebApi.setDocument.restore() - it "should return quickly(ish)", (done) -> + it "should still work", (done) -> start = Date.now() DocUpdaterClient.flushDoc @project_id, @doc_id, (error, res, doc) => - res.statusCode.should.equal 500 + res.statusCode.should.equal 204 delta = Date.now() - start expect(delta).to.be.below 20000 - done() \ No newline at end of file + done() From c3465cf3d494d429807d9e8bef8e3d4ac3a0c0a8 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 18 Jul 2017 15:28:18 +0100 Subject: [PATCH 306/769] Reduce retry delay to 10ms, and refactor --- .../document-updater/app/coffee/PersistenceManager.coffee | 7 ++++--- .../PersistenceManager/PersistenceManagerTests.coffee | 6 +++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 31298e31b6..1e168f5495 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -1,8 +1,11 @@ -request = require "requestretry" Settings = require "settings-sharelatex" Errors = require "./Errors" Metrics = require "./Metrics" logger = require "logger-sharelatex" +request = (require("requestretry")).defaults({ + maxAttempts: 2 + retryDelay: 10 +}) # We have to be quick with HTTP calls because we're holding a lock that # expires after 30 seconds. We can't let any errors in the rest of the stack @@ -28,7 +31,6 @@ module.exports = PersistenceManager = sendImmediately: true jar: false timeout: MAX_HTTP_REQUEST_LENGTH - maxAttempts: 2 # for requestretry }, (error, res, body) -> return callback(error) if error? if res.statusCode >= 200 and res.statusCode < 300 @@ -66,7 +68,6 @@ module.exports = PersistenceManager = sendImmediately: true jar: false timeout: MAX_HTTP_REQUEST_LENGTH - maxAttempts: 2 # for requestretry }, (error, res, body) -> return callback(error) if error? 
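# Note: `.defaults` returns a wrapped request function with the retry
# policy baked in, so getDoc and setDoc share one policy instead of each
# repeating maxAttempts. A usage sketch (URL invented for illustration):
request = require("requestretry").defaults {maxAttempts: 2, retryDelay: 10}
request {url: "http://web/project/p1/doc/d1", json: true}, (error, res, body) ->
  console.log error ? res.statusCode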
if res.statusCode >= 200 and res.statusCode < 300 diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index cd44ce52b3..d6fa519b4c 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -7,8 +7,10 @@ Errors = require "../../../../app/js/Errors" describe "PersistenceManager", -> beforeEach -> + @request = sinon.stub() + @request.defaults = () => @request @PersistenceManager = SandboxedModule.require modulePath, requires: - "requestretry": @request = sinon.stub() + "requestretry": @request "settings-sharelatex": @Settings = {} "./Metrics": @Metrics = Timer: class Timer @@ -50,7 +52,6 @@ describe "PersistenceManager", -> sendImmediately: true jar: false timeout: 5000 - maxAttempts: 2 }) .should.equal true @@ -130,7 +131,6 @@ describe "PersistenceManager", -> sendImmediately: true jar: false timeout: 5000 - maxAttempts: 2 }) .should.equal true From 010958509291be90ec33432ee629f3bf9f7308af Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 28 Jul 2017 16:34:32 +0100 Subject: [PATCH 307/769] provide endpoint for current project docs in redis --- services/document-updater/app.coffee | 1 + .../app/coffee/HttpController.coffee | 17 ++++++++++++ .../app/coffee/ProjectManager.coffee | 27 +++++++++++++++++++ .../app/coffee/RedisManager.coffee | 7 ++++- 4 files changed, 51 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 49cfa1f1b8..288995a201 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -38,6 +38,7 @@ app.param 'doc_id', (req, res, next, doc_id) -> next new Error("invalid doc id") app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc +app.get '/project/:project_id', HttpController.getProjectDocs app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 13e618e734..4dad5b7333 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -37,6 +37,23 @@ module.exports = HttpController = size += (line.length + 1) return size + getProjectDocs: (req, res, next = (error) ->) -> + project_id = req.params.project_id + # filter is string of existing docs "id:version,id:version,..." + filterItems = req.query?.filter?.split(',') or [] + logger.log project_id: project_id, filter: filterItems, "getting docs via http" + timer = new Metrics.Timer("http.getAllDocs") + excludeVersions = {} + for item in filterItems + [id,version] = item?.split(':') + excludeVersions[id] = version + logger.log {project_id: project_id, excludeVersions: excludeVersions}, "excluding versions" + ProjectManager.getProjectDocs project_id, excludeVersions, (error, result) -> + timer.done() + return next(error) if error? 
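# Note: a hypothetical exchange with the endpoint added in this patch (ids
# and versions are placeholders):
#   GET /project/:project_id?filter=doc1:41,doc2:12
# is meant to return only docs whose in-redis version differs from the
# filter, e.g. [{_id: "doc2", lines: [...], rev: 13}]. One caveat worth
# flagging: getDocVersion parseInts the version while the filter values
# arrive as strings from split(':'), so the strict `is` comparison in the
# skip test only matches if the types are reconciled first.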
+ logger.log project_id: project_id, result: result, "got docs via http" + res.send result + setDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index cd4c66ae8d..e1d346204b 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -56,3 +56,30 @@ module.exports = ProjectManager = callback new Error("Errors deleting docs. See log for details") else callback(null) + + getProjectDocs: (project_id, excludeVersions = {}, _callback = (error) ->) -> + timer = new Metrics.Timer("projectManager.getProjectDocs") + callback = (args...) -> + timer.done() + _callback(args...) + + RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> + return callback(error) if error? + jobs = [] + docs = [] + for doc_id in doc_ids or [] + do (doc_id) -> + jobs.push (cb) -> + # check the doc version first + RedisManager.getDocVersion doc_id, (error, version) -> + return cb(error) if error? + # skip getting the doc if we already have that version + return cb() if version is excludeVersions[doc_id] + # otherwise get the doc lines from redis + RedisManager.getDocLines doc_id, (error, lines) -> + return cb(error) if error? + docs.push {_id: doc_id, lines: lines, rev: version} + cb() + async.series jobs, (error) -> + return callback(error) if error? + callback(null, docs) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index be5018297d..5d3ae922b5 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -142,6 +142,11 @@ module.exports = RedisManager = version = parseInt(version, 10) callback null, version + getDocLines: (doc_id, callback = (error, version) ->) -> + rclient.get keys.docLines(doc_id: doc_id), (error, docLines) -> + return callback(error) if error? + callback null, docLines + getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) -> timer = new metrics.Timer("redis.get-prev-docops") rclient.llen keys.docOps(doc_id: doc_id), (error, length) -> @@ -239,7 +244,7 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback - + _serializeRanges: (ranges, callback = (error, serializedRanges) ->) -> jsonRanges = JSON.stringify(ranges) if jsonRanges? 
and jsonRanges.length > MAX_RANGES_SIZE From 54e02cd89533cc2739a526d56d831d0b1b73a2ee Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 3 Aug 2017 14:41:34 +0100 Subject: [PATCH 308/769] update docupdater endpoint to /project/id/docs --- services/document-updater/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 288995a201..a580f8f113 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -38,7 +38,7 @@ app.param 'doc_id', (req, res, next, doc_id) -> next new Error("invalid doc id") app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc -app.get '/project/:project_id', HttpController.getProjectDocs +app.get '/project/:project_id/doc', HttpController.getProjectDocs app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc From 80be5adc0d96498a9a7e6040b38dd0f6ca6fddd2 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 3 Aug 2017 14:42:08 +0100 Subject: [PATCH 309/769] change filter to exclude in getProjectDocs --- .../document-updater/app/coffee/HttpController.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 4dad5b7333..20eaa2c6f3 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -39,12 +39,12 @@ module.exports = HttpController = getProjectDocs: (req, res, next = (error) ->) -> project_id = req.params.project_id - # filter is string of existing docs "id:version,id:version,..." - filterItems = req.query?.filter?.split(',') or [] - logger.log project_id: project_id, filter: filterItems, "getting docs via http" + # exclude is string of existing docs "id:version,id:version,..." + excludeItems = req.query?.exclude?.split(',') or [] + logger.log project_id: project_id, exclude: excludeItems, "getting docs via http" timer = new Metrics.Timer("http.getAllDocs") excludeVersions = {} - for item in filterItems + for item in excludeItems [id,version] = item?.split(':') excludeVersions[id] = version logger.log {project_id: project_id, excludeVersions: excludeVersions}, "excluding versions" From 860537146b35a1947b780205d3c67c17645f28b7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 3 Aug 2017 14:42:21 +0100 Subject: [PATCH 310/769] avoid logging doclines in getProjectDocs --- services/document-updater/app/coffee/HttpController.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 20eaa2c6f3..73d5d24b23 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -51,7 +51,7 @@ module.exports = HttpController = ProjectManager.getProjectDocs project_id, excludeVersions, (error, result) -> timer.done() return next(error) if error? 
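# Note: `result` here holds the full doclines of every doc in the project,
# so logging it verbatim (the line removed below) could dump entire
# documents into the logs; the comprehension that replaces it keeps only
# "id:version" pairs, which is all that is useful for tracing.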
- logger.log project_id: project_id, result: result, "got docs via http" + logger.log project_id: project_id, result: ("#{doc._id}:#{doc.rev}" for doc in result), "got docs via http" res.send result setDoc: (req, res, next = (error) ->) -> From f5f516a910d03d1c069f1fe5736b0e35a20c73dd Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 3 Aug 2017 15:03:30 +0100 Subject: [PATCH 311/769] delete clsi state when deleting doc (if used) --- services/document-updater/app/coffee/RedisManager.coffee | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 5d3ae922b5..472098d91f 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -88,7 +88,11 @@ module.exports = RedisManager = multi.del keys.ranges(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? - rclient.srem keys.docsInProject(project_id:project_id), doc_id, callback + multi = rclient.multi() + multi.srem keys.docsInProject(project_id:project_id), doc_id + if keys.clsiState? + multi.del keys.clsiState(project_id:project_id) + multi.exec callback getDoc : (project_id, doc_id, callback = (error, lines, version, ranges) ->)-> timer = new metrics.Timer("redis.get-doc") From 9f3ec72f8126fb8db2b608f218e986b2bf8f4e16 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 7 Aug 2017 14:43:28 +0100 Subject: [PATCH 312/769] switch to single get/set method for getProjectDocs if project state hasn't changed, return the docs. Otherwise set the hash and return a 409 Conflict response. --- .../document-updater/app/coffee/Errors.coffee | 9 +++- .../app/coffee/HttpController.coffee | 15 ++++--- .../app/coffee/ProjectManager.coffee | 44 +++++++++++-------- .../app/coffee/RedisManager.coffee | 16 ++++++- 4 files changed, 57 insertions(+), 27 deletions(-) diff --git a/services/document-updater/app/coffee/Errors.coffee b/services/document-updater/app/coffee/Errors.coffee index 941bfcc9f1..e5e93fa458 100644 --- a/services/document-updater/app/coffee/Errors.coffee +++ b/services/document-updater/app/coffee/Errors.coffee @@ -12,7 +12,14 @@ OpRangeNotAvailableError = (message) -> return error OpRangeNotAvailableError.prototype.__proto__ = Error.prototype +ProjectStateChangedError = (message) -> + error = new Error(message) + error.name = "ProjectStateChangedError" + error.__proto__ = ProjectStateChangedError.prototype + return error +ProjectStateChangedError.prototype.__proto__ = Error.prototype + module.exports = Errors = NotFoundError: NotFoundError OpRangeNotAvailableError: OpRangeNotAvailableError - + ProjectStateChangedError: ProjectStateChangedError diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 73d5d24b23..8da4b68c2d 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -39,6 +39,7 @@ module.exports = HttpController = getProjectDocs: (req, res, next = (error) ->) -> project_id = req.params.project_id + projectStateHash = req.query?.state # exclude is string of existing docs "id:version,id:version,..." 
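# Note: ProjectStateChangedError (added to Errors.coffee above) uses the
# same prototype-rewiring idiom as the existing NotFoundError, so that
# `instanceof` keeps working on errors passed back through callbacks --
# which is exactly what the 409 branch below relies on. Illustrative check:
err = Errors.ProjectStateChangedError("project state changed")
console.log err instanceof Errors.ProjectStateChangedError   # => true
console.log err instanceof Error                             # => true, stack intact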
excludeItems = req.query?.exclude?.split(',') or [] logger.log project_id: project_id, exclude: excludeItems, "getting docs via http" @@ -47,12 +48,16 @@ module.exports = HttpController = for item in excludeItems [id,version] = item?.split(':') excludeVersions[id] = version - logger.log {project_id: project_id, excludeVersions: excludeVersions}, "excluding versions" - ProjectManager.getProjectDocs project_id, excludeVersions, (error, result) -> + logger.log {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions" + ProjectManager.getProjectDocs project_id, projectStateHash, excludeVersions, (error, result) -> timer.done() - return next(error) if error? - logger.log project_id: project_id, result: ("#{doc._id}:#{doc.rev}" for doc in result), "got docs via http" - res.send result + if error instanceof Errors.ProjectStateChangedError + res.send 409 # conflict + else if error? + return next(error) + else + logger.log project_id: project_id, result: ("#{doc._id}:#{doc.rev}" for doc in result), "got docs via http" + res.send result setDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index e1d346204b..80f9b84a25 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -3,6 +3,7 @@ DocumentManager = require "./DocumentManager" async = require "async" logger = require "logger-sharelatex" Metrics = require "./Metrics" +Errors = require "./Errors" module.exports = ProjectManager = flushProjectWithLocks: (project_id, _callback = (error) ->) -> @@ -57,29 +58,34 @@ module.exports = ProjectManager = else callback(null) - getProjectDocs: (project_id, excludeVersions = {}, _callback = (error) ->) -> + getProjectDocs: (project_id, projectStateHash, excludeVersions = {}, _callback = (error) ->) -> timer = new Metrics.Timer("projectManager.getProjectDocs") callback = (args...) -> timer.done() _callback(args...) - RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> + RedisManager.checkOrSetProjectState project_id, projectStateHash, (error, projectStateChanged) -> return callback(error) if error? - jobs = [] - docs = [] - for doc_id in doc_ids or [] - do (doc_id) -> - jobs.push (cb) -> - # check the doc version first - RedisManager.getDocVersion doc_id, (error, version) -> - return cb(error) if error? - # skip getting the doc if we already have that version - return cb() if version is excludeVersions[doc_id] - # otherwise get the doc lines from redis - RedisManager.getDocLines doc_id, (error, lines) -> - return cb(error) if error? - docs.push {_id: doc_id, lines: lines, rev: version} - cb() - async.series jobs, (error) -> + # we can't return docs if project structure has changed + return callback Errors.ProjectStateChangedError("project state changed") if projectStateChanged + # project structure hasn't changed, return doc content from redis + RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> return callback(error) if error? - callback(null, docs) + jobs = [] + docs = [] + for doc_id in doc_ids or [] + do (doc_id) -> + jobs.push (cb) -> + # check the doc version first + RedisManager.getDocVersion doc_id, (error, version) -> + return cb(error) if error? 
+ # skip getting the doc if we already have that version + return cb() if version is excludeVersions[doc_id] + # otherwise get the doc lines from redis + RedisManager.getDocLines doc_id, (error, lines) -> + return cb(error) if error? + docs.push {_id: doc_id, lines: lines, rev: version} + cb() + async.series jobs, (error) -> + return callback(error) if error? + callback(null, docs) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 472098d91f..b3e6132e4e 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -90,10 +90,22 @@ module.exports = RedisManager = return callback(error) if error? multi = rclient.multi() multi.srem keys.docsInProject(project_id:project_id), doc_id - if keys.clsiState? - multi.del keys.clsiState(project_id:project_id) + if keys.projectState? + multi.del keys.projectState(project_id:project_id) multi.exec callback + checkOrSetProjectState: (project_id, newState, callback = (error, stateChanged) ->) -> + if keys.projectState? + multi = rclient.multi() + multi.getset keys.projectState(project_id:project_id), newState + multi.expire keys.projectState(project_id:project_id), 30 * minutes + multi.exec (error, response) -> + return callback(error) if error? + logger.log project_id: project_id, newState:newState, oldState: response[0], "checking project state" + callback(null, response[0] isnt newState) + else + callback(null,true) + getDoc : (project_id, doc_id, callback = (error, lines, version, ranges) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() From ca7d9dce85d92a5a3de5748995557d9218e5a98d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 8 Aug 2017 09:40:39 +0100 Subject: [PATCH 313/769] fix unit tests --- .../test/unit/coffee/RedisManager/RedisManagerTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index d0521fa87e..0fe4470d32 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -551,7 +551,7 @@ describe "RedisManager", -> describe "removeDocFromMemory", -> beforeEach (done) -> @rclient.del = sinon.stub() - @rclient.srem = sinon.stub().yields() + @rclient.srem = sinon.stub() @rclient.exec.yields() @RedisManager.removeDocFromMemory @project_id, @doc_id, done From bd6b5b2c596dcab95b5f004f7b71648178040952 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 9 Aug 2017 15:29:58 +0100 Subject: [PATCH 314/769] use v instead of rev in getProjectDocs --- services/document-updater/app/coffee/ProjectManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 80f9b84a25..3ef16d03d4 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -84,7 +84,7 @@ module.exports = ProjectManager = # otherwise get the doc lines from redis RedisManager.getDocLines doc_id, (error, lines) -> return cb(error) if error? 
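# Note: the GETSET in checkOrSetProjectState above is an atomic swap -- it
# stores the new state hash and hands back the previous one in one step, so
# "has the project structure changed?" reduces to a single comparison with
# no read-then-write race. The core idea, with illustrative names:
rclient.getset "ProjectState:#{project_id}", newStateHash, (error, oldStateHash) ->
  return callback(error) if error?
  callback null, (oldStateHash isnt newStateHash)   # true => caller answers 409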
- docs.push {_id: doc_id, lines: lines, rev: version} + docs.push {_id: doc_id, lines: lines, v: version} cb() async.series jobs, (error) -> return callback(error) if error? From 47bebf963861947eef0e861fd16688072261b591 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 9 Aug 2017 16:45:08 +0100 Subject: [PATCH 315/769] no need to support optional keys.projectState --- .../app/coffee/RedisManager.coffee | 20 ++++++++----------- .../config/settings.defaults.coffee | 2 ++ 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b3e6132e4e..718eb1f5b8 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -90,21 +90,17 @@ module.exports = RedisManager = return callback(error) if error? multi = rclient.multi() multi.srem keys.docsInProject(project_id:project_id), doc_id - if keys.projectState? - multi.del keys.projectState(project_id:project_id) + multi.del keys.projectState(project_id:project_id) multi.exec callback checkOrSetProjectState: (project_id, newState, callback = (error, stateChanged) ->) -> - if keys.projectState? - multi = rclient.multi() - multi.getset keys.projectState(project_id:project_id), newState - multi.expire keys.projectState(project_id:project_id), 30 * minutes - multi.exec (error, response) -> - return callback(error) if error? - logger.log project_id: project_id, newState:newState, oldState: response[0], "checking project state" - callback(null, response[0] isnt newState) - else - callback(null,true) + multi = rclient.multi() + multi.getset keys.projectState(project_id:project_id), newState + multi.expire keys.projectState(project_id:project_id), 30 * minutes + multi.exec (error, response) -> + return callback(error) if error? 
+ logger.log project_id: project_id, newState:newState, oldState: response[0], "checking project state" + callback(null, response[0] isnt newState) getDoc : (project_id, doc_id, callback = (error, lines, version, ranges) ->)-> timer = new metrics.Timer("redis.get-doc") diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index b06b9d8bf9..838ffa19a8 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -41,6 +41,7 @@ module.exports = projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" + projectState: ({project_id}) -> "ProjectState:#{project_id}" # cluster: [{ # port: "7000" # host: "localhost" @@ -54,6 +55,7 @@ module.exports = # projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" # docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" # ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" + # projectState: ({project_id}) -> "ProjectState:{#{project_id}}" history: port:"6379" host:"localhost" From 1eb80936f2e34492db2a2424bcd3940aea4be1c2 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 10 Aug 2017 14:57:27 +0100 Subject: [PATCH 316/769] fix log line --- services/document-updater/app/coffee/HttpController.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 8da4b68c2d..369b429ab1 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -56,7 +56,7 @@ module.exports = HttpController = else if error? return next(error) else - logger.log project_id: project_id, result: ("#{doc._id}:#{doc.rev}" for doc in result), "got docs via http" + logger.log project_id: project_id, result: ("#{doc._id}:#{doc.v}" for doc in result), "got docs via http" res.send result setDoc: (req, res, next = (error) ->) -> From f696ccb0d9b16cd85a20940b01b37f369a36de6d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 10 Aug 2017 14:57:40 +0100 Subject: [PATCH 317/769] add unit tests for getProjectDocs --- .../app/coffee/ProjectManager.coffee | 12 ++-- .../HttpController/HttpControllerTests.coffee | 60 ++++++++++++++++++- 2 files changed, 67 insertions(+), 5 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 3ef16d03d4..c25f51277f 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -58,7 +58,7 @@ module.exports = ProjectManager = else callback(null) - getProjectDocs: (project_id, projectStateHash, excludeVersions = {}, _callback = (error) ->) -> + getProjectDocs: (project_id, projectStateHash, excludeVersions = {}, _callback = (error, docs) ->) -> timer = new Metrics.Timer("projectManager.getProjectDocs") callback = (args...) -> timer.done() @@ -78,12 +78,16 @@ module.exports = ProjectManager = jobs.push (cb) -> # check the doc version first RedisManager.getDocVersion doc_id, (error, version) -> - return cb(error) if error? + if error? 
+ logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc version" + return cb(error) # skip getting the doc if we already have that version - return cb() if version is excludeVersions[doc_id] + return cb() if version? and version is excludeVersions[doc_id] # otherwise get the doc lines from redis RedisManager.getDocLines doc_id, (error, lines) -> - return cb(error) if error? + if error? + logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc lines" + return cb(error) docs.push {_id: doc_id, lines: lines, v: version} cb() async.series jobs, (error) -> diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 617146e787..cadea20c04 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -12,7 +12,7 @@ describe "HttpController", -> "./ProjectManager": @ProjectManager = {} "logger-sharelatex" : @logger = { log: sinon.stub() } "./Metrics": @Metrics = {} - + "./Errors" : Errors @Metrics.Timer = class Timer done: sinon.stub() @project_id = "project-id-123" @@ -434,3 +434,61 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true + + describe "getProjectDocs", -> + beforeEach -> + @state = "01234567890abcdef" + @docs = [{_id: "1234", lines: "hello", v: 23}, {_id: "4567", lines: "world", v: 45}] + @req = + params: + project_id: @project_id + query: + state: @state + + describe "successfully", -> + beforeEach -> + @ProjectManager.getProjectDocs = sinon.stub().callsArgWith(3,null, @docs) + @HttpController.getProjectDocs(@req, @res, @next) + + it "should get docs from the project manager", -> + @ProjectManager.getProjectDocs + .calledWith(@project_id, @state, {}) + .should.equal true + + it "should return a successful response", -> + @res.send + .calledWith(@docs) + .should.equal true + + it "should log the request", -> + @logger.log + .calledWith({project_id: @project_id, exclude: []}, "getting docs via http") + .should.equal true + + it "should log the response", -> + @logger.log + .calledWith({project_id: @project_id, result: ["1234:23", "4567:45"]}, "got docs via http") + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when there is a conflict", -> + beforeEach -> + @ProjectManager.getProjectDocs = sinon.stub().callsArgWith(3, new Errors.ProjectStateChangedError("project state changed")) + @HttpController.getProjectDocs(@req, @res, @next) + + it "should return an HTTP 409 Conflict response", -> + @res.send + .calledWith(409) + .should.equal true + + describe "when an error occurs", -> + beforeEach -> + @ProjectManager.getProjectDocs = sinon.stub().callsArgWith(3, new Error("oops")) + @HttpController.getProjectDocs(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true From 1146253c0ab82ab384ecd648af48f02ffaa41161 Mon Sep 17 00:00:00 2001 From: Joe Green Date: Fri, 11 Aug 2017 11:06:36 +0100 Subject: [PATCH 318/769] Create Jenkinsfile --- services/document-updater/Jenkinsfile | 83 +++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100644 services/document-updater/Jenkinsfile diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile new 
file mode 100644
index 0000000000..a2150d086b
--- /dev/null
+++ b/services/document-updater/Jenkinsfile
@@ -0,0 +1,83 @@
+pipeline {
+
+  agent {
+    docker {
+      image 'node:4.2.6'
+      args "-v /var/lib/jenkins/.npm:/tmp/.npm"
+    }
+  }
+
+  environment {
+    HOME = "/tmp"
+  }
+
+  triggers {
+    pollSCM('* * * * *')
+    cron('@daily')
+  }
+
+  stages {
+    stage('Set up') {
+      steps {
+        // we need to disable logallrefupdates, else git clones during the npm install will require git to lookup the user id
+        // which does not exist in the container's /etc/passwd file, causing the clone to fail.
+        sh 'git config --global core.logallrefupdates false'
+      }
+    }
+    stage('Install') {
+      steps {
+        sh 'rm -fr node_modules'
+        sh 'npm install'
+        sh 'npm rebuild'
+        sh 'npm install --quiet grunt-cli'
+      }
+    }
+    stage('Compile') {
+      steps {
+        sh 'node_modules/.bin/grunt compile'
+      }
+    }
+    stage('Test') {
+      steps {
+        sh 'NODE_ENV=development node_modules/.bin/grunt test:unit'
+      }
+    }
+    stage('Acceptance Tests') {
+      steps {
+        echo "TODO - Run Acceptance Tests"
+        //sh 'docker run -v "$(pwd):/app" --rm sl-acceptance-test-runner'
+      }
+    }
+    stage('Package') {
+      steps {
+        sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
+        sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .'
+      }
+    }
+    stage('Publish') {
+      steps {
+        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
+          s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
+        }
+      }
+    }
+  }
+
+  post {
+    failure {
+      mail(from: "${EMAIL_ALERT_FROM}",
+           to: "${EMAIL_ALERT_TO}",
+           subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
+           body: "Build: ${BUILD_URL}")
+    }
+  }
+
+  // The options directive is for configuration that applies to the whole job.
+  options {
+    // we'd like to make sure we remove old builds, so we don't fill up our storage!
+ buildDiscarder(logRotator(numToKeepStr:'50')) + + // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after: + timeout(time: 30, unit: 'MINUTES') + } +} From f6be68eb080436465c25165ff0987203288bf760 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 11 Aug 2017 16:49:16 +0100 Subject: [PATCH 319/769] added missing unit tests file --- .../ProjectManager/getProjectDocsTests.coffee | 101 ++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee new file mode 100644 index 0000000000..21ed1d0eb1 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -0,0 +1,101 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/ProjectManager.js" +SandboxedModule = require('sandboxed-module') +Errors = require "../../../../app/js/Errors.js" + +describe "ProjectManager - getProjectDocs", -> + beforeEach -> + @ProjectManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./DocumentManager": @DocumentManager = {} + "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + @project_id = "project-id-123" + @callback = sinon.stub() + + describe "successfully", -> + beforeEach (done) -> + @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] + @doc_versions = [111, 222, 333] + @doc_lines = [["aaa","aaa"],["bbb","bbb"],["ccc","ccc"]] + @docs = [ + {_id: @doc_ids[0], lines: @doc_lines[0], v: @doc_versions[0]} + {_id: @doc_ids[1], lines: @doc_lines[1], v: @doc_versions[1]} + {_id: @doc_ids[2], lines: @doc_lines[2], v: @doc_versions[2]} + ] + @RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null) + @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) + @RedisManager.getDocVersion = sinon.stub() + @RedisManager.getDocVersion.withArgs(@doc_ids[0]).callsArgWith(1, null, @doc_versions[0]) + @RedisManager.getDocVersion.withArgs(@doc_ids[1]).callsArgWith(1, null, @doc_versions[1]) + @RedisManager.getDocVersion.withArgs(@doc_ids[2]).callsArgWith(1, null, @doc_versions[2]) + @RedisManager.getDocLines = sinon.stub() + @RedisManager.getDocLines.withArgs(@doc_ids[0]).callsArgWith(1, null, @doc_lines[0]) + @RedisManager.getDocLines.withArgs(@doc_ids[1]).callsArgWith(1, null, @doc_lines[1]) + @RedisManager.getDocLines.withArgs(@doc_ids[2]).callsArgWith(1, null, @doc_lines[2]) + @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => + @callback(error, docs) + done() + + it "should check the project state", -> + @RedisManager.checkOrSetProjectState + .calledWith(@project_id, @projectStateHash) + .should.equal true + + it "should get the doc ids in the project", -> + @RedisManager.getDocIdsInProject + .calledWith(@project_id) + .should.equal true + + it "should call the callback without error", -> + @callback.calledWith(null, @docs).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when the state does not match", -> + beforeEach (done) -> + @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] + @RedisManager.checkOrSetProjectState 
= sinon.stub().callsArgWith(2, null, true) + @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => + @callback(error, docs) + done() + + it "should check the project state", -> + @RedisManager.checkOrSetProjectState + .calledWith(@project_id, @projectStateHash) + .should.equal true + + it "should call the callback with an error", -> + @callback.calledWith(new Errors.ProjectStateChangedError("project state changed")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true + + describe "when a doc errors", -> + beforeEach (done) -> + @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] + @RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null) + @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) + @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null) + @RedisManager.getDocLines = sinon.stub() + @RedisManager.getDocLines.withArgs("doc-id-1").callsArgWith(1, null) + @RedisManager.getDocLines.withArgs("doc-id-2").callsArgWith(1, @error = new Error("oops")) # trigger an error + @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => + @callback(error) + done() + + it "should record the error", -> + @logger.error + .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-2", "error getting project doc lines") + .should.equal true + + it "should call the callback with an error", -> + @callback.calledWith(new Error("oops")).should.equal true + + it "should time the execution", -> + @Metrics.Timer::done.called.should.equal true From 5971a19084d0b4df28a1ea2af65e8220f46a2913 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 11 Aug 2017 16:55:31 +0100 Subject: [PATCH 320/769] added acceptance tests --- .../coffee/GettingProjectDocsTests.coffee | 68 +++++++++++++++++++ .../coffee/helpers/DocUpdaterClient.coffee | 6 ++ 2 files changed, 74 insertions(+) create mode 100644 services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee diff --git a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee new file mode 100644 index 0000000000..708176ea69 --- /dev/null +++ b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee @@ -0,0 +1,68 @@ +sinon = require "sinon" +chai = require("chai") +chai.should() +expect = chai.expect + +MockWebApi = require "./helpers/MockWebApi" +DocUpdaterClient = require "./helpers/DocUpdaterClient" + +describe "Getting documents for project", -> + before (done) -> + @lines = ["one", "two", "three"] + @version = 42 + setTimeout done, 200 # Give MockWebApi a chance to start + + describe "when project state hash does not match", -> + before (done) -> + @projectStateHash = DocUpdaterClient.randomId() + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? 
+ DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res, @returnedDocs) => + done() + + it "should return a 409 Conflict response", -> + @res.statusCode.should.equal 409 + + + describe "when project state hash matches", -> + before (done) -> + @projectStateHash = DocUpdaterClient.randomId() + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res0, @returnedDocs0) => + # set the hash + DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res, @returnedDocs) => + # the hash should now match + done() + + it "should return a 200 response", -> + @res.statusCode.should.equal 200 + + it "should return the documents", -> + @returnedDocs.should.deep.equal [ {_id: @doc_id, lines: @lines, v: @version} ] + + + describe "when the doc has been removed", -> + before (done) -> + @projectStateHash = DocUpdaterClient.randomId() + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res0, @returnedDocs0) => + # set the hash + DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => + # delete the doc + DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res, @returnedDocs) => + # the hash would match, but the doc has been deleted + done() + + it "should return a 409 Conflict response", -> + @res.statusCode.should.equal 409 diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 4b57e0659f..6b2a5ac2fb 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -80,3 +80,9 @@ module.exports = DocUpdaterClient = removeComment: (project_id, doc_id, comment, callback = () ->) -> request.del "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/comment/#{comment}", callback + + getProjectDocs: (project_id, projectStateHash, callback = () ->) -> + request.get "http://localhost:3003/project/#{project_id}/doc?state=#{projectStateHash}", (error, res, body) -> + if body? and res.statusCode >= 200 and res.statusCode < 300 + body = JSON.parse(body) + callback error, res, body From 8372911a1b8cb057dd4883f47a453e35297568d6 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 11 Aug 2017 16:56:30 +0100 Subject: [PATCH 321/769] return doclines as array from getProjectDocs for compatibility with getDoc --- services/document-updater/app/coffee/ProjectManager.coffee | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index c25f51277f..60446485b0 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -88,7 +88,10 @@ module.exports = ProjectManager = if error? 
             logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc lines"
             return cb(error)
-          docs.push {_id: doc_id, lines: lines, v: version}
+          try
+            docs.push {_id: doc_id, lines: JSON.parse(lines), v: version}
+          catch e
+            return cb(e)
           cb()
     async.series jobs, (error) ->
       return callback(error) if error?
       callback(null, docs)

From b1a7f779d1d1d1016c1aec7e1708f91f9a80e130 Mon Sep 17 00:00:00 2001
From: Joe Green
Date: Mon, 14 Aug 2017 15:28:04 +0100
Subject: [PATCH 322/769] add acceptance tests

---
 services/document-updater/Jenkinsfile | 56 +++++++++++++--------------
 1 file changed, 26 insertions(+), 30 deletions(-)

diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile
index a2150d086b..a1852639b1 100644
--- a/services/document-updater/Jenkinsfile
+++ b/services/document-updater/Jenkinsfile
@@ -1,51 +1,47 @@
 pipeline {
+
+  agent any

-  agent {
-    docker {
-      image 'node:4.2.6'
-      args "-v /var/lib/jenkins/.npm:/tmp/.npm"
-    }
-  }
-
-  environment {
-    HOME = "/tmp"
-  }
-
   triggers {
     pollSCM('* * * * *')
     cron('@daily')
   }

   stages {
-    stage('Set up') {
+    stage('Install') {
+      agent {
+        docker {
+          image 'node:4.2.1'
+          args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp"
+          reuseNode true
+        }
+      }
       steps {
         // we need to disable logallrefupdates, else git clones during the npm install will require git to lookup the user id
         // which does not exist in the container's /etc/passwd file, causing the clone to fail.
         sh 'git config --global core.logallrefupdates false'
-      }
-    }
-    stage('Install') {
-      steps {
         sh 'rm -fr node_modules'
-        sh 'npm install'
-        sh 'npm rebuild'
+        sh 'npm install && npm rebuild'
         sh 'npm install --quiet grunt-cli'
       }
     }
-    stage('Compile') {
+    stage('Compile and Test') {
+      agent {
+        docker {
+          image 'node:4.2.1'
+          reuseNode true
+        }
+      }
       steps {
         sh 'node_modules/.bin/grunt compile'
-      }
-    }
-    stage('Test') {
-      steps {
+        // sh 'node_modules/.bin/grunt compile:acceptance_tests'
         sh 'NODE_ENV=development node_modules/.bin/grunt test:unit'
       }
     }
     stage('Acceptance Tests') {
       steps {
-        echo "TODO - Run Acceptance Tests"
-        //sh 'docker run -v "$(pwd):/app" --rm sl-acceptance-test-runner'
+        sh 'docker pull sharelatex/acceptance-test-runner'
+        sh 'docker run --rm -v $(pwd):/app sharelatex/acceptance-test-runner'
       }
     }
     stage('Package') {
@@ -62,21 +58,21 @@ pipeline {
       }
     }
   }
-
+
   post {
     failure {
-      mail(from: "${EMAIL_ALERT_FROM}",
-           to: "${EMAIL_ALERT_TO}",
+      mail(from: "${EMAIL_ALERT_FROM}",
+           to: "${EMAIL_ALERT_TO}",
           subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
           body: "Build: ${BUILD_URL}")
     }
   }
-
+
   // The options directive is for configuration that applies to the whole job.
   options {
     // we'd like to make sure we remove old builds, so we don't fill up our storage!
buildDiscarder(logRotator(numToKeepStr:'50')) - + // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after: timeout(time: 30, unit: 'MINUTES') } From 0e8ce294146d6a6f96dbaea93715018fb2d1c47a Mon Sep 17 00:00:00 2001 From: Joe Green Date: Mon, 14 Aug 2017 15:33:30 +0100 Subject: [PATCH 323/769] compile acceptance tests --- services/document-updater/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index a1852639b1..e798bbaa51 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -34,7 +34,7 @@ pipeline { } steps { sh 'node_modules/.bin/grunt compile' - // sh 'node_modules/.bin/grunt compile:acceptance_tests' + sh 'node_modules/.bin/grunt compile:acceptance_tests' sh 'NODE_ENV=development node_modules/.bin/grunt test:unit' } } From 230c93766d6a8e3063021fb2bb52f65c5a22a15e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 18 Aug 2017 11:59:31 +0100 Subject: [PATCH 324/769] fix broken RedisManager unit test --- .../test/unit/coffee/RedisManager/RedisManagerTests.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 0fe4470d32..61598974c6 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -29,6 +29,7 @@ describe "RedisManager", -> pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" + projectState: ({project_id}) -> "ProjectState:#{project_id}" history: key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" From 9d931a3fa7e73170fec0c7a15e3ba6cee583a412 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 18 Aug 2017 12:08:39 +0100 Subject: [PATCH 325/769] fix broken unit tests --- .../unit/coffee/ProjectManager/getProjectDocsTests.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee index 21ed1d0eb1..59840b048c 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -34,9 +34,9 @@ describe "ProjectManager - getProjectDocs", -> @RedisManager.getDocVersion.withArgs(@doc_ids[1]).callsArgWith(1, null, @doc_versions[1]) @RedisManager.getDocVersion.withArgs(@doc_ids[2]).callsArgWith(1, null, @doc_versions[2]) @RedisManager.getDocLines = sinon.stub() - @RedisManager.getDocLines.withArgs(@doc_ids[0]).callsArgWith(1, null, @doc_lines[0]) - @RedisManager.getDocLines.withArgs(@doc_ids[1]).callsArgWith(1, null, @doc_lines[1]) - @RedisManager.getDocLines.withArgs(@doc_ids[2]).callsArgWith(1, null, @doc_lines[2]) + @RedisManager.getDocLines.withArgs(@doc_ids[0]).callsArgWith(1, null, JSON.stringify(@doc_lines[0])) + @RedisManager.getDocLines.withArgs(@doc_ids[1]).callsArgWith(1, null, JSON.stringify(@doc_lines[1])) + @RedisManager.getDocLines.withArgs(@doc_ids[2]).callsArgWith(1, null, JSON.stringify(@doc_lines[2])) @ProjectManager.getProjectDocs 
@project_id, @projectStateHash, @excludeVersions, (error, docs) => @callback(error, docs) done() @@ -83,7 +83,7 @@ describe "ProjectManager - getProjectDocs", -> @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null) @RedisManager.getDocLines = sinon.stub() - @RedisManager.getDocLines.withArgs("doc-id-1").callsArgWith(1, null) + @RedisManager.getDocLines.withArgs("doc-id-1").callsArgWith(1, null, JSON.stringify(["test doc content"])) @RedisManager.getDocLines.withArgs("doc-id-2").callsArgWith(1, @error = new Error("oops")) # trigger an error @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => @callback(error) From c16c6c3bd35e82d035cd920dad117e053733bbdc Mon Sep 17 00:00:00 2001 From: Joe Green Date: Mon, 4 Sep 2017 14:48:47 +0100 Subject: [PATCH 326/769] added build.txt --- services/document-updater/Jenkinsfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index e798bbaa51..43b6722ba9 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -46,6 +46,7 @@ pipeline { } stage('Package') { steps { + sh 'echo ${BUILD_NUMBER} > build_number.txt' sh 'touch build.tar.gz' // Avoid tar warning about files changing during read sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .' } @@ -54,6 +55,8 @@ pipeline { steps { withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") + // The deployment process uses this file to figure out the latest build + s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") } } } From 1e54b4c72c2484c7c7b919cb6af414c3a738e61b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 7 Sep 2017 14:12:48 +0100 Subject: [PATCH 327/769] log all errors in getProjectDocs --- .../app/coffee/ProjectManager.coffee | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 60446485b0..6aa7b14434 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -65,12 +65,16 @@ module.exports = ProjectManager = _callback(args...) RedisManager.checkOrSetProjectState project_id, projectStateHash, (error, projectStateChanged) -> - return callback(error) if error? + if error? + logger.error err: error, project_id: project_id, "error getting/setting project state in getProjectDocs" + return callback(error) # we can't return docs if project structure has changed return callback Errors.ProjectStateChangedError("project state changed") if projectStateChanged # project structure hasn't changed, return doc content from redis RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> - return callback(error) if error? + if error? + logger.error err: error, project_id: project_id, "error getting doc ids in getProjectDocs" + return callback(error) jobs = [] docs = [] for doc_id in doc_ids or [] @@ -79,18 +83,21 @@ module.exports = ProjectManager = # check the doc version first RedisManager.getDocVersion doc_id, (error, version) -> if error? 
- logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc version" + logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc version in getProjectDocs" return cb(error) # skip getting the doc if we already have that version - return cb() if version? and version is excludeVersions[doc_id] + if version? and version is excludeVersions[doc_id] + logger.error err: error, project_id: project_id, doc_id: doc_id, version: version, "skipping doc version in getProjectDocs" + return cb() # otherwise get the doc lines from redis RedisManager.getDocLines doc_id, (error, lines) -> if error? - logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc lines" + logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc lines in getProjectDocs" return cb(error) try docs.push {_id: doc_id, lines: JSON.parse(lines), v: version} catch e + logger.error err: e, project_id: project_id, doc_id: doc_id, lines: lines, version: version, "error parsing doc lines in getProjectDocs" return cb(e) cb() async.series jobs, (error) -> From 215a939d4e37f21840e7bddea78e6fd5894bb348 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 8 Sep 2017 13:43:22 +0100 Subject: [PATCH 328/769] add comment about log.error for excludeVersions --- services/document-updater/app/coffee/ProjectManager.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 6aa7b14434..a6621d1be0 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -87,6 +87,8 @@ module.exports = ProjectManager = return cb(error) # skip getting the doc if we already have that version if version? and version is excludeVersions[doc_id] + # not currently using excludeVersions so we shouldn't get here! 
+ # change to logger.log when this code path is in use logger.error err: error, project_id: project_id, doc_id: doc_id, version: version, "skipping doc version in getProjectDocs" return cb() # otherwise get the doc lines from redis From f34c12fdc79fd7d152ef4f970dc32e0a49a73a2b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 8 Sep 2017 15:50:26 +0100 Subject: [PATCH 329/769] add endpoint to clear project state --- services/document-updater/app.coffee | 1 + .../app/coffee/HttpController.coffee | 11 +++++++++++ .../app/coffee/ProjectManager.coffee | 3 +++ .../app/coffee/RedisManager.coffee | 3 +++ .../ProjectManager/getProjectDocsTests.coffee | 15 +++++++++++++++ .../coffee/RedisManager/RedisManagerTests.coffee | 9 +++++++++ 6 files changed, 42 insertions(+) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index a580f8f113..ba46a933db 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -39,6 +39,7 @@ app.param 'doc_id', (req, res, next, doc_id) -> app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc app.get '/project/:project_id/doc', HttpController.getProjectDocs +app.post '/project/:project_id/clear', HttpController.clearProjectState app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 369b429ab1..1c8e70656d 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -59,6 +59,17 @@ module.exports = HttpController = logger.log project_id: project_id, result: ("#{doc._id}:#{doc.v}" for doc in result), "got docs via http" res.send result + clearProjectState: (req, res, next = (error) ->) -> + project_id = req.params.project_id + timer = new Metrics.Timer("http.clearProjectState") + logger.log project_id: project_id, "clearing project state via http" + ProjectManager.clearProjectState project_id, (error) -> + timer.done() + if error? + return next(error) + else + res.send 200 + setDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 60446485b0..5ecd0193f2 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -96,3 +96,6 @@ module.exports = ProjectManager = async.series jobs, (error) -> return callback(error) if error? 
callback(null, docs) + + clearProjectState: (project_id, callback = (error) ->) -> + RedisManager.clearProjectState project_id, callback diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 718eb1f5b8..b233cdf7e7 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -102,6 +102,9 @@ module.exports = RedisManager = logger.log project_id: project_id, newState:newState, oldState: response[0], "checking project state" callback(null, response[0] isnt newState) + clearProjectState: (project_id, callback = (error) ->) -> + rclient.del keys.projectState(project_id:project_id), callback + getDoc : (project_id, doc_id, callback = (error, lines, version, ranges) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee index 59840b048c..a983e09d34 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -99,3 +99,18 @@ describe "ProjectManager - getProjectDocs", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + + describe "clearing the project state with clearProjectState", -> + beforeEach (done) -> + @RedisManager.clearProjectState = sinon.stub().callsArg(1) + @ProjectManager.clearProjectState @project_id, (error) => + @callback(error) + done() + + it "should clear the project state", -> + @RedisManager.clearProjectState + .calledWith(@project_id) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 61598974c6..0a5149c552 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -581,3 +581,12 @@ describe "RedisManager", -> .calledWith("DocsIn:#{@project_id}", @doc_id) .should.equal true + describe "clearProjectState", -> + beforeEach (done) -> + @rclient.del = sinon.stub().callsArg(1) + @RedisManager.clearProjectState @project_id, done + + it "should delete the project state", -> + @rclient.del + .calledWith("ProjectState:#{@project_id}") + .should.equal true From 2047bd551667ac32c89fce75cc4004559da2f356 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 11 Sep 2017 09:44:05 +0100 Subject: [PATCH 330/769] fix broken unit test --- .../test/unit/coffee/ProjectManager/getProjectDocsTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee index 59840b048c..a2e3be26b5 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -91,7 +91,7 @@ describe "ProjectManager - getProjectDocs", -> it "should record the error", -> @logger.error - .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-2", "error 
getting project doc lines") + .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-2", "error getting project doc lines in getProjectDocs") .should.equal true it "should call the callback with an error", -> From 3842a27278e758ac70f00621a7e3127e7a46125b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 12 Sep 2017 11:39:18 +0100 Subject: [PATCH 331/769] change .../clear endpoint to .../clearState --- services/document-updater/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index ba46a933db..dccedadd92 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -39,7 +39,7 @@ app.param 'doc_id', (req, res, next, doc_id) -> app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc app.get '/project/:project_id/doc', HttpController.getProjectDocs -app.post '/project/:project_id/clear', HttpController.clearProjectState +app.post '/project/:project_id/clearState', HttpController.clearProjectState app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc From 340aa98de5dd1df7f48c694f4b593b0aa12fbe8c Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 4 Oct 2017 11:27:19 +0100 Subject: [PATCH 332/769] Add OSS syncing task --- services/document-updater/Jenkinsfile | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 43b6722ba9..4ffd642356 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -60,6 +60,17 @@ pipeline { } } } + + stage('Sync OSS') { + when { + branch 'master' + } + steps { + sshagent (credentials: ['GIT_DEPLOY_KEY']) { + sh 'git push git@github.com:sharelatex/document-updater-sharelatex.git HEAD:master' + } + } + } } post { From 2bbbf3c005d828b954b16757b40861c9c74b8623 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 6 Oct 2017 12:23:23 +0100 Subject: [PATCH 333/769] add unflushed time to doc in redis --- .../app/coffee/DocumentManager.coffee | 29 +++++++-- .../app/coffee/ProjectManager.coffee | 35 +++------- .../app/coffee/RedisManager.coffee | 15 ++++- .../config/settings.defaults.coffee | 1 + .../DocumentManagerTests.coffee | 65 ++++++++++++++++++- .../ProjectManager/getProjectDocsTests.coffee | 24 +++---- .../RedisManager/RedisManagerTests.coffee | 36 ++++++++-- 7 files changed, 151 insertions(+), 54 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 054baca47e..50e08741cd 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -8,14 +8,16 @@ RealTimeRedisManager = require "./RealTimeRedisManager" Errors = require "./Errors" RangesManager = require "./RangesManager" +MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change + module.exports = DocumentManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, alreadyLoaded) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, alreadyLoaded, unflushedTime) ->) -> timer = new Metrics.Timer("docManager.getDoc") callback = (args...) -> timer.done() _callback(args...) 
- RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, unflushedTime) -> return callback(error) if error? if !lines? or !version? logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" @@ -24,9 +26,9 @@ module.exports = DocumentManager = logger.log {project_id, doc_id, lines, version}, "got doc from persistence API" RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, (error) -> return callback(error) if error? - callback null, lines, version, ranges, false + callback null, lines, version, ranges, false, null else - callback null, lines, version, ranges, true + callback null, lines, version, ranges, true, unflushedTime getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, ranges) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") @@ -103,7 +105,7 @@ module.exports = DocumentManager = logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" PersistenceManager.setDoc project_id, doc_id, lines, version, ranges, (error) -> return callback(error) if error? - callback null + RedisManager.clearUnflushedTime doc_id, callback flushAndDeleteDoc: (project_id, doc_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.flushAndDeleteDoc") @@ -156,6 +158,17 @@ module.exports = DocumentManager = return callback(error) if error? callback() + getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, alreadyLoaded, unflushedTime) -> + return callback(error) if error? + # if doc was already loaded see if it needs to be flushed + if alreadyLoaded and unflushedTime? and (Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE + DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> + return callback(error) if error? 
+ callback(null, lines, version) + else + callback(null, lines, version) + getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback @@ -163,7 +176,11 @@ module.exports = DocumentManager = getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback - + + getDocAndFlushIfOldWithLock: (project_id, doc_id, callback = (error, doc) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndFlushIfOld, project_id, doc_id, callback + setDocWithLock: (project_id, doc_id, lines, source, user_id, undoing, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, undoing, callback diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 7c290840c8..4a48351a1b 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -69,40 +69,25 @@ module.exports = ProjectManager = logger.error err: error, project_id: project_id, "error getting/setting project state in getProjectDocs" return callback(error) # we can't return docs if project structure has changed - return callback Errors.ProjectStateChangedError("project state changed") if projectStateChanged + if projectStateChanged + return callback Errors.ProjectStateChangedError("project state changed") # project structure hasn't changed, return doc content from redis RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> if error? logger.error err: error, project_id: project_id, "error getting doc ids in getProjectDocs" return callback(error) jobs = [] - docs = [] for doc_id in doc_ids or [] do (doc_id) -> jobs.push (cb) -> - # check the doc version first - RedisManager.getDocVersion doc_id, (error, version) -> - if error? - logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc version in getProjectDocs" - return cb(error) - # skip getting the doc if we already have that version - if version? and version is excludeVersions[doc_id] - # not currently using excludeVersions so we shouldn't get here! - # change to logger.log when this code path is in use - logger.error err: error, project_id: project_id, doc_id: doc_id, version: version, "skipping doc version in getProjectDocs" - return cb() - # otherwise get the doc lines from redis - RedisManager.getDocLines doc_id, (error, lines) -> - if error? - logger.error err: error, project_id: project_id, doc_id: doc_id, "error getting project doc lines in getProjectDocs" - return cb(error) - try - docs.push {_id: doc_id, lines: JSON.parse(lines), v: version} - catch e - logger.error err: e, project_id: project_id, doc_id: doc_id, lines: lines, version: version, "error parsing doc lines in getProjectDocs" - return cb(e) - cb() - async.series jobs, (error) -> + # get the doc lines from redis + DocumentManager.getDocAndFlushIfOldWithLock project_id, doc_id, (err, lines, version) -> + if err? 
+ logger.error err:err, project_id: project_id, doc_id: doc_id, "error getting project doc lines in getProjectDocs" + return cb(err) + doc = {_id:doc_id, lines:lines, v:version} # create a doc object to return + cb(null, doc) + async.series jobs, (error, docs) -> return callback(error) if error? callback(null, docs) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b233cdf7e7..b873f9bd6a 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -86,6 +86,7 @@ module.exports = RedisManager = multi.del keys.docVersion(doc_id:doc_id) multi.del keys.docHash(doc_id:doc_id) multi.del keys.ranges(doc_id:doc_id) + multi.del keys.unflushedTime(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? multi = rclient.multi() @@ -105,7 +106,7 @@ module.exports = RedisManager = clearProjectState: (project_id, callback = (error) ->) -> rclient.del keys.projectState(project_id:project_id), callback - getDoc : (project_id, doc_id, callback = (error, lines, version, ranges) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, unflushedTime) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) @@ -113,7 +114,8 @@ module.exports = RedisManager = multi.get keys.docHash(doc_id:doc_id) multi.get keys.projectKey(doc_id:doc_id) multi.get keys.ranges(doc_id:doc_id) - multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges])-> + multi.get keys.unflushedTime(doc_id:doc_id) + multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, unflushedTime])-> timeSpan = timer.done() return callback(error) if error? # check if request took too long and bail out. only do this for @@ -149,7 +151,7 @@ module.exports = RedisManager = return callback(error) if error? if result isnt 0 # doc should already be in set logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" - callback null, docLines, version, ranges + callback null, docLines, version, ranges, unflushedTime getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -247,6 +249,10 @@ module.exports = RedisManager = # expire must come after rpush since before it will be a no-op if the list is empty multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL # index 6 multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... # index 7 + # Set the unflushed timestamp to the current time if the doc + # hasn't been modified before (the content in mongo has been + # valid up to this point). Otherwise leave it alone ("NX" flag). + multi.set keys.unflushedTime(doc_id: doc_id), Date.now(), "NX" multi.exec (error, result) -> return callback(error) if error? 
# check the hash computed on the redis server @@ -257,6 +263,9 @@ module.exports = RedisManager = uncompressedHistoryOpsLength = result?[7] return callback(null, uncompressedHistoryOpsLength) + clearUnflushedTime: (doc_id, callback = (error) ->) -> + rclient.del keys.unflushedTime(doc_id:doc_id), callback + getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 838ffa19a8..9b800f2729 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -42,6 +42,7 @@ module.exports = docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" + unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" # cluster: [{ # port: "7000" # host: "localhost" diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index b2eaa321d6..5bff4fda63 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -4,6 +4,7 @@ should = chai.should() modulePath = "../../../../app/js/DocumentManager.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors" +tk = require "timekeeper" describe "DocumentManager", -> beforeEach -> @@ -60,6 +61,7 @@ describe "DocumentManager", -> describe "when the doc is in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) + @RedisManager.clearUnflushedTime = sinon.stub().callsArgWith(1, null) @PersistenceManager.setDoc = sinon.stub().yields() @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback @@ -373,4 +375,65 @@ describe "DocumentManager", -> it "should call the callback with a not found error", -> error = new Errors.NotFoundError("document not found: #{@doc_id}") - @callback.calledWith(error).should.equal true \ No newline at end of file + @callback.calledWith(error).should.equal true + + describe "getDocAndFlushIfOld", -> + beforeEach -> + tk.freeze(new Date()) + @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) + + afterEach -> + tk.reset() + + describe "when the doc is in Redis", -> + describe "and has changes to be flushed", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, true, Date.now() - 1e9) + @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should flush the doc", -> + @DocumentManager.flushDocIfLoaded + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback with the lines and versions", -> + @callback.calledWith(null, @lines, @version).should.equal true + + describe "and has only changes that don't need to be flushed", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, true, Date.now() - 100) + @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + 
.calledWith(@project_id, @doc_id) + .should.equal true + + it "should not flush the doc", -> + @DocumentManager.flushDocIfLoaded + .called.should.equal false + + it "should call the callback with the lines and versions", -> + @callback.calledWith(null, @lines, @version).should.equal true + + describe "when the doc is not in Redis", -> + beforeEach -> + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, false) + @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback + + it "should get the doc", -> + @DocumentManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should not flush the doc", -> + @DocumentManager.flushDocIfLoaded + .called.should.equal false + + it "should call the callback with the lines and versions", -> + @callback.calledWith(null, @lines, @version).should.equal true diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee index d5b12c63d8..99c249d6cb 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -29,14 +29,13 @@ describe "ProjectManager - getProjectDocs", -> ] @RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null) @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @RedisManager.getDocVersion = sinon.stub() - @RedisManager.getDocVersion.withArgs(@doc_ids[0]).callsArgWith(1, null, @doc_versions[0]) - @RedisManager.getDocVersion.withArgs(@doc_ids[1]).callsArgWith(1, null, @doc_versions[1]) - @RedisManager.getDocVersion.withArgs(@doc_ids[2]).callsArgWith(1, null, @doc_versions[2]) - @RedisManager.getDocLines = sinon.stub() - @RedisManager.getDocLines.withArgs(@doc_ids[0]).callsArgWith(1, null, JSON.stringify(@doc_lines[0])) - @RedisManager.getDocLines.withArgs(@doc_ids[1]).callsArgWith(1, null, JSON.stringify(@doc_lines[1])) - @RedisManager.getDocLines.withArgs(@doc_ids[2]).callsArgWith(1, null, JSON.stringify(@doc_lines[2])) + @DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub() + @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, @doc_ids[0]) + .callsArgWith(2, null, @doc_lines[0], @doc_versions[0]) + @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, @doc_ids[1]) + .callsArgWith(2, null, @doc_lines[1], @doc_versions[1]) + @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, @doc_ids[2]) + .callsArgWith(2, null, @doc_lines[2], @doc_versions[2]) @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => @callback(error, docs) done() @@ -81,10 +80,11 @@ describe "ProjectManager - getProjectDocs", -> @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] @RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null) @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @RedisManager.getDocVersion = sinon.stub().callsArgWith(1, null) - @RedisManager.getDocLines = sinon.stub() - @RedisManager.getDocLines.withArgs("doc-id-1").callsArgWith(1, null, JSON.stringify(["test doc content"])) - @RedisManager.getDocLines.withArgs("doc-id-2").callsArgWith(1, @error = new Error("oops")) # trigger an error + @DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub() + @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, "doc-id-1") + .callsArgWith(2, null, ["test 
doc content"], @doc_versions[1]) + @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, "doc-id-2") + .callsArgWith(2, @error = new Error("oops")) # trigger an error @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => @callback(error) done() diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 0a5149c552..f0b37ae986 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -5,6 +5,7 @@ modulePath = "../../../../app/js/RedisManager.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors" crypto = require "crypto" +tk = require "timekeeper" describe "RedisManager", -> beforeEach -> @@ -30,6 +31,7 @@ describe "RedisManager", -> docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" + unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" history: key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" @@ -61,8 +63,9 @@ describe "RedisManager", -> @hash = crypto.createHash('sha1').update(@jsonlines,'utf8').digest('hex') @ranges = { comments: "mock", entries: "mock" } @json_ranges = JSON.stringify @ranges + @unflushed_time = 12345 @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @unflushed_time]) @rclient.sadd = sinon.stub().yields(null, 0) describe "successfully", -> @@ -89,6 +92,11 @@ describe "RedisManager", -> .calledWith("Ranges:#{@doc_id}") .should.equal true + it "should get the unflushed time", -> + @rclient.get + .calledWith("UnflushedTime:#{@doc_id}") + .should.equal true + it "should check if the document is in the DocsIn set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}") @@ -96,7 +104,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWith(null, @lines, @version, @ranges) + .calledWithExactly(null, @lines, @version, @ranges, @unflushed_time) .should.equal true it 'should not log any errors', -> @@ -116,7 +124,7 @@ describe "RedisManager", -> it 'should return an empty result', -> @callback - .calledWith(null, null, 0, {}) + .calledWithExactly(null, null, 0, {}) .should.equal true it 'should not log any errors', -> @@ -134,7 +142,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWith(null, @lines, @version, @ranges) + .calledWithExactly(null, @lines, @version, @ranges, @unflushed_time) .should.equal true describe "with a corrupted document", -> @@ -155,11 +163,11 @@ describe "RedisManager", -> describe "with a slow request to redis", -> beforeEach -> - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges, @unflushed_time]) @clock = sinon.useFakeTimers(); @rclient.exec = (cb) => @clock.tick(6000); - cb(null, [@jsonlines, @version, @another_project_id, @json_ranges]) + cb(null, [@jsonlines, @version, @another_project_id, @json_ranges, @unflushed_time]) 
@RedisManager.getDoc @project_id, @doc_id, @callback @@ -174,7 +182,7 @@ describe "RedisManager", -> describe "getDoc with an invalid project id", -> beforeEach -> @another_project_id = "project-id-456" - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id, @json_ranges]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id, @json_ranges, @unflushed_time]) @RedisManager.getDoc @project_id, @doc_id, @callback it 'should return an error', -> @@ -317,6 +325,10 @@ describe "RedisManager", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback + tk.freeze(new Date()) + + afterEach -> + tk.reset() it "should get the current doc version to check for consistency", -> @RedisManager.getDocVersion @@ -343,6 +355,11 @@ describe "RedisManager", -> .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal true + it "should set the unflushed time", -> + @rclient.set + .calledWith("UnflushedTime:#{@doc_id}", Date.now(), "NX") + .should.equal true + it "should push the doc op into the doc ops list", -> @rclient.rpush .calledWith("DocOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) @@ -570,6 +587,11 @@ describe "RedisManager", -> @rclient.del .calledWith("DocHash:#{@doc_id}") .should.equal true + + it "should delete the unflushed time", -> + @rclient.del + .calledWith("UnflushedTime:#{@doc_id}") + .should.equal true it "should delete the project_id for the doc", -> @rclient.del From 8d3f82360c2e4527195f3294603ed7beea4767c8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 11 Oct 2017 14:25:33 +0100 Subject: [PATCH 334/769] update DocumentManager.getDoc signature keep alreadyLoaded flag at the end for consistency --- .../app/coffee/DocumentManager.coffee | 10 +++++----- .../DocumentManager/DocumentManagerTests.coffee | 17 +++++++++-------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 50e08741cd..21251933d2 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -11,7 +11,7 @@ RangesManager = require "./RangesManager" MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change module.exports = DocumentManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, alreadyLoaded, unflushedTime) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, unflushedTime, alreadyLoaded) ->) -> timer = new Metrics.Timer("docManager.getDoc") callback = (args...) -> timer.done() @@ -26,9 +26,9 @@ module.exports = DocumentManager = logger.log {project_id, doc_id, lines, version}, "got doc from persistence API" RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, (error) -> return callback(error) if error? 
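# The swap below keeps alreadyLoaded as the last callback argument, per this
# commit's message, so every caller destructures one consistent shape:
#   (error, lines, version, ranges, unflushedTime, alreadyLoaded) ->
# unflushedTime is passed as null on the fresh-from-persistence branch
# (nothing is pending a flush yet) and alreadyLoaded tells the two apart.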
- callback null, lines, version, ranges, false, null + callback null, lines, version, ranges, null, false else - callback null, lines, version, ranges, true, unflushedTime + callback null, lines, version, ranges, unflushedTime, true getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, ranges) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") @@ -55,7 +55,7 @@ module.exports = DocumentManager = return callback(new Error("No lines were provided to setDoc")) UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, unflushedTime, alreadyLoaded) -> return callback(error) if error? if oldLines? and oldLines.length > 0 and oldLines[0].text? @@ -159,7 +159,7 @@ module.exports = DocumentManager = callback() getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, alreadyLoaded, unflushedTime) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, unflushedTime, alreadyLoaded) -> return callback(error) if error? # if doc was already loaded see if it needs to be flushed if alreadyLoaded and unflushedTime? and (Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 5bff4fda63..aff1cf0bc6 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -27,6 +27,7 @@ describe "DocumentManager", -> @lines = ["one", "two", "three"] @version = 42 @ranges = { comments: "mock", entries: "mock" } + @unflushedTime = Date.now() describe "flushAndDeleteDoc", -> describe "successfully", -> @@ -149,7 +150,7 @@ describe "DocumentManager", -> describe "getDoc", -> describe "when the doc exists in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @unflushedTime) @DocumentManager.getDoc @project_id, @doc_id, @callback it "should get the doc from Redis", -> @@ -158,7 +159,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, true).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @unflushedTime, true).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -186,7 +187,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, false).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, null, false).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -197,7 +198,7 @@ describe "DocumentManager", -> @beforeLines = ["before", "lines"] @afterLines = ["after", "lines"] @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, 
@beforeLines, @version, @ranges, @unflushedTime, true) @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) @@ -248,7 +249,7 @@ describe "DocumentManager", -> describe "when not already loaded", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, false) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, null, false) @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, false, @callback it "should flush and delete the doc from the doc updater", -> @@ -388,7 +389,7 @@ describe "DocumentManager", -> describe "when the doc is in Redis", -> describe "and has changes to be flushed", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, true, Date.now() - 1e9) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, Date.now() - 1e9, true) @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback it "should get the doc", -> @@ -406,7 +407,7 @@ describe "DocumentManager", -> describe "and has only changes that don't need to be flushed", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, true, Date.now() - 100) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, Date.now() - 100, true) @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback it "should get the doc", -> @@ -423,7 +424,7 @@ describe "DocumentManager", -> describe "when the doc is not in Redis", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, false) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, null, false) @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback it "should get the doc", -> From 7cbb3e7af8b7ded4f9e47e90a4863ea236bb4b72 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 11 Oct 2017 15:29:57 +0100 Subject: [PATCH 335/769] change getProjectDocs endpoint from GET to POST also note that it flushes docs if they are considered old (i.e. 
not recently flushed) --- services/document-updater/app.coffee | 2 +- .../app/coffee/HttpController.coffee | 4 ++-- .../app/coffee/ProjectManager.coffee | 8 ++++---- .../HttpController/HttpControllerTests.coffee | 16 ++++++++-------- .../ProjectManager/getProjectDocsTests.coffee | 10 +++++----- 5 files changed, 20 insertions(+), 20 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index dccedadd92..740cd59da4 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -38,7 +38,7 @@ app.param 'doc_id', (req, res, next, doc_id) -> next new Error("invalid doc id") app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc -app.get '/project/:project_id/doc', HttpController.getProjectDocs +app.post '/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld app.post '/project/:project_id/clearState', HttpController.clearProjectState app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 1c8e70656d..0c03a4f7bd 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -37,7 +37,7 @@ module.exports = HttpController = size += (line.length + 1) return size - getProjectDocs: (req, res, next = (error) ->) -> + getProjectDocsAndFlushIfOld: (req, res, next = (error) ->) -> project_id = req.params.project_id projectStateHash = req.query?.state # exclude is string of existing docs "id:version,id:version,..." @@ -49,7 +49,7 @@ module.exports = HttpController = [id,version] = item?.split(':') excludeVersions[id] = version logger.log {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions" - ProjectManager.getProjectDocs project_id, projectStateHash, excludeVersions, (error, result) -> + ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, excludeVersions, (error, result) -> timer.done() if error instanceof Errors.ProjectStateChangedError res.send 409 # conflict diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 4a48351a1b..26b6e79b0d 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -58,15 +58,15 @@ module.exports = ProjectManager = else callback(null) - getProjectDocs: (project_id, projectStateHash, excludeVersions = {}, _callback = (error, docs) ->) -> - timer = new Metrics.Timer("projectManager.getProjectDocs") + getProjectDocsAndFlushIfOld: (project_id, projectStateHash, excludeVersions = {}, _callback = (error, docs) ->) -> + timer = new Metrics.Timer("projectManager.getProjectDocsAndFlushIfOld") callback = (args...) -> timer.done() _callback(args...) RedisManager.checkOrSetProjectState project_id, projectStateHash, (error, projectStateChanged) -> if error? 
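# checkOrSetProjectState compares the client-supplied projectStateHash with
# the hash cached in redis and reports whether the project structure changed
# since the last call; on a change the controller above answers 409 Conflict
# so the client falls back to a full fetch. A hypothetical client-side sketch
# of that contract (helper names are illustrative; the route is this commit's
# new POST endpoint with the state hash in the query string):
#   request.post "#{docUpdaterUrl}/project/#{project_id}/get_and_flush_if_old?state=#{stateHash}", (error, res, body) ->
#     if res?.statusCode == 409 then resyncProject() else handleDocs(JSON.parse(body))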
- logger.error err: error, project_id: project_id, "error getting/setting project state in getProjectDocs" + logger.error err: error, project_id: project_id, "error getting/setting project state in getProjectDocsAndFlushIfOld" return callback(error) # we can't return docs if project structure has changed if projectStateChanged @@ -83,7 +83,7 @@ module.exports = ProjectManager = # get the doc lines from redis DocumentManager.getDocAndFlushIfOldWithLock project_id, doc_id, (err, lines, version) -> if err? - logger.error err:err, project_id: project_id, doc_id: doc_id, "error getting project doc lines in getProjectDocs" + logger.error err:err, project_id: project_id, doc_id: doc_id, "error getting project doc lines in getProjectDocsAndFlushIfOld" return cb(err) doc = {_id:doc_id, lines:lines, v:version} # create a doc object to return cb(null, doc) diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index cadea20c04..17b5d10304 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -435,7 +435,7 @@ describe "HttpController", -> .calledWith(new Error("oops")) .should.equal true - describe "getProjectDocs", -> + describe "getProjectDocsAndFlushIfOld", -> beforeEach -> @state = "01234567890abcdef" @docs = [{_id: "1234", lines: "hello", v: 23}, {_id: "4567", lines: "world", v: 45}] @@ -447,11 +447,11 @@ describe "HttpController", -> describe "successfully", -> beforeEach -> - @ProjectManager.getProjectDocs = sinon.stub().callsArgWith(3,null, @docs) - @HttpController.getProjectDocs(@req, @res, @next) + @ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3,null, @docs) + @HttpController.getProjectDocsAndFlushIfOld(@req, @res, @next) it "should get docs from the project manager", -> - @ProjectManager.getProjectDocs + @ProjectManager.getProjectDocsAndFlushIfOld .calledWith(@project_id, @state, {}) .should.equal true @@ -475,8 +475,8 @@ describe "HttpController", -> describe "when there is a conflict", -> beforeEach -> - @ProjectManager.getProjectDocs = sinon.stub().callsArgWith(3, new Errors.ProjectStateChangedError("project state changed")) - @HttpController.getProjectDocs(@req, @res, @next) + @ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3, new Errors.ProjectStateChangedError("project state changed")) + @HttpController.getProjectDocsAndFlushIfOld(@req, @res, @next) it "should return an HTTP 409 Conflict response", -> @res.send @@ -485,8 +485,8 @@ describe "HttpController", -> describe "when an error occurs", -> beforeEach -> - @ProjectManager.getProjectDocs = sinon.stub().callsArgWith(3, new Error("oops")) - @HttpController.getProjectDocs(@req, @res, @next) + @ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3, new Error("oops")) + @HttpController.getProjectDocsAndFlushIfOld(@req, @res, @next) it "should call next with the error", -> @next diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee index 99c249d6cb..8e3bc2206d 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -5,7 +5,7 @@ modulePath = 
"../../../../app/js/ProjectManager.js" SandboxedModule = require('sandboxed-module') Errors = require "../../../../app/js/Errors.js" -describe "ProjectManager - getProjectDocs", -> +describe "ProjectManager - getProjectDocsAndFlushIfOld", -> beforeEach -> @ProjectManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} @@ -36,7 +36,7 @@ describe "ProjectManager - getProjectDocs", -> .callsArgWith(2, null, @doc_lines[1], @doc_versions[1]) @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, @doc_ids[2]) .callsArgWith(2, null, @doc_lines[2], @doc_versions[2]) - @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => + @ProjectManager.getProjectDocsAndFlushIfOld @project_id, @projectStateHash, @excludeVersions, (error, docs) => @callback(error, docs) done() @@ -60,7 +60,7 @@ describe "ProjectManager - getProjectDocs", -> beforeEach (done) -> @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] @RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null, true) - @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => + @ProjectManager.getProjectDocsAndFlushIfOld @project_id, @projectStateHash, @excludeVersions, (error, docs) => @callback(error, docs) done() @@ -85,13 +85,13 @@ describe "ProjectManager - getProjectDocs", -> .callsArgWith(2, null, ["test doc content"], @doc_versions[1]) @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, "doc-id-2") .callsArgWith(2, @error = new Error("oops")) # trigger an error - @ProjectManager.getProjectDocs @project_id, @projectStateHash, @excludeVersions, (error, docs) => + @ProjectManager.getProjectDocsAndFlushIfOld @project_id, @projectStateHash, @excludeVersions, (error, docs) => @callback(error) done() it "should record the error", -> @logger.error - .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-2", "error getting project doc lines in getProjectDocs") + .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-2", "error getting project doc lines in getProjectDocsAndFlushIfOld") .should.equal true it "should call the callback with an error", -> From 727a534d01b82b8c1454c116d21a81628cbc0295 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 11 Oct 2017 15:44:35 +0100 Subject: [PATCH 336/769] keep the existing GET method temporarily to avoid any problems due to deployment in the wrong order --- services/document-updater/app.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 740cd59da4..41cab59680 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -38,6 +38,9 @@ app.param 'doc_id', (req, res, next, doc_id) -> next new Error("invalid doc id") app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc +# temporarily keep the GET method for backwards compatibility +app.get '/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld +# will migrate to the POST method of get_and_flush_if_old instead app.post '/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld app.post '/project/:project_id/clearState', HttpController.clearProjectState app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc From 79c276ea77cd90bcc383d84a8f22712f60fe41f5 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 11 Oct 2017 17:00:01 +0100 Subject: [PATCH 337/769] update to ioredis 3 --- 
services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index f1787e4f41..4003960114 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -10,7 +10,7 @@ "async": "2.0.0-rc.5", "coffee-script": "1.4.0", "express": "3.3.4", - "ioredis": "^2.2.0", + "ioredis": "^3.1.4", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", From 3954ecf85ea1f80ea9100d2b4dd945eaf3ecfd78 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 12 Oct 2017 11:23:24 +0100 Subject: [PATCH 338/769] add missing timekeeper package --- services/document-updater/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index f1787e4f41..9486e3a5a1 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -35,6 +35,7 @@ "grunt-execute": "~0.1.5", "grunt-forever": "^0.4.7", "grunt-mocha-test": "~0.9.0", - "grunt-shell": "^1.3.0" + "grunt-shell": "^1.3.0", + "timekeeper": "^2.0.0" } } From d387f979bc2609aa99826f22211786394ac8faf2 Mon Sep 17 00:00:00 2001 From: Joe Green Date: Thu, 12 Oct 2017 16:53:31 +0100 Subject: [PATCH 339/769] only alert on master --- services/document-updater/Jenkinsfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 4ffd642356..f8282d465d 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -75,6 +75,10 @@ pipeline { post { failure { + when { + branch 'master' + } + mail(from: "${EMAIL_ALERT_FROM}", to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", From 595d4dae71a1122d711e6d9a297a358aa77a1133 Mon Sep 17 00:00:00 2001 From: Joe Green Date: Mon, 16 Oct 2017 14:09:20 +0100 Subject: [PATCH 340/769] Update Jenkinsfile --- services/document-updater/Jenkinsfile | 4 ---- 1 file changed, 4 deletions(-) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index f8282d465d..4ffd642356 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -75,10 +75,6 @@ pipeline { post { failure { - when { - branch 'master' - } - mail(from: "${EMAIL_ALERT_FROM}", to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", From 3df2d609481c876849544a8cf368e31128d225b1 Mon Sep 17 00:00:00 2001 From: Joe Green Date: Mon, 16 Oct 2017 14:14:06 +0100 Subject: [PATCH 341/769] Update Jenkinsfile --- services/document-updater/Jenkinsfile | 4 ---- 1 file changed, 4 deletions(-) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index f8282d465d..4ffd642356 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -75,10 +75,6 @@ pipeline { post { failure { - when { - branch 'master' - } - mail(from: "${EMAIL_ALERT_FROM}", to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", From 448f131011e78c4cd22d2a617397d44b2f0f1c28 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 20 Oct 2017 14:56:12 +0100 Subject: [PATCH 342/769] exit if mock servers fail to start --- 
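Context for the hunks below: app.listen's success callback never fires when
the bind fails; errors such as EADDRINUSE are emitted as an "error" event on
the returned http.Server, which crashes the process with an unhelpful stack
unless handled. A minimal standalone sketch of the pattern being adopted,
assuming express 3 where listen returns the server:

    server = app.listen 3015, -> console.log "mock api listening"
    server.on "error", (error) ->
      console.error "error starting mock api:", error.message
      process.exit(1)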
.../test/acceptance/coffee/helpers/MockTrackChangesApi.coffee | 3 +++ .../test/acceptance/coffee/helpers/MockWebApi.coffee | 3 +++ 2 files changed, 6 insertions(+) diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee index d6c2e05b3b..95caead368 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee @@ -15,6 +15,9 @@ module.exports = MockTrackChangesApi = app.listen 3015, (error) -> throw error if error? + .on "error", (error) -> + console.error "error starting MockTrackChangesApi:", error.message + process.exit(1) MockTrackChangesApi.run() diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index f2b8bce318..fabd4587e8 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -40,6 +40,9 @@ module.exports = MockWebApi = app.listen 3000, (error) -> throw error if error? + .on "error", (error) -> + console.error "error starting MockWebApi:", error.message + process.exit(1) MockWebApi.run() From 7c2a28c161a9f8e6a8ecc005dfc73960a5fd4caa Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 23 Oct 2017 13:33:14 +0100 Subject: [PATCH 343/769] fix use of timekeeper in unit tests --- .../test/unit/coffee/RedisManager/RedisManagerTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index f0b37ae986..82edbabbe9 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -323,9 +323,9 @@ describe "RedisManager", -> describe "with a consistent version", -> beforeEach -> + tk.freeze(new Date()) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback - tk.freeze(new Date()) afterEach -> tk.reset() From 02d3d1bd17dae65b2327487e5b42aa23573e6c71 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 23 Oct 2017 14:02:53 +0100 Subject: [PATCH 344/769] fix unit tests timekeeper must be called before SandboxedModule.require --- .../unit/coffee/RedisManager/RedisManagerTests.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 82edbabbe9..bc5f382874 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -13,6 +13,7 @@ describe "RedisManager", -> auth: () -> exec: sinon.stub() @rclient.multi = () => @rclient + tk.freeze(new Date()) @RedisManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } @@ -50,6 +51,9 @@ describe "RedisManager", -> "./Errors": Errors globals: JSON: @JSON = JSON + + afterEach -> + 
tk.reset() @doc_id = "doc-id-123" @project_id = "project-id-123" @@ -323,12 +327,8 @@ describe "RedisManager", -> describe "with a consistent version", -> beforeEach -> - tk.freeze(new Date()) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback - - afterEach -> - tk.reset() it "should get the current doc version to check for consistency", -> @RedisManager.getDocVersion From 05b93a629a276ef42b43833a58a66bbf8ada8d2c Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 29 Sep 2017 10:34:28 +0100 Subject: [PATCH 345/769] return pathname from PersistenceManager --- .../app/coffee/PersistenceManager.coffee | 4 ++-- .../PersistenceManagerTests.coffee | 14 ++++++++------ 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 1e168f5495..974dec5a2c 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -13,7 +13,7 @@ request = (require("requestretry")).defaults({ MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -42,7 +42,7 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no doc lines")) if !body.version? or not body.version instanceof Number return callback(new Error("web API response had no valid doc version")) - return callback null, body.lines, body.version, body.ranges + return callback null, body.lines, body.version, body.ranges, body.pathname else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) else diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index d6fa519b4c..925274ac2f 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -22,6 +22,7 @@ describe "PersistenceManager", -> @version = 42 @callback = sinon.stub() @ranges = { comments: "mock", entries: "mock" } + @pathname = '/a/b/c.tex' @Settings.apis = web: url: @url = "www.example.com" @@ -29,13 +30,14 @@ describe "PersistenceManager", -> pass: @pass = "password" describe "getDoc", -> - + describe "with a successful response from the web api", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify({ lines: @lines, version: @version, ranges: @ranges + pathname: @pathname, })) @PersistenceManager.getDoc(@project_id, @doc_id, @callback) @@ -56,7 +58,7 @@ describe "PersistenceManager", -> .should.equal true it "should call the callback with the doc lines, version and ranges", -> - @callback.calledWith(null, @lines, @version, @ranges).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @pathname).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -76,7 +78,7 @@ describe "PersistenceManager", -> beforeEach -> 
@request.callsArgWith(1, null, {statusCode: 404}, "") @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - + it "should return a NotFoundError", -> @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true @@ -87,7 +89,7 @@ describe "PersistenceManager", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") @PersistenceManager.getDoc(@project_id, @doc_id, @callback) - + it "should return an error", -> @callback.calledWith(new Error("web api error")).should.equal true @@ -155,7 +157,7 @@ describe "PersistenceManager", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 404}, "") @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) - + it "should return a NotFoundError", -> @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true @@ -166,7 +168,7 @@ describe "PersistenceManager", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) - + it "should return an error", -> @callback.calledWith(new Error("web api error")).should.equal true From cbdace73861234a432bca9891293bcf9b25ea955 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 29 Sep 2017 10:54:48 +0100 Subject: [PATCH 346/769] store pathname in Redis cache --- .../app/coffee/RedisManager.coffee | 13 +- .../config/settings.defaults.coffee | 1 + .../RedisManager/RedisManagerTests.coffee | 112 ++++++++++-------- 3 files changed, 74 insertions(+), 52 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b873f9bd6a..08f8e7595c 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -36,7 +36,7 @@ historyKeys = Settings.redis.history.key_schema module.exports = RedisManager = rclient: rclient - putDocInMemory : (project_id, doc_id, docLines, version, ranges, _callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> timer.done() @@ -61,6 +61,7 @@ module.exports = RedisManager = multi.set keys.ranges(doc_id:doc_id), ranges else multi.del keys.ranges(doc_id:doc_id) + multi.set keys.pathname(doc_id:doc_id), pathname multi.exec (error, result) -> return callback(error) if error? # check the hash computed on the redis server @@ -86,6 +87,7 @@ module.exports = RedisManager = multi.del keys.docVersion(doc_id:doc_id) multi.del keys.docHash(doc_id:doc_id) multi.del keys.ranges(doc_id:doc_id) + multi.del keys.pathname(doc_id:doc_id) multi.del keys.unflushedTime(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? 
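# Note on the hunk above: removeDocFromMemory has to delete every key that
# putDocInMemory writes, or flushed docs leave stale entries behind in redis.
# A sketch of that symmetry against this commit's key_schema (the loop form
# is illustrative; the real code spells out each del, including projectKey):
#   for keyFn in [keys.docLines, keys.docVersion, keys.docHash, keys.ranges, keys.pathname, keys.unflushedTime]
#     multi.del keyFn(doc_id: doc_id)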
@@ -106,7 +108,7 @@ module.exports = RedisManager = clearProjectState: (project_id, callback = (error) ->) -> rclient.del keys.projectState(project_id:project_id), callback - getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, unflushedTime) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, unflushedTime) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) @@ -114,8 +116,9 @@ module.exports = RedisManager = multi.get keys.docHash(doc_id:doc_id) multi.get keys.projectKey(doc_id:doc_id) multi.get keys.ranges(doc_id:doc_id) + multi.get keys.pathname(doc_id:doc_id) multi.get keys.unflushedTime(doc_id:doc_id) - multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, unflushedTime])-> + multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, unflushedTime])-> timeSpan = timer.done() return callback(error) if error? # check if request took too long and bail out. only do this for @@ -144,14 +147,14 @@ module.exports = RedisManager = # doc is not in redis, bail out if !docLines? - return callback null, docLines, version, ranges + return callback null, docLines, version, ranges, pathname, unflushedTime # doc should be in project set, check if missing (workaround for missing docs from putDoc) rclient.sadd keys.docsInProject(project_id:project_id), doc_id, (error, result) -> return callback(error) if error? if result isnt 0 # doc should already be in set logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" - callback null, docLines, version, ranges, unflushedTime + callback null, docLines, version, ranges, pathname, unflushedTime getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 9b800f2729..b1d9be6b2e 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -41,6 +41,7 @@ module.exports = projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" + pathname: ({doc_id}) -> "Pathname:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" # cluster: [{ diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index bc5f382874..62115f4bd3 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -31,6 +31,7 @@ describe "RedisManager", -> pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" + pathname: ({doc_id}) -> "Pathname:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" history: @@ -54,7 +55,7 @@ describe "RedisManager", -> afterEach -> tk.reset() - + @doc_id = "doc-id-123" @project_id = "project-id-123" @callback = sinon.stub() @@ -68,8 +69,9 @@ describe "RedisManager", -> @ranges = { comments: "mock", 
entries: "mock" } @json_ranges = JSON.stringify @ranges @unflushed_time = 12345 + @pathname = '/a/b/c.tex' @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @unflushed_time]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @unflushed_time]) @rclient.sadd = sinon.stub().yields(null, 0) describe "successfully", -> @@ -80,7 +82,7 @@ describe "RedisManager", -> @rclient.get .calledWith("doclines:#{@doc_id}") .should.equal true - + it "should get the version from", -> @rclient.get .calledWith("DocVersion:#{@doc_id}") @@ -101,6 +103,11 @@ describe "RedisManager", -> .calledWith("UnflushedTime:#{@doc_id}") .should.equal true + it "should get the pathname", -> + @rclient.get + .calledWith("Pathname:#{@doc_id}") + .should.equal true + it "should check if the document is in the DocsIn set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}") @@ -108,7 +115,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @unflushed_time) .should.equal true it 'should not log any errors', -> @@ -117,7 +124,7 @@ describe "RedisManager", -> describe "when the document is not present", -> beforeEach -> - @rclient.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null]) @rclient.sadd = sinon.stub().yields() @RedisManager.getDoc @project_id, @doc_id, @callback @@ -128,7 +135,7 @@ describe "RedisManager", -> it 'should return an empty result', -> @callback - .calledWithExactly(null, null, 0, {}) + .calledWithExactly(null, null, 0, {}, null, null) .should.equal true it 'should not log any errors', -> @@ -146,7 +153,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @unflushed_time) .should.equal true describe "with a corrupted document", -> @@ -167,11 +174,11 @@ describe "RedisManager", -> describe "with a slow request to redis", -> beforeEach -> - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges, @unflushed_time]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges, @pathname, @unflushed_time]) @clock = sinon.useFakeTimers(); @rclient.exec = (cb) => @clock.tick(6000); - cb(null, [@jsonlines, @version, @another_project_id, @json_ranges, @unflushed_time]) + cb(null, [@jsonlines, @version, @another_project_id, @json_ranges, @pathname, @unflushed_time]) @RedisManager.getDoc @project_id, @doc_id, @callback @@ -186,7 +193,7 @@ describe "RedisManager", -> describe "getDoc with an invalid project id", -> beforeEach -> @another_project_id = "project-id-456" - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @another_project_id, @json_ranges, @unflushed_time]) + @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @another_project_id, @json_ranges, @pathname, @unflushed_time]) @RedisManager.getDoc @project_id, @doc_id, @callback it 'should return an error', -> @@ -271,7 +278,7 @@ describe "RedisManager", -> @rclient.get = sinon.stub().callsArgWith(1, null, 
@version.toString()) @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) - + it "should return an error", -> @callback.calledWith(new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis")).should.equal true @@ -316,7 +323,7 @@ describe "RedisManager", -> @rclient.del = sinon.stub() @rclient.eval = sinon.stub() @RedisManager.getDocVersion = sinon.stub() - + @lines = ["one", "two", "three", "これは"] @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] @version = 42 @@ -329,17 +336,17 @@ describe "RedisManager", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback - + it "should get the current doc version to check for consistency", -> @RedisManager.getDocVersion .calledWith(@doc_id) .should.equal true - + it "should set the doclines", -> @rclient.eval .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true - + it "should set the version", -> @rclient.set .calledWith("DocVersion:#{@doc_id}", @version) @@ -349,7 +356,7 @@ describe "RedisManager", -> @rclient.set .calledWith("DocHash:#{@doc_id}", @hash) .should.equal true - + it "should set the ranges", -> @rclient.set .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) @@ -381,45 +388,45 @@ describe "RedisManager", -> it 'should not log any errors', -> @logger.error.calledWith() .should.equal false - + describe "with an inconsistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length - 1) @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback - + it "should not call multi.exec", -> @rclient.exec.called.should.equal false - + it "should call the callback with an error", -> @callback .calledWith(new Error("Version mismatch. 
'#{@doc_id}' is corrupted.")) .should.equal true - + describe "with no updates", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version) @RedisManager.updateDocument @doc_id, @lines, @version, [], @ranges, @callback - + it "should not do an rpush", -> @rclient.rpush .called .should.equal false - + it "should still set the doclines", -> @rclient.eval .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true - + describe "with empty ranges", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @doc_id, @lines, @version, @ops, {}, @callback - + it "should not set the ranges", -> @rclient.set .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal false - + it "should delete the ranges key", -> @rclient.del .calledWith("Ranges:#{@doc_id}") @@ -448,13 +455,13 @@ describe "RedisManager", -> afterEach -> @JSON.stringify = @_stringify - + it "should log an error", -> @logger.error.called.should.equal true it "should call the callback with an error", -> @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true - + describe "with ranges that are too big", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @@ -478,16 +485,17 @@ describe "RedisManager", -> @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') @rclient.exec = sinon.stub().callsArgWith(0, null, [@hash]) @ranges = { comments: "mock", entries: "mock" } - + @pathname = '/a/b/c.tex' + describe "with non-empty ranges", -> beforeEach (done) -> - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, done - + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done + it "should set the lines", -> @rclient.eval .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true - + it "should set the version", -> @rclient.set .calledWith("DocVersion:#{@doc_id}", @version) @@ -497,17 +505,22 @@ describe "RedisManager", -> @rclient.set .calledWith("DocHash:#{@doc_id}", @hash) .should.equal true - + it "should set the ranges", -> @rclient.set .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal true - + it "should set the project_id for the doc", -> @rclient.set .calledWith("ProjectId:#{@doc_id}", @project_id) .should.equal true - + + it "should set the pathname for the doc", -> + @rclient.set + .calledWith("Pathname:#{@doc_id}", @pathname) + .should.equal true + it "should add the doc_id to the project set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}", @doc_id) @@ -516,16 +529,16 @@ describe "RedisManager", -> it 'should not log any errors', -> @logger.error.calledWith() .should.equal false - + describe "with empty ranges", -> beforeEach (done) -> - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, done - + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, done + it "should delete the ranges key", -> @rclient.del .calledWith("Ranges:#{@doc_id}") .should.equal true - + it "should not set the ranges", -> @rclient.set .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) @@ -534,7 +547,7 @@ describe "RedisManager", -> describe "with a corrupted write", -> beforeEach (done) -> @rclient.exec = sinon.stub().callsArgWith(0, null, ["INVALID-HASH-VALUE"]) - @RedisManager.putDocInMemory 
@project_id, @doc_id, @lines, @version, @ranges, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done it 'should log a hash error', -> @logger.error.calledWith() @@ -544,21 +557,21 @@ describe "RedisManager", -> beforeEach -> @_stringify = JSON.stringify @JSON.stringify = () -> return '["bad bytes! \u0000 <- here"]' - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @callback + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @callback afterEach -> @JSON.stringify = @_stringify - + it "should log an error", -> @logger.error.called.should.equal true it "should call the callback with an error", -> @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true - + describe "with ranges that are too big", -> beforeEach -> @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")) - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @callback + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @callback it 'should log an error', -> @logger.error.called.should.equal true @@ -572,12 +585,12 @@ describe "RedisManager", -> @rclient.srem = sinon.stub() @rclient.exec.yields() @RedisManager.removeDocFromMemory @project_id, @doc_id, done - + it "should delete the lines", -> @rclient.del .calledWith("doclines:#{@doc_id}") .should.equal true - + it "should delete the version", -> @rclient.del .calledWith("DocVersion:#{@doc_id}") @@ -592,17 +605,22 @@ describe "RedisManager", -> @rclient.del .calledWith("UnflushedTime:#{@doc_id}") .should.equal true - + it "should delete the project_id for the doc", -> @rclient.del .calledWith("ProjectId:#{@doc_id}") .should.equal true - + it "should remove the doc_id from the project set", -> @rclient.srem .calledWith("DocsIn:#{@project_id}", @doc_id) .should.equal true + it "should delete the pathname for the doc", -> + @rclient.del + .calledWith("Pathname:#{@doc_id}") + .should.equal true + describe "clearProjectState", -> beforeEach (done) -> @rclient.del = sinon.stub().callsArg(1) From 748315aadc48b363b7038643b395ee3128f2ade7 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 29 Sep 2017 11:06:20 +0100 Subject: [PATCH 347/769] handle pathname in DocumentManager.getDoc --- .../app/coffee/DocumentManager.coffee | 20 +++--- .../DocumentManagerTests.coffee | 67 ++++++++++--------- 2 files changed, 44 insertions(+), 43 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 21251933d2..366fd645ef 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -17,14 +17,14 @@ module.exports = DocumentManager = timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, unflushedTime) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime) -> return callback(error) if error? if !lines? or !version? logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> return callback(error) if error? 
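# getDoc is a read-through cache: redis is consulted first, and on a miss the
# doc (now carrying its pathname) comes from the web API and is written back
# via putDocInMemory before the callback fires. In this commit the pathname
# is cached alongside the doc but not yet surfaced to getDoc's own callers.
# Sketch of the shape, using the signatures introduced here:
#   RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime) ->
#     if !lines? or !version?
#       PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) ->
#         RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, ->
#           callback null, lines, version, ranges, null, false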
logger.log {project_id, doc_id, lines, version}, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, (error) -> + RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, (error) -> return callback(error) if error? callback null, lines, version, ranges, null, false else @@ -35,7 +35,7 @@ module.exports = DocumentManager = callback = (args...) -> timer.done() _callback(args...) - + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> return callback(error) if error? if fromVersion == -1 @@ -57,7 +57,7 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, unflushedTime, alreadyLoaded) -> return callback(error) if error? - + if oldLines? and oldLines.length > 0 and oldLines[0].text? logger.log doc_id: doc_id, project_id: project_id, oldLines: oldLines, newLines: newLines, "document is JSON so not updating" return callback(null) @@ -115,7 +115,7 @@ module.exports = DocumentManager = DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> return callback(error) if error? - + # Flush in the background since it requires and http request # to track changes HistoryManager.flushDocChanges project_id, doc_id, (err) -> @@ -141,7 +141,7 @@ module.exports = DocumentManager = RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> return callback(error) if error? callback() - + deleteComment: (project_id, doc_id, comment_id, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.deleteComment") callback = (args...) -> @@ -159,7 +159,7 @@ module.exports = DocumentManager = callback() getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, unflushedTime, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> return callback(error) if error? # if doc was already loaded see if it needs to be flushed if alreadyLoaded and unflushedTime? 
and (Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE @@ -172,7 +172,7 @@ module.exports = DocumentManager = getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback - + getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback @@ -184,7 +184,7 @@ module.exports = DocumentManager = setDocWithLock: (project_id, doc_id, lines, source, user_id, undoing, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, undoing, callback - + flushDocIfLoadedWithLock: (project_id, doc_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.flushDocIfLoaded, project_id, doc_id, callback diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index aff1cf0bc6..30db63a923 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -27,6 +27,7 @@ describe "DocumentManager", -> @lines = ["one", "two", "three"] @version = 42 @ranges = { comments: "mock", entries: "mock" } + @pathname = '/a/b/c.tex' @unflushedTime = Date.now() describe "flushAndDeleteDoc", -> @@ -36,7 +37,7 @@ describe "DocumentManager", -> @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2) @HistoryManager.flushDocChanges = sinon.stub().callsArg(2) @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback - + it "should flush the doc", -> @DocumentManager.flushDocIfLoaded .calledWith(@project_id, @doc_id) @@ -52,12 +53,12 @@ describe "DocumentManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - + it "should flush to the history api", -> @HistoryManager.flushDocChanges .calledWith(@project_id, @doc_id) .should.equal true - + describe "flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> @@ -75,7 +76,7 @@ describe "DocumentManager", -> @PersistenceManager.setDoc .calledWith(@project_id, @doc_id, @lines, @version, @ranges) .should.equal true - + it "should call the callback without error", -> @callback.calledWith(null).should.equal true @@ -103,7 +104,7 @@ describe "DocumentManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - + describe "getDocAndRecentOps", -> describe "with a previous version specified", -> beforeEach -> @@ -146,18 +147,18 @@ describe "DocumentManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - + describe "getDoc", -> describe "when the doc exists in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @unflushedTime) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @unflushedTime) @DocumentManager.getDoc @project_id, @doc_id, @callback it "should get the doc from Redis", -> @RedisManager.getDoc .calledWith(@project_id, @doc_id) .should.equal true - + it "should call the callback 
with the doc info", -> @callback.calledWith(null, @lines, @version, @ranges, @unflushedTime, true).should.equal true @@ -166,8 +167,8 @@ describe "DocumentManager", -> describe "when the doc does not exist in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null, null) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) @RedisManager.putDocInMemory = sinon.stub().yields() @DocumentManager.getDoc @project_id, @doc_id, @callback @@ -183,7 +184,7 @@ describe "DocumentManager", -> it "should set the doc in Redis", -> @RedisManager.putDocInMemory - .calledWith(@project_id, @doc_id, @lines, @version, @ranges) + .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname) .should.equal true it "should call the callback with the doc info", -> @@ -191,7 +192,7 @@ describe "DocumentManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - + describe "setDoc", -> describe "with plain tex lines", -> beforeEach -> @@ -240,13 +241,13 @@ describe "DocumentManager", -> @DocumentManager.flushDocIfLoaded .calledWith(@project_id, @doc_id) .should.equal true - + it "should call the callback", -> @callback.calledWith(null).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - + describe "when not already loaded", -> beforeEach -> @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, null, false) @@ -263,21 +264,21 @@ describe "DocumentManager", -> it "should return the callback with an error", -> @callback.calledWith(new Error("No lines were passed to setDoc")) - + it "should not try to get the doc lines", -> @DocumentManager.getDoc.called.should.equal false - + describe "with the undoing flag", -> beforeEach -> # Copy ops so we don't interfere with other tests @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, true, @callback - + it "should set the undo flag on each op", -> for op in @ops op.u.should.equal true - + describe "acceptChanges", -> beforeEach -> @change_id = "mock-change-id" @@ -289,33 +290,33 @@ describe "DocumentManager", -> @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) @RangesManager.acceptChanges = sinon.stub().yields(null, @updated_ranges) @RedisManager.updateDocument = sinon.stub().yields() - + describe "successfully with a single change", -> beforeEach -> @DocumentManager.acceptChanges @project_id, @doc_id, [ @change_id ], @callback - + it "should get the document's current ranges", -> @DocumentManager.getDoc .calledWith(@project_id, @doc_id) .should.equal true - + it "should apply the accept change to the ranges", -> @RangesManager.acceptChanges .calledWith([ @change_id ], @ranges) .should.equal true - + it "should save the updated ranges", -> @RedisManager.updateDocument .calledWith(@doc_id, @lines, @version, [], @updated_ranges) .should.equal true - + it "should call the callback", -> @callback.called.should.equal true describe "successfully with multiple changes", -> beforeEach -> @DocumentManager.acceptChanges @project_id, @doc_id, @change_ids, @callback - + it "should apply the accept change to the ranges", 
-> @RangesManager.acceptChanges .calledWith(@change_ids, @ranges) @@ -328,11 +329,11 @@ describe "DocumentManager", -> it "should not save anything", -> @RedisManager.updateDocument.called.should.equal false - + it "should call the callback with a not found error", -> error = new Errors.NotFoundError("document not found: #{@doc_id}") @callback.calledWith(error).should.equal true - + describe "deleteComment", -> beforeEach -> @comment_id = "mock-comment-id" @@ -343,26 +344,26 @@ describe "DocumentManager", -> @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) @RangesManager.deleteComment = sinon.stub().yields(null, @updated_ranges) @RedisManager.updateDocument = sinon.stub().yields() - + describe "successfully", -> beforeEach -> @DocumentManager.deleteComment @project_id, @doc_id, @comment_id, @callback - + it "should get the document's current ranges", -> @DocumentManager.getDoc .calledWith(@project_id, @doc_id) .should.equal true - + it "should delete the comment from the ranges", -> @RangesManager.deleteComment .calledWith(@comment_id, @ranges) .should.equal true - + it "should save the updated ranges", -> @RedisManager.updateDocument .calledWith(@doc_id, @lines, @version, [], @updated_ranges) .should.equal true - + it "should call the callback", -> @callback.called.should.equal true @@ -373,7 +374,7 @@ describe "DocumentManager", -> it "should not save anything", -> @RedisManager.updateDocument.called.should.equal false - + it "should call the callback with a not found error", -> error = new Errors.NotFoundError("document not found: #{@doc_id}") @callback.calledWith(error).should.equal true @@ -389,7 +390,7 @@ describe "DocumentManager", -> describe "when the doc is in Redis", -> describe "and has changes to be flushed", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, Date.now() - 1e9, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, Date.now() - 1e9, true) @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback it "should get the doc", -> From 81f998afe931b16b2fe817962097cea0356f5591 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 29 Sep 2017 11:45:21 +0100 Subject: [PATCH 348/769] return pathname from DocumentManager.getDoc --- .../document-updater/app/coffee/DocumentManager.coffee | 8 ++++---- .../coffee/DocumentManager/DocumentManagerTests.coffee | 10 +++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 366fd645ef..ebd01adeda 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -11,7 +11,7 @@ RangesManager = require "./RangesManager" MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change module.exports = DocumentManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, unflushedTime, alreadyLoaded) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) ->) -> timer = new Metrics.Timer("docManager.getDoc") callback = (args...) 
-> timer.done() @@ -26,9 +26,9 @@ module.exports = DocumentManager = logger.log {project_id, doc_id, lines, version}, "got doc from persistence API" RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, (error) -> return callback(error) if error? - callback null, lines, version, ranges, null, false + callback null, lines, version, ranges, pathname, null, false else - callback null, lines, version, ranges, unflushedTime, true + callback null, lines, version, ranges, pathname, unflushedTime, true getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, ranges) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") @@ -55,7 +55,7 @@ module.exports = DocumentManager = return callback(new Error("No lines were provided to setDoc")) UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, unflushedTime, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> return callback(error) if error? if oldLines? and oldLines.length > 0 and oldLines[0].text? diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 30db63a923..ae99ac2b8d 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -160,7 +160,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, @unflushedTime, true).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @pathname, @unflushedTime, true).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -188,7 +188,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, null, false).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @pathname, null, false).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -199,7 +199,7 @@ describe "DocumentManager", -> @beforeLines = ["before", "lines"] @afterLines = ["after", "lines"] @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @unflushedTime, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @pathname, @unflushedTime, true) @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) @@ -250,7 +250,7 @@ describe "DocumentManager", -> describe "when not already loaded", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, null, false) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @pathname, null, false) @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, false, @callback it "should flush and delete the doc from the doc updater", -> @@ -408,7 +408,7 @@ describe "DocumentManager", -> describe "and has only 
changes that don't need to be flushed", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, Date.now() - 100, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, Date.now() - 100, true) @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback it "should get the doc", -> From c27df0bfef078543dbebe8e8f2e92c4f73ae44ac Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Thu, 5 Oct 2017 10:20:58 +0100 Subject: [PATCH 349/769] split apart multi and rclient in tests --- .../RedisManager/RedisManagerTests.coffee | 121 +++++++++--------- 1 file changed, 60 insertions(+), 61 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 62115f4bd3..b76eba319e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -9,10 +9,8 @@ tk = require "timekeeper" describe "RedisManager", -> beforeEach -> - @rclient = - auth: () -> - exec: sinon.stub() - @rclient.multi = () => @rclient + @multi = exec: sinon.stub() + @rclient = multi: () => @multi tk.freeze(new Date()) @RedisManager = SandboxedModule.require modulePath, requires: @@ -70,8 +68,8 @@ describe "RedisManager", -> @json_ranges = JSON.stringify @ranges @unflushed_time = 12345 @pathname = '/a/b/c.tex' - @rclient.get = sinon.stub() - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @unflushed_time]) + @multi.get = sinon.stub() + @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @unflushed_time]) @rclient.sadd = sinon.stub().yields(null, 0) describe "successfully", -> @@ -79,32 +77,32 @@ describe "RedisManager", -> @RedisManager.getDoc @project_id, @doc_id, @callback it "should get the lines from redis", -> - @rclient.get + @multi.get .calledWith("doclines:#{@doc_id}") .should.equal true it "should get the version from", -> - @rclient.get + @multi.get .calledWith("DocVersion:#{@doc_id}") .should.equal true it 'should get the hash', -> - @rclient.get + @multi.get .calledWith("DocHash:#{@doc_id}") .should.equal true it "should get the ranges", -> - @rclient.get + @multi.get .calledWith("Ranges:#{@doc_id}") .should.equal true it "should get the unflushed time", -> - @rclient.get + @multi.get .calledWith("UnflushedTime:#{@doc_id}") .should.equal true it "should get the pathname", -> - @rclient.get + @multi.get .calledWith("Pathname:#{@doc_id}") .should.equal true @@ -124,7 +122,7 @@ describe "RedisManager", -> describe "when the document is not present", -> beforeEach -> - @rclient.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null]) + @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null]) @rclient.sadd = sinon.stub().yields() @RedisManager.getDoc @project_id, @doc_id, @callback @@ -159,7 +157,7 @@ describe "RedisManager", -> describe "with a corrupted document", -> beforeEach -> @badHash = "INVALID-HASH-VALUE" - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges]) @RedisManager.getDoc @project_id, @doc_id, 
@callback it 'should log a hash error', -> @@ -174,9 +172,9 @@ describe "RedisManager", -> describe "with a slow request to redis", -> beforeEach -> - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges, @pathname, @unflushed_time]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges, @pathname, @unflushed_time]) @clock = sinon.useFakeTimers(); - @rclient.exec = (cb) => + @multi.exec = (cb) => @clock.tick(6000); cb(null, [@jsonlines, @version, @another_project_id, @json_ranges, @pathname, @unflushed_time]) @@ -193,7 +191,7 @@ describe "RedisManager", -> describe "getDoc with an invalid project id", -> beforeEach -> @another_project_id = "project-id-456" - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @another_project_id, @json_ranges, @pathname, @unflushed_time]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @another_project_id, @json_ranges, @pathname, @unflushed_time]) @RedisManager.getDoc @project_id, @doc_id, @callback it 'should return an error', -> @@ -316,12 +314,12 @@ describe "RedisManager", -> describe "updateDocument", -> beforeEach -> - @rclient.set = sinon.stub() - @rclient.rpush = sinon.stub() - @rclient.expire = sinon.stub() - @rclient.ltrim = sinon.stub() - @rclient.del = sinon.stub() - @rclient.eval = sinon.stub() + @multi.set = sinon.stub() + @multi.rpush = sinon.stub() + @multi.expire = sinon.stub() + @multi.ltrim = sinon.stub() + @multi.del = sinon.stub() + @multi.eval = sinon.stub() @RedisManager.getDocVersion = sinon.stub() @lines = ["one", "two", "three", "これは"] @@ -330,7 +328,7 @@ describe "RedisManager", -> @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') @ranges = { comments: "mock", entries: "mock" } - @rclient.exec = sinon.stub().callsArg(0, null, [@hash]) + @multi.exec = sinon.stub().callsArg(0, null, [@hash]) describe "with a consistent version", -> beforeEach -> @@ -343,42 +341,42 @@ describe "RedisManager", -> .should.equal true it "should set the doclines", -> - @rclient.eval + @multi.eval .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true it "should set the version", -> - @rclient.set + @multi.set .calledWith("DocVersion:#{@doc_id}", @version) .should.equal true it "should set the hash", -> - @rclient.set + @multi.set .calledWith("DocHash:#{@doc_id}", @hash) .should.equal true it "should set the ranges", -> - @rclient.set + @multi.set .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal true it "should set the unflushed time", -> - @rclient.set + @multi.set .calledWith("UnflushedTime:#{@doc_id}", Date.now(), "NX") .should.equal true it "should push the doc op into the doc ops list", -> - @rclient.rpush + @multi.rpush .calledWith("DocOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) .should.equal true it "should renew the expiry ttl on the doc ops array", -> - @rclient.expire + @multi.expire .calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) .should.equal true it "should truncate the list to 100 members", -> - @rclient.ltrim + @multi.ltrim .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) .should.equal true @@ -395,7 +393,7 @@ describe "RedisManager", -> @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback it "should not call multi.exec", -> - @rclient.exec.called.should.equal false + 
@multi.exec.called.should.equal false it "should call the callback with an error", -> @callback @@ -404,16 +402,17 @@ describe "RedisManager", -> describe "with no updates", -> beforeEach -> + @multi.rpush = sinon.stub().callsArg(1) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version) @RedisManager.updateDocument @doc_id, @lines, @version, [], @ranges, @callback it "should not do an rpush", -> - @rclient.rpush + @multi.rpush .called .should.equal false it "should still set the doclines", -> - @rclient.eval + @multi.eval .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true @@ -423,19 +422,19 @@ describe "RedisManager", -> @RedisManager.updateDocument @doc_id, @lines, @version, @ops, {}, @callback it "should not set the ranges", -> - @rclient.set + @multi.set .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal false it "should delete the ranges key", -> - @rclient.del + @multi.del .calledWith("Ranges:#{@doc_id}") .should.equal true describe "with a corrupted write", -> beforeEach -> @badHash = "INVALID-HASH-VALUE" - @rclient.exec = sinon.stub().callsArgWith(0, null, [@badHash]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@badHash]) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback @@ -476,14 +475,14 @@ describe "RedisManager", -> describe "putDocInMemory", -> beforeEach -> - @rclient.set = sinon.stub() + @multi.set = sinon.stub() @rclient.sadd = sinon.stub().yields() - @rclient.del = sinon.stub() - @rclient.eval = sinon.stub() + @multi.del = sinon.stub() + @multi.eval = sinon.stub() @lines = ["one", "two", "three", "これは"] @version = 42 @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') - @rclient.exec = sinon.stub().callsArgWith(0, null, [@hash]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@hash]) @ranges = { comments: "mock", entries: "mock" } @pathname = '/a/b/c.tex' @@ -492,32 +491,32 @@ describe "RedisManager", -> @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done it "should set the lines", -> - @rclient.eval + @multi.eval .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true it "should set the version", -> - @rclient.set + @multi.set .calledWith("DocVersion:#{@doc_id}", @version) .should.equal true it "should set the hash", -> - @rclient.set + @multi.set .calledWith("DocHash:#{@doc_id}", @hash) .should.equal true it "should set the ranges", -> - @rclient.set + @multi.set .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal true it "should set the project_id for the doc", -> - @rclient.set + @multi.set .calledWith("ProjectId:#{@doc_id}", @project_id) .should.equal true it "should set the pathname for the doc", -> - @rclient.set + @multi.set .calledWith("Pathname:#{@doc_id}", @pathname) .should.equal true @@ -535,18 +534,18 @@ describe "RedisManager", -> @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, done it "should delete the ranges key", -> - @rclient.del + @multi.del .calledWith("Ranges:#{@doc_id}") .should.equal true it "should not set the ranges", -> - @rclient.set + @multi.set .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal false describe "with a corrupted write", -> beforeEach (done) -> - @rclient.exec = sinon.stub().callsArgWith(0, null, 
["INVALID-HASH-VALUE"]) + @multi.exec = sinon.stub().callsArgWith(0, null, ["INVALID-HASH-VALUE"]) @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done it 'should log a hash error', -> @@ -581,43 +580,43 @@ describe "RedisManager", -> describe "removeDocFromMemory", -> beforeEach (done) -> - @rclient.del = sinon.stub() - @rclient.srem = sinon.stub() - @rclient.exec.yields() + @multi.del = sinon.stub() + @multi.srem = sinon.stub() + @multi.exec.yields() @RedisManager.removeDocFromMemory @project_id, @doc_id, done it "should delete the lines", -> - @rclient.del + @multi.del .calledWith("doclines:#{@doc_id}") .should.equal true it "should delete the version", -> - @rclient.del + @multi.del .calledWith("DocVersion:#{@doc_id}") .should.equal true it "should delete the hash", -> - @rclient.del + @multi.del .calledWith("DocHash:#{@doc_id}") .should.equal true it "should delete the unflushed time", -> - @rclient.del + @multi.del .calledWith("UnflushedTime:#{@doc_id}") .should.equal true it "should delete the project_id for the doc", -> - @rclient.del + @multi.del .calledWith("ProjectId:#{@doc_id}") .should.equal true it "should remove the doc_id from the project set", -> - @rclient.srem + @multi.srem .calledWith("DocsIn:#{@project_id}", @doc_id) .should.equal true it "should delete the pathname for the doc", -> - @rclient.del + @multi.del .calledWith("Pathname:#{@doc_id}") .should.equal true From d003aef31c97a01c16f3993bd462740ced9703d8 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 29 Sep 2017 12:44:36 +0100 Subject: [PATCH 350/769] conditionally enqueue history updates for project --- .../app/coffee/DocumentManager.coffee | 4 +- .../app/coffee/RedisManager.coffee | 18 +- .../app/coffee/UpdateManager.coffee | 2 +- .../config/settings.defaults.coffee | 7 + .../DocumentManagerTests.coffee | 4 +- .../RedisManager/RedisManagerTests.coffee | 164 +++++++++++------- .../UpdateManager/UpdateManagerTests.coffee | 34 ++-- 7 files changed, 146 insertions(+), 87 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index ebd01adeda..e74e436fde 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -138,7 +138,7 @@ module.exports = DocumentManager = return callback(new Errors.NotFoundError("document not found: #{doc_id}")) RangesManager.acceptChanges change_ids, ranges, (error, new_ranges) -> return callback(error) if error? - RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> + RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, (error) -> return callback(error) if error? callback() @@ -154,7 +154,7 @@ module.exports = DocumentManager = return callback(new Errors.NotFoundError("document not found: #{doc_id}")) RangesManager.deleteComment comment_id, ranges, (error, new_ranges) -> return callback(error) if error? - RedisManager.updateDocument doc_id, lines, version, [], new_ranges, (error) -> + RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, (error) -> return callback(error) if error? 
callback() diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 08f8e7595c..0c15391ac4 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -32,6 +32,7 @@ MAX_RANGES_SIZE = 3 * MEGABYTES keys = Settings.redis.documentupdater.key_schema historyKeys = Settings.redis.history.key_schema +projectHistoryKeys = Settings.redis.project_history.key_schema module.exports = RedisManager = rclient: rclient @@ -204,7 +205,7 @@ module.exports = RedisManager = DOC_OPS_TTL: 60 * minutes DOC_OPS_MAX_LENGTH: 100 - updateDocument : (doc_id, docLines, newVersion, appliedOps = [], ranges, callback = (error) ->)-> + updateDocument : (project_id, doc_id, docLines, newVersion, appliedOps = [], ranges, callback = (error) ->)-> RedisManager.getDocVersion doc_id, (error, currentVersion) -> return callback(error) if error? if currentVersion + appliedOps.length != newVersion @@ -262,9 +263,16 @@ module.exports = RedisManager = writeHash = result?[0] if logHashWriteErrors and writeHash? and writeHash isnt newHash logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, docLines:newDocLines, "hash mismatch on updateDocument" - # return length of uncompressedHistoryOps queue (index 7) - uncompressedHistoryOpsLength = result?[7] - return callback(null, uncompressedHistoryOpsLength) + + # length of uncompressedHistoryOps queue (index 7) + docUpdateCount = result[7] + + if jsonOps.length > 0 && Settings.apis?.project_history?.enabled + rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonOps..., (error, projectUpdateCount) -> + callback null, docUpdateCount, projectUpdateCount + else + callback null, docUpdateCount + clearUnflushedTime: (doc_id, callback = (error) ->) -> rclient.del keys.unflushedTime(doc_id:doc_id), callback @@ -280,7 +288,7 @@ module.exports = RedisManager = # Most doc will have empty ranges so don't fill redis with lots of '{}' keys jsonRanges = null return callback null, jsonRanges - + _deserializeRanges: (ranges) -> if !ranges? or ranges == "" return {} diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 3a69761977..b5ced9544d 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -80,7 +80,7 @@ module.exports = UpdateManager = RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> profile.log("RangesManager.applyUpdate") return callback(error) if error? - RedisManager.updateDocument doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, historyOpsLength) -> + RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, historyOpsLength) -> profile.log("RedisManager.updateDocument") return callback(error) if error? 
HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, historyOpsLength, (error) -> diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index b1d9be6b2e..dcfedc6b81 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -14,6 +14,9 @@ module.exports = pass: "password" trackchanges: url: "http://localhost:3015" + project_history: + url: "http://localhost:3054" + enabled: true redis: realtime: @@ -65,6 +68,10 @@ module.exports = key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" + + project_history: + key_schema: + projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" # cluster: [{ # port: "7000" # host: "localhost" diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index ae99ac2b8d..3703058693 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -307,7 +307,7 @@ describe "DocumentManager", -> it "should save the updated ranges", -> @RedisManager.updateDocument - .calledWith(@doc_id, @lines, @version, [], @updated_ranges) + .calledWith(@project_id, @doc_id, @lines, @version, [], @updated_ranges) .should.equal true it "should call the callback", -> @@ -361,7 +361,7 @@ describe "DocumentManager", -> it "should save the updated ranges", -> @RedisManager.updateDocument - .calledWith(@doc_id, @lines, @version, [], @updated_ranges) + .calledWith(@project_id, @doc_id, @lines, @version, [], @updated_ranges) .should.equal true it "should call the callback", -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index b76eba319e..f5bf3843fa 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -15,8 +15,10 @@ describe "RedisManager", -> @RedisManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - "settings-sharelatex": { + "settings-sharelatex": @settings = { documentupdater: {logHashErrors: {write:true, read:true}} + apis: + project_history: {enabled: true} redis: documentupdater: key_schema: @@ -36,6 +38,9 @@ describe "RedisManager", -> key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" + project_history: + key_schema: + projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" } "redis-sharelatex": createClient: () => @rclient @@ -314,83 +319,117 @@ describe "RedisManager", -> describe "updateDocument", -> beforeEach -> + @lines = ["one", "two", "three", "これは"] + @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] + @version = 42 + @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') + @ranges = { comments: "mock", entries: "mock" } + @doc_update_list_length = sinon.stub() + @project_update_list_length = 
sinon.stub() + + @RedisManager.getDocVersion = sinon.stub() @multi.set = sinon.stub() @multi.rpush = sinon.stub() @multi.expire = sinon.stub() @multi.ltrim = sinon.stub() @multi.del = sinon.stub() @multi.eval = sinon.stub() - @RedisManager.getDocVersion = sinon.stub() - - @lines = ["one", "two", "three", "これは"] - @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] - @version = 42 - @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') - @ranges = { comments: "mock", entries: "mock" } - - @multi.exec = sinon.stub().callsArg(0, null, [@hash]) + @multi.exec = sinon.stub().callsArgWith(0, null, + [@hash, null, null, null, null, null, null, @doc_update_list_length] + ) + @rclient.rpush = sinon.stub().callsArgWith(@ops.length + 1, null, @project_update_list_length) describe "with a consistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback - it "should get the current doc version to check for consistency", -> - @RedisManager.getDocVersion - .calledWith(@doc_id) - .should.equal true + describe "with project history enabled", -> + beforeEach -> + @settings.apis.project_history.enabled = true + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback - it "should set the doclines", -> - @multi.eval - .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) - .should.equal true + it "should get the current doc version to check for consistency", -> + @RedisManager.getDocVersion + .calledWith(@doc_id) + .should.equal true - it "should set the version", -> - @multi.set - .calledWith("DocVersion:#{@doc_id}", @version) - .should.equal true + it "should set the doclines", -> + @multi.eval + .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) + .should.equal true - it "should set the hash", -> - @multi.set - .calledWith("DocHash:#{@doc_id}", @hash) - .should.equal true + it "should set the version", -> + @multi.set + .calledWith("DocVersion:#{@doc_id}", @version) + .should.equal true - it "should set the ranges", -> - @multi.set - .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) - .should.equal true + it "should set the hash", -> + @multi.set + .calledWith("DocHash:#{@doc_id}", @hash) + .should.equal true - it "should set the unflushed time", -> - @multi.set - .calledWith("UnflushedTime:#{@doc_id}", Date.now(), "NX") - .should.equal true + it "should set the ranges", -> + @multi.set + .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) + .should.equal true - it "should push the doc op into the doc ops list", -> - @multi.rpush - .calledWith("DocOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) - .should.equal true + it "should set the unflushed time", -> + @multi.set + .calledWith("UnflushedTime:#{@doc_id}", Date.now(), "NX") + .should.equal true - it "should renew the expiry ttl on the doc ops array", -> - @multi.expire - .calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) - .should.equal true + it "should push the doc op into the doc ops list", -> + @multi.rpush + .calledWith("DocOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) + .should.equal true - it "should truncate the list to 100 members", -> - @multi.ltrim - .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) - .should.equal true + it "should renew the expiry ttl on the doc ops 
array", -> + @multi.expire + .calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) + .should.equal true - it "should call the callback", -> - @callback.called.should.equal true + it "should truncate the list to 100 members", -> + @multi.ltrim + .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) + .should.equal true - it 'should not log any errors', -> - @logger.error.calledWith() - .should.equal false + it "should push the updates into the history ops list", -> + @multi.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) + .should.equal true + + it "should push the updates into the project history ops list", -> + @rclient.rpush + .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) + .should.equal true + + it "should call the callback", -> + @callback + .calledWith(null, @doc_update_list_length, @project_update_list_length) + .should.equal true + + it 'should not log any errors', -> + @logger.error.calledWith() + .should.equal false + + describe "with project history disabled", -> + beforeEach -> + @rclient.rpush = sinon.stub() + @settings.apis.project_history.enabled = false + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback + + it "should not push the updates into the project history ops list", -> + @rclient.rpush.called.should.equal false + + it "should call the callback", -> + @callback + .calledWith(null, @doc_update_list_length) + .should.equal true describe "with an inconsistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length - 1) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback it "should not call multi.exec", -> @multi.exec.called.should.equal false @@ -402,15 +441,20 @@ describe "RedisManager", -> describe "with no updates", -> beforeEach -> - @multi.rpush = sinon.stub().callsArg(1) + @rclient.rpush = sinon.stub().callsArgWith(1, null, @project_update_list_length) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version) - @RedisManager.updateDocument @doc_id, @lines, @version, [], @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, [], @ranges, @callback - it "should not do an rpush", -> + it "should not try to enqueue doc updates", -> @multi.rpush .called .should.equal false + it "should not try to enqueue project updates", -> + @rclient.rpush + .called + .should.equal false + it "should still set the doclines", -> @multi.eval .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) @@ -419,7 +463,7 @@ describe "RedisManager", -> describe "with empty ranges", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, {}, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, {}, @callback it "should not set the ranges", -> @multi.set @@ -436,7 +480,7 @@ describe "RedisManager", -> @badHash = "INVALID-HASH-VALUE" @multi.exec = sinon.stub().callsArgWith(0, null, [@badHash]) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, 
@ops, @ranges, @callback it 'should log a hash error', -> @logger.error.calledWith() @@ -450,7 +494,7 @@ describe "RedisManager", -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @_stringify = JSON.stringify @JSON.stringify = () -> return '["bad bytes! \u0000 <- here"]' - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback afterEach -> @JSON.stringify = @_stringify @@ -465,7 +509,7 @@ describe "RedisManager", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")) - @RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback it 'should log an error', -> @logger.error.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 4f456992f0..45653f99b5 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -60,14 +60,14 @@ describe "UpdateManager", -> it "should process the outstanding updates", -> @UpdateManager.processOutstandingUpdates.calledWith(@project_id, @doc_id).should.equal true - + it "should do everything with the lock acquired", -> @UpdateManager.processOutstandingUpdates.calledAfter(@LockManager.tryLock).should.equal true @UpdateManager.processOutstandingUpdates.calledBefore(@LockManager.releaseLock).should.equal true it "should continue processing new updates that may have come in", -> @UpdateManager.continueProcessingUpdatesWithLock.calledWith(@project_id, @doc_id).should.equal true - + it "should return the callback", -> @callback.called.should.equal true @@ -78,7 +78,7 @@ describe "UpdateManager", -> it "should free the lock", -> @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true - + it "should return the error in the callback", -> @callback.calledWith(@error).should.equal true @@ -93,7 +93,7 @@ describe "UpdateManager", -> it "should not process the updates", -> @UpdateManager.processOutstandingUpdates.called.should.equal false - + describe "continueProcessingUpdatesWithLock", -> describe "when there are outstanding updates", -> beforeEach -> @@ -137,7 +137,7 @@ describe "UpdateManager", -> @UpdateManager.applyUpdate .calledWith(@project_id, @doc_id, update) .should.equal true - + it "should call the callback", -> @callback.called.should.equal true @@ -154,7 +154,7 @@ describe "UpdateManager", -> it "should call the callback", -> @callback.called.should.equal true - + describe "applyUpdate", -> beforeEach -> @update = {op: [{p: 42, i: "foo"}]} @@ -170,16 +170,16 @@ describe "UpdateManager", -> @RedisManager.updateDocument = sinon.stub().yields() @RealTimeRedisManager.sendData = sinon.stub() @HistoryManager.recordAndFlushHistoryOps = sinon.stub().callsArg(4) - + describe "normally", -> beforeEach -> @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback - + it "should apply the updates via ShareJS", -> @ShareJsUpdateManager.applyUpdate .calledWith(@project_id, @doc_id, @update, @lines, @version) .should.equal true - + it "should update the ranges", -> 
@RangesManager.applyUpdate .calledWith(@project_id, @doc_id, @ranges, @appliedOps, @updatedDocLines) @@ -187,9 +187,9 @@ it "should save the document", -> @RedisManager.updateDocument - .calledWith(@doc_id, @updatedDocLines, @version, @appliedOps, @updated_ranges) + .calledWith(@project_id, @doc_id, @updatedDocLines, @version, @appliedOps, @updated_ranges) .should.equal true it "should push the applied ops into the history queue", -> @HistoryManager.recordAndFlushHistoryOps .calledWith(@project_id, @doc_id, @appliedOps) .should.equal true @@ -207,16 +207,16 @@ @ShareJsUpdateManager.applyUpdate .calledWith(@project_id, @doc_id, @update) .should.equal true - + # \uFFFD is 'replacement character' @update.op[0].i.should.equal "\uFFFD\uFFFD" - + describe "with an error", -> beforeEach -> @error = new Error("something went wrong") @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(@error) @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback - + it "should call RealTimeRedisManager.sendData with the error", -> @RealTimeRedisManager.sendData .calledWith({ @@ -228,7 +228,7 @@ it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true - + describe "lockUpdatesAndDo", -> beforeEach -> @@ -283,7 +283,7 @@ it "should free the lock", -> @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true - + it "should return the error in the callback", -> @callback.calledWith(@error).should.equal true @@ -295,7 +295,7 @@ it "should free the lock", -> @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true - + it "should return the error in the callback", -> @callback.calledWith(@error).should.equal true From 962520fca8e2e485e6872e1c2e553ca2f59c6d82 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Thu, 5 Oct 2017 13:45:29 +0100 Subject: [PATCH 351/769] flush track-changes and project-history in HistoryManager --- .../app/coffee/DocumentManager.coffee | 7 +- .../app/coffee/HistoryManager.coffee | 86 +++++--- .../app/coffee/UpdateManager.coffee | 7 +- .../DocumentManagerTests.coffee | 4 +- .../HistoryManager/HistoryManagerTests.coffee | 205 ++++++++++++------ .../UpdateManager/UpdateManagerTests.coffee | 10 +- 6 files changed, 205 insertions(+), 114 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index e74e436fde..5ddca2e6a8 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -116,11 +116,8 @@ module.exports = DocumentManager = DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> return callback(error) if error? - # Flush in the background since it requires and http request - # to track changes - HistoryManager.flushDocChanges project_id, doc_id, (err) -> - if err? - logger.err {err, project_id, doc_id}, "error flushing to track changes" + # Flush in the background since it requires an http request + HistoryManager.flushChangesAsync project_id, doc_id RedisManager.removeDocFromMemory project_id, doc_id, (error) -> return callback(error) if error?
diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 9f78b5af4b..c693a6a599 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -1,45 +1,73 @@ -settings = require "settings-sharelatex" +Settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" async = require "async" HistoryRedisManager = require "./HistoryRedisManager" module.exports = HistoryManager = - flushDocChanges: (project_id, doc_id, callback = (error) ->) -> - if !settings.apis?.trackchanges? - logger.warn doc_id: doc_id, "track changes API is not configured, so not flushing" - return callback() + flushChangesAsync: (project_id, doc_id) -> + HistoryManager._flushDocChangesAsync project_id, doc_id + if Settings.apis?.project_history?.enabled + HistoryManager._flushProjectChangesAsync project_id - url = "#{settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" - logger.log project_id: project_id, doc_id: doc_id, url: url, "flushing doc in track changes api" + _flushDocChangesAsync: (project_id, doc_id) -> + if !Settings.apis?.trackchanges? + logger.warn { doc_id }, "track changes API is not configured, so not flushing" + return + + url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" + logger.log { project_id, doc_id, url }, "flushing doc in track changes api" + request.post url, (error, res, body)-> if error? - return callback(error) - else if res.statusCode >= 200 and res.statusCode < 300 - return callback(null) - else - error = new Error("track changes api returned a failure status code: #{res.statusCode}") - return callback(error) + logger.error( + { error, doc_id, project_id}, + "error flushing doc to track changes api" + ) + else if res.statusCode < 200 or res.statusCode >= 300 + logger.error( + { doc_id, project_id }, + "track changes api returned a failure status code: #{res.statusCode}" + ) + + _flushProjectChangesAsync: (project_id) -> + return if !Settings.apis?.project_history? + + url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush" + logger.log { project_id, url }, "flushing project in project history api" + request.post url, (error, res, body)-> + if error? + logger.error { error, project_id}, "error flushing project to project history api" + else if res.statusCode < 200 or res.statusCode >= 300 + logger.error { project_id }, "project history api returned a failure status code: #{res.statusCode}" FLUSH_EVERY_N_OPS: 100 - recordAndFlushHistoryOps: (project_id, doc_id, ops = [], length, callback = (error) ->) -> + recordAndFlushHistoryOps: (project_id, doc_id, ops = [], doc_ops_length, project_ops_length, callback = (error) ->) -> if ops.length == 0 return callback() - HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) -> - return callback(error) if error? - return callback() if not length? # don't flush unless we know the length - # We want to flush every 100 ops, i.e. 100, 200, 300, etc - # Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these - # ops. If we've changed, then we've gone over a multiple of 100 and should flush.
- # (Most of the time, we will only hit 100 and then flushing will put us back to 0) - previousLength = length - ops.length - prevBlock = Math.floor(previousLength / HistoryManager.FLUSH_EVERY_N_OPS) - newBlock = Math.floor(length / HistoryManager.FLUSH_EVERY_N_OPS) - if newBlock != prevBlock + + if Settings.apis?.project_history?.enabled + if HistoryManager._shouldFlushHistoryOps(project_ops_length, ops, HistoryManager.FLUSH_EVERY_N_OPS) # Do this in the background since it uses HTTP and so may be too # slow to wait for when processing a doc update. - logger.log length: length, doc_id: doc_id, project_id: project_id, "flushing track changes api" - HistoryManager.flushDocChanges project_id, doc_id, (error) -> - if error? - logger.error err: error, doc_id: doc_id, project_id: project_id, "error flushing doc to track changes api" + logger.log { project_ops_length, project_id }, "flushing project history api" + HistoryManager._flushProjectChangesAsync project_id + + HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) -> + return callback(error) if error? + if HistoryManager._shouldFlushHistoryOps(doc_ops_length, ops, HistoryManager.FLUSH_EVERY_N_OPS) + # Do this in the background since it uses HTTP and so may be too + # slow to wait for when processing a doc update. + logger.log { doc_ops_length, doc_id, project_id }, "flushing track changes api" + HistoryManager._flushDocChangesAsync project_id, doc_id callback() + + _shouldFlushHistoryOps: (length, ops, threshold) -> + return false if !length # don't flush unless we know the length + # We want to flush every 100 ops, i.e. 100, 200, 300, etc + # Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these + # ops. If we've changed, then we've gone over a multiple of 100 and should flush. + # (Most of the time, we will only hit 100 and then flushing will put us back to 0) + previousLength = length - ops.length + prevBlock = Math.floor(previousLength / threshold) + newBlock = Math.floor(length / threshold) + return newBlock != prevBlock diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index b5ced9544d..e821926015 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -80,13 +80,12 @@ module.exports = UpdateManager = RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> profile.log("RangesManager.applyUpdate") return callback(error) if error? - RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, historyOpsLength) -> + RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) -> profile.log("RedisManager.updateDocument") return callback(error) if error? - HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, historyOpsLength, (error) -> + HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, (error) -> profile.log("recordAndFlushHistoryOps") - return callback(error) if error? 
- callback() + callback(error) lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) -> profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id}) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 3703058693..ac0601b34b 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -35,7 +35,7 @@ describe "DocumentManager", -> beforeEach -> @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2) - @HistoryManager.flushDocChanges = sinon.stub().callsArg(2) + @HistoryManager.flushChangesAsync = sinon.stub() @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback it "should flush the doc", -> @@ -55,7 +55,7 @@ describe "DocumentManager", -> @Metrics.Timer::done.called.should.equal true it "should flush to the history api", -> - @HistoryManager.flushDocChanges + @HistoryManager.flushChangesAsync .calledWith(@project_id, @doc_id) .should.equal true diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 37e35ca285..4956a410b2 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -7,106 +7,171 @@ describe "HistoryManager", -> beforeEach -> @HistoryManager = SandboxedModule.require modulePath, requires: "request": @request = {} - "settings-sharelatex": @Settings = {} + "settings-sharelatex": @Settings = { + apis: + project_history: + enabled: true + url: "http://project_history.example.com" + trackchanges: + url: "http://trackchanges.example.com" + } "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./HistoryRedisManager": @HistoryRedisManager = {} @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() - describe "flushDocChanges", -> + describe "flushChangesAsync", -> beforeEach -> - @Settings.apis = - trackchanges: url: "http://trackchanges.example.com" + @HistoryManager._flushDocChangesAsync = sinon.stub() + @HistoryManager._flushProjectChangesAsync = sinon.stub() - describe "successfully", -> - beforeEach -> - @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) - @HistoryManager.flushDocChanges @project_id, @doc_id, @callback + @HistoryManager.flushChangesAsync(@project_id, @doc_id) - it "should send a request to the track changes api", -> - @request.post - .calledWith("#{@Settings.apis.trackchanges.url}/project/#{@project_id}/doc/#{@doc_id}/flush") - .should.equal true + it "flushes doc changes", -> + @HistoryManager._flushDocChangesAsync + .calledWith(@project_id, @doc_id) + .should.equal true - it "should return the callback", -> - @callback.calledWith(null).should.equal true + it "flushes project changes", -> + @HistoryManager._flushProjectChangesAsync + .calledWith(@project_id) + .should.equal true - describe "when the track changes api returns an error", -> - beforeEach -> - @request.post = sinon.stub().callsArgWith(1, null, statusCode: 500) - @HistoryManager.flushDocChanges @project_id, @doc_id, @callback + describe "_flushDocChangesAsync", -> + beforeEach -> + @request.post = 
sinon.stub().callsArgWith(1, null, statusCode: 204) - it "should return the callback with an error", -> - @callback.calledWith(new Error("track changes api return non-success code: 500")).should.equal true + @HistoryManager._flushDocChangesAsync @project_id, @doc_id + + it "should send a request to the track changes api", -> + @request.post + .calledWith("#{@Settings.apis.trackchanges.url}/project/#{@project_id}/doc/#{@doc_id}/flush") + .should.equal true + + describe "_flushProjectChangesAsync", -> + beforeEach -> + @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) + + @HistoryManager._flushProjectChangesAsync @project_id + + it "should send a request to the project history api", -> + @request.post + .calledWith("#{@Settings.apis.project_history.url}/project/#{@project_id}/flush") + .should.equal true describe "recordAndFlushHistoryOps", -> beforeEach -> - @ops = ["mock-ops"] - @HistoryManager.flushDocChanges = sinon.stub().callsArg(2) + @ops = [ 'mock-ops' ] + @project_ops_length = 10 + @doc_ops_length = 5 - describe "pushing the op", -> + @HistoryManager._flushProjectChangesAsync = sinon.stub() + @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3) + @HistoryManager._flushDocChangesAsync = sinon.stub() + + describe "with no ops", -> beforeEach -> - @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null) - @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 1, @callback + @HistoryManager.recordAndFlushHistoryOps( + @project_id, @doc_id, [], @doc_ops_length, @project_ops_length, @callback + ) - it "should push the ops into redis", -> + it "should not flush project changes", -> + @HistoryManager._flushProjectChangesAsync.called.should.equal false + + it "should not record doc has history ops", -> + @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false + + it "should not flush doc changes", -> + @HistoryManager._flushDocChangesAsync.called.should.equal false + + it "should call the callback", -> + @callback.called.should.equal true + + describe "with enough ops to flush project changes", -> + beforeEach -> + @HistoryManager._shouldFlushHistoryOps = sinon.stub() + @HistoryManager._shouldFlushHistoryOps.withArgs(@project_ops_length).returns(true) + @HistoryManager._shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(false) + + @HistoryManager.recordAndFlushHistoryOps( + @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback + ) + + it "should flush project changes", -> + @HistoryManager._flushProjectChangesAsync + .calledWith(@project_id) + .should.equal true + + it "should record doc has history ops", -> @HistoryRedisManager.recordDocHasHistoryOps .calledWith(@project_id, @doc_id, @ops) + + it "should not flush doc changes", -> + @HistoryManager._flushDocChangesAsync.called.should.equal false + + it "should call the callback", -> + @callback.called.should.equal true + + describe "with enough ops to flush doc changes", -> + beforeEach -> + @HistoryManager._shouldFlushHistoryOps = sinon.stub() + @HistoryManager._shouldFlushHistoryOps.withArgs(@project_ops_length).returns(false) + @HistoryManager._shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(true) + + @HistoryManager.recordAndFlushHistoryOps( + @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback + ) + + it "should not flush project changes", -> + @HistoryManager._flushProjectChangesAsync.called.should.equal false + + it "should record doc has history ops", -> + 
  describe "recordAndFlushHistoryOps", ->
    beforeEach ->
-      @ops = ["mock-ops"]
-      @HistoryManager.flushDocChanges = sinon.stub().callsArg(2)
+      @ops = [ 'mock-ops' ]
+      @project_ops_length = 10
+      @doc_ops_length = 5

-    describe "pushing the op", ->
+      @HistoryManager._flushProjectChangesAsync = sinon.stub()
+      @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3)
+      @HistoryManager._flushDocChangesAsync = sinon.stub()
+
+    describe "with no ops", ->
      beforeEach ->
-        @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null)
-        @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 1, @callback
+        @HistoryManager.recordAndFlushHistoryOps(
+          @project_id, @doc_id, [], @doc_ops_length, @project_ops_length, @callback
+        )

-      it "should push the ops into redis", ->
+      it "should not flush project changes", ->
+        @HistoryManager._flushProjectChangesAsync.called.should.equal false
+
+      it "should not record doc has history ops", ->
+        @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false
+
+      it "should not flush doc changes", ->
+        @HistoryManager._flushDocChangesAsync.called.should.equal false
+
+      it "should call the callback", ->
+        @callback.called.should.equal true
+
+    describe "with enough ops to flush project changes", ->
+      beforeEach ->
+        @HistoryManager._shouldFlushHistoryOps = sinon.stub()
+        @HistoryManager._shouldFlushHistoryOps.withArgs(@project_ops_length).returns(true)
+        @HistoryManager._shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(false)
+
+        @HistoryManager.recordAndFlushHistoryOps(
+          @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback
+        )
+
+      it "should flush project changes", ->
+        @HistoryManager._flushProjectChangesAsync
+          .calledWith(@project_id)
+          .should.equal true
+
+      it "should record doc has history ops", ->
        @HistoryRedisManager.recordDocHasHistoryOps
          .calledWith(@project_id, @doc_id, @ops)
+          .should.equal true
+
+      it "should not flush doc changes", ->
+        @HistoryManager._flushDocChangesAsync.called.should.equal false
+
+      it "should call the callback", ->
+        @callback.called.should.equal true
+
+    describe "with enough ops to flush doc changes", ->
+      beforeEach ->
+        @HistoryManager._shouldFlushHistoryOps = sinon.stub()
+        @HistoryManager._shouldFlushHistoryOps.withArgs(@project_ops_length).returns(false)
+        @HistoryManager._shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(true)
+
+        @HistoryManager.recordAndFlushHistoryOps(
+          @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback
+        )
+
+      it "should not flush project changes", ->
+        @HistoryManager._flushProjectChangesAsync.called.should.equal false
+
+      it "should record doc has history ops", ->
+        @HistoryRedisManager.recordDocHasHistoryOps
+          .calledWith(@project_id, @doc_id, @ops)
+          .should.equal true
+
+      it "should flush doc changes", ->
+        @HistoryManager._flushDocChangesAsync
+          .calledWith(@project_id, @doc_id)
          .should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

-      it "should not try to flush the op", ->
-        @HistoryManager.flushDocChanges.called.should.equal false
-
-    describe "when we hit a multiple of FLUSH_EVERY_N_OPS ops", ->
+    describe "when recording doc has history ops errors", ->
      beforeEach ->
+        @error = new Error("error")
        @HistoryRedisManager.recordDocHasHistoryOps =
-          sinon.stub().callsArgWith(3, null)
-        @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS, @callback
+          sinon.stub().callsArgWith(3, @error)

-      it "should tell the track changes api to flush", ->
-        @HistoryManager.flushDocChanges
-          .calledWith(@project_id, @doc_id)
-          .should.equal true
+        @HistoryManager.recordAndFlushHistoryOps(
+          @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback
+        )

-    describe "when we go over a multiple of FLUSH_EVERY_N_OPS ops", ->
-      beforeEach ->
-        @ops = ["op1", "op2", "op3"]
-        @HistoryRedisManager.recordDocHasHistoryOps =
-          sinon.stub().callsArgWith(3, null)
-        @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS + 1, @callback
+      it "should not flush doc changes", ->
+        @HistoryManager._flushDocChangesAsync.called.should.equal false

-      it "should tell the track changes api to flush", ->
-        @HistoryManager.flushDocChanges
-          .calledWith(@project_id, @doc_id)
-          .should.equal true
+      it "should call the callback with the error", ->
+        @callback.calledWith(@error).should.equal true

-    describe "when HistoryManager errors", ->
-      beforeEach ->
-        @HistoryRedisManager.recordDocHasHistoryOps =
-          sinon.stub().callsArgWith(3, null)
-        @HistoryManager.flushDocChanges = sinon.stub().callsArgWith(2, @error = new Error("oops"))
-        @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, @ops, 2 * @HistoryManager.FLUSH_EVERY_N_OPS, @callback
+  describe "_shouldFlushHistoryOps", ->
+    it "should return false if the number of ops is not known", ->
+      @HistoryManager._shouldFlushHistoryOps(null, ['a', 'b', 'c'], 1).should.equal false

-      it "should log out the error", ->
-        @logger.error
-          .calledWith(
-            err: @error
-            doc_id: @doc_id
-            project_id: @project_id
-            "error flushing doc to track changes api"
-          )
-          .should.equal true
-
-    describe "with no ops", ->
-      beforeEach ->
-        @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArgWith(3, null)
-        @HistoryManager.recordAndFlushHistoryOps @project_id, @doc_id, [], 1, @callback
-
-      it "should not call HistoryRedisManager.recordDocHasHistoryOps", ->
-        @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false
-
+    it "should return false if the updates didn't take us past the threshold", ->
+      # Currently there are 14 ops
+      # Previously we were on 11 ops
+      # We didn't pass over a multiple of 5
+      @HistoryManager._shouldFlushHistoryOps(14, ['a', 'b', 'c'], 5).should.equal false

+    it "should return true if the updates took us to the threshold", ->
+      # Currently there are 15 ops
+      # Previously we were on 12 ops
+      # We've reached a new multiple of 5
+      @HistoryManager._shouldFlushHistoryOps(15, ['a', 'b', 'c'], 5).should.equal true
+
+    it "should return true if the updates took us past the threshold", ->
+      # Currently there are 17 ops
+      # Previously we were on 14 ops
+      # We've passed over a multiple of 5
+
@HistoryManager._shouldFlushHistoryOps(17, ['a', 'b', 'c'], 5).should.equal true diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 45653f99b5..b68698bc49 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -19,7 +19,7 @@ describe "UpdateManager", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() - "settings-sharelatex": Settings = {} + "settings-sharelatex": @Settings = {} "./DocumentManager": @DocumentManager = {} "./RangesManager": @RangesManager = {} "./Profiler": class Profiler @@ -164,12 +164,14 @@ describe "UpdateManager", -> @ranges = { entries: "mock", comments: "mock" } @updated_ranges = { entries: "updated", comments: "updated" } @appliedOps = ["mock-applied-ops"] + @doc_ops_length = sinon.stub() + @project_ops_length = sinon.stub() @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) - @RedisManager.updateDocument = sinon.stub().yields() + @RedisManager.updateDocument = sinon.stub().yields(null, @doc_ops_length, @project_ops_length) @RealTimeRedisManager.sendData = sinon.stub() - @HistoryManager.recordAndFlushHistoryOps = sinon.stub().callsArg(4) + @HistoryManager.recordAndFlushHistoryOps = sinon.stub().callsArg(5) describe "normally", -> beforeEach -> @@ -192,7 +194,7 @@ describe "UpdateManager", -> it "should push the applied ops into the history queue", -> @HistoryManager.recordAndFlushHistoryOps - .calledWith(@project_id, @doc_id, @appliedOps) + .calledWith(@project_id, @doc_id, @appliedOps, @doc_ops_length, @project_ops_length) .should.equal true it "should call the callback", -> From be41a1614dee4aca8c64927ae396b18700e3b612 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 29 Sep 2017 12:57:27 +0100 Subject: [PATCH 352/769] decorate ops with pathname and doc_length --- .../app/coffee/UpdateManager.coffee | 19 ++++++++--- .../UpdateManager/UpdateManagerTests.coffee | 33 +++++++++++++++++-- 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index e821926015..5064725aa0 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -4,6 +4,7 @@ RealTimeRedisManager = require "./RealTimeRedisManager" ShareJsUpdateManager = require "./ShareJsUpdateManager" HistoryManager = require "./HistoryManager" Settings = require('settings-sharelatex') +_ = require("underscore") async = require("async") logger = require('logger-sharelatex') Metrics = require "./Metrics" @@ -69,7 +70,7 @@ module.exports = UpdateManager = profile = new Profiler("applyUpdate", {project_id, doc_id}) UpdateManager._sanitizeUpdate update profile.log("sanitizeUpdate") - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> profile.log("getDoc") return callback(error) if error? if !lines? or !version? 
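For orientation between these hunks: the applyUpdate callback chain that this patch threads `pathname` and the op counts through, condensed from the surrounding diff and its unit tests. A sketch only; profiling, error handling, and the realtime publish are elided, and the exact ShareJs call signature is abbreviated:

    applyUpdate: (project_id, doc_id, update, callback) ->
      UpdateManager._sanitizeUpdate update
      DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) ->
        ShareJsUpdateManager.applyUpdate project_id, doc_id, update, (error, updatedDocLines, version, appliedOps) ->
          RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) ->
            # pathname and doc_length ride along on each applied op:
            UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, updatedDocLines)
            RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) ->
              HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, callback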
@@ -78,6 +79,7 @@ module.exports = UpdateManager = profile.log("sharejs.applyUpdate") return callback(error) if error? RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> + UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, updatedDocLines) profile.log("RangesManager.applyUpdate") return callback(error) if error? RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) -> @@ -108,15 +110,15 @@ module.exports = UpdateManager = _handleErrorInsideLock: (doc_id, lockValue, original_error, callback = (error) ->) -> LockManager.releaseLock doc_id, lockValue, (lock_error) -> callback(original_error) - + _sanitizeUpdate: (update) -> # In Javascript, characters are 16-bits wide. It does not understand surrogates as characters. - # + # # From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane): # "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved # for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate # and one Low Surrogate. A single surrogate code point will never be assigned a character."" - # + # # The main offender seems to be \uD835 as a stand alone character, which would be the first # 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). # Something must be going on client side that is screwing up the encoding and splitting the @@ -127,3 +129,12 @@ module.exports = UpdateManager = op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") return update + _addProjectHistoryMetadataToOps: (ops, pathname, lines) -> + doc_length = _.reduce lines, + (chars, line) -> chars + line.length, + 0 + doc_length += lines.length - 1 # count newline characters + ops.forEach (op) -> + op.meta ||= {} + op.meta.pathname = pathname + op.meta.doc_length = doc_length diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index b68698bc49..946f5d9fbe 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -163,14 +163,16 @@ describe "UpdateManager", -> @lines = ["original", "lines"] @ranges = { entries: "mock", comments: "mock" } @updated_ranges = { entries: "updated", comments: "updated" } - @appliedOps = ["mock-applied-ops"] + @appliedOps = [ {v: 42, op: "mock-op-42"}, { v: 45, op: "mock-op-45" }] @doc_ops_length = sinon.stub() @project_ops_length = sinon.stub() - @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) + @pathname = '/a/b/c.tex' + @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges, @pathname) @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields(null, @doc_ops_length, @project_ops_length) @RealTimeRedisManager.sendData = sinon.stub() + @UpdateManager._addProjectHistoryMetadataToOps = sinon.stub() @HistoryManager.recordAndFlushHistoryOps = sinon.stub().callsArg(5) describe "normally", -> @@ -192,6 +194,11 @@ describe "UpdateManager", -> .calledWith(@project_id, @doc_id, @updatedDocLines, @version, @appliedOps, 
@updated_ranges)
          .should.equal true

+    it "should add metadata to the ops", ->
+      @UpdateManager._addProjectHistoryMetadataToOps
+        .calledWith(@appliedOps, @pathname, @updatedDocLines)
+        .should.equal true
+
      it "should push the applied ops into the history queue", ->
        @HistoryManager.recordAndFlushHistoryOps
          .calledWith(@project_id, @doc_id, @appliedOps, @doc_ops_length, @project_ops_length)
@@ -231,6 +238,28 @@ describe "UpdateManager", ->
    it "should call the callback with the error", ->
      @callback.calledWith(@error).should.equal true

+  describe "_addProjectHistoryMetadataToOps", ->
+    it "should add pathname and doc_length metadata to the ops", ->
+      lines = [
+        'some'
+        'test'
+        'data'
+      ]
+      appliedOps = [ {v: 42, op: "mock-op-42"}, { v: 45, op: "mock-op-45" } ]
+      @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, lines)
+      appliedOps.should.deep.equal [{
+        v: 42
+        op: "mock-op-42"
+        meta:
+          pathname: @pathname
+          doc_length: 14
+      }, {
+        v: 45
+        op: "mock-op-45"
+        meta:
+          pathname: @pathname
+          doc_length: 14
+      }]
+
  describe "lockUpdatesAndDo", ->
    beforeEach ->

From 04ecd1e7ee889c0177f61cb776d03e691fd6f419 Mon Sep 17 00:00:00 2001
From: Hayden Faulds
Date: Tue, 3 Oct 2017 15:34:07 +0100
Subject: [PATCH 353/769] set pathname in acceptance tests

---
 .../coffee/ApplyingUpdatesToADocTests.coffee    | 16 ++++++++--------
 .../acceptance/coffee/helpers/MockWebApi.coffee |  2 ++
 2 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
index a2eba4c063..3995721e43 100644
--- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
@@ -136,7 +136,7 @@ describe "Applying updates to a doc", ->
      rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) =>
        result.should.equal 1
        done()
-
+
    it "should store the doc ops in the correct order", (done) ->
      rclient_du.lrange Keys.docOps({doc_id: @doc_id}), 0, -1, (error, updates) =>
        updates = (JSON.parse(u) for u in updates)
@@ -181,7 +181,7 @@
      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
      @broken_update = { doc_id: @doc_id, v: @version, op: [d: "not the correct content", p: 0 ] }
      MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version}
-
+
      DocUpdaterClient.subscribeToAppliedOps @messageCallback = sinon.stub()
      DocUpdaterClient.sendUpdate @project_id, @doc_id, @broken_update, (error) ->
@@ -192,14 +192,14 @@
      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
        doc.lines.should.deep.equal @lines
        done()
-
+
    it "should send a message with an error", ->
      @messageCallback.called.should.equal true
      [channel, message] = @messageCallback.args[0]
      channel.should.equal "applied-ops"
      JSON.parse(message).should.deep.equal {
        project_id: @project_id,
-        doc_id: @doc_id,
+        doc_id: @doc_id,
        error:'Delete component \'not the correct content\' does not match deleted text \'one\ntwo\nthree\''
      }
@@ -240,7 +240,7 @@
        lines: @lines
      }

-      update =
+      update =
        doc: @doc_id
        op: @update.op
        v: 0
@@ -252,12 +252,12 @@
      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
        doc.lines.should.deep.equal @result
        done()
-
+
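    # Aside on PATCH 352's doc_length expectation above: the lines
    # ['some', 'test', 'data'] contribute 4 + 4 + 4 = 12 characters, plus
    # lines.length - 1 = 2 newline characters, giving doc_length = 14 on each op.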
describe "when the sending duplicate ops", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - + DocUpdaterClient.subscribeToAppliedOps @messageCallback = sinon.stub() # One user delete 'one', the next turns it into 'once'. The second becomes a NOP. @@ -292,7 +292,7 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @result done() - + it "should return a message about duplicate ops", -> @messageCallback.calledTwice.should.equal true @messageCallback.args[0][0].should.equal "applied-ops" diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index f2b8bce318..6041e4a0d4 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -9,6 +9,7 @@ module.exports = MockWebApi = insertDoc: (project_id, doc_id, doc) -> doc.version ?= 0 doc.lines ?= [] + doc.pathname = '/a/b/c.tex' @docs["#{project_id}:#{doc_id}"] = doc setDocument: (project_id, doc_id, lines, version, ranges, callback = (error) ->) -> @@ -16,6 +17,7 @@ module.exports = MockWebApi = doc.lines = lines doc.version = version doc.ranges = ranges + doc.pathname = '/a/b/c.tex' callback null getDocument: (project_id, doc_id, callback = (error, doc) ->) -> From 20655b6eeb8337842f50ec90ed901b40e511de43 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 23 Oct 2017 15:59:32 +0100 Subject: [PATCH 354/769] remove unused requires of async --- services/document-updater/app/coffee/HistoryManager.coffee | 1 - services/document-updater/app/coffee/HistoryRedisManager.coffee | 1 - services/document-updater/app/coffee/RedisManager.coffee | 1 - services/document-updater/app/coffee/ShareJsUpdateManager.coffee | 1 - 4 files changed, 4 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 9f78b5af4b..c3675f4018 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -1,7 +1,6 @@ settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" -async = require "async" HistoryRedisManager = require "./HistoryRedisManager" module.exports = HistoryManager = diff --git a/services/document-updater/app/coffee/HistoryRedisManager.coffee b/services/document-updater/app/coffee/HistoryRedisManager.coffee index 0ac8723359..d9a99a09aa 100644 --- a/services/document-updater/app/coffee/HistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/HistoryRedisManager.coffee @@ -1,7 +1,6 @@ Settings = require('settings-sharelatex') rclient = require("redis-sharelatex").createClient(Settings.redis.history) Keys = Settings.redis.history.key_schema -async = require "async" logger = require('logger-sharelatex') module.exports = HistoryRedisManager = diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b873f9bd6a..ccb82a1ab6 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -1,5 +1,4 @@ Settings = require('settings-sharelatex') -async = 
require('async') rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) _ = require('underscore') logger = require('logger-sharelatex') diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 3ec90e4f62..a5cc6070cb 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -1,6 +1,5 @@ ShareJsModel = require "./sharejs/server/model" ShareJsDB = require "./ShareJsDB" -async = require "async" logger = require "logger-sharelatex" Settings = require('settings-sharelatex') Keys = require "./UpdateKeys" From 14f3d22071a6875ae1aad1a917c724dacf2c21f5 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 23 Oct 2017 16:02:24 +0100 Subject: [PATCH 355/769] upgrade to latest async ^2.5.0 --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 3436da467d..3854e78158 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -7,7 +7,7 @@ "url": "https://github.com/sharelatex/document-updater-sharelatex.git" }, "dependencies": { - "async": "2.0.0-rc.5", + "async": "^2.5.0", "coffee-script": "1.4.0", "express": "3.3.4", "ioredis": "^3.1.4", From d1f0c4ceae9c7eae6c2366e5c95eda9140dbeefd Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Thu, 5 Oct 2017 15:14:35 +0100 Subject: [PATCH 356/769] add acceptance tests for project history API --- services/document-updater/Jenkinsfile | 2 +- .../config/settings.defaults.coffee | 2 +- .../coffee/ApplyingUpdatesToADocTests.coffee | 12 +++++++ .../coffee/DeletingADocumentTests.coffee | 18 ++++++---- .../coffee/DeletingAProjectTests.coffee | 12 ++++--- .../coffee/SettingADocumentTests.coffee | 34 +++++++++++-------- .../helpers/MockProjectHistoryApi.coffee | 19 +++++++++++ 7 files changed, 73 insertions(+), 26 deletions(-) create mode 100644 services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 4ffd642356..b8f12777f1 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -41,7 +41,7 @@ pipeline { stage('Acceptance Tests') { steps { sh 'docker pull sharelatex/acceptance-test-runner' - sh 'docker run --rm -v $(pwd):/app sharelatex/acceptance-test-runner' + sh 'docker run --rm -e SHARELATEX_ENABLE_PROJECT_HISTORY=true -v $(pwd):/app sharelatex/acceptance-test-runner' } } stage('Package') { diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index dcfedc6b81..fee3d614c2 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -15,8 +15,8 @@ module.exports = trackchanges: url: "http://localhost:3015" project_history: + enabled: process.env.SHARELATEX_ENABLE_PROJECT_HISTORY == 'true' url: "http://localhost:3054" - enabled: true redis: realtime: diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 3995721e43..29a17c04ea 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ 
b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -8,6 +8,7 @@ rclient_history = require("redis-sharelatex").createClient(Settings.redis.histor rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) Keys = Settings.redis.documentupdater.key_schema HistoryKeys = Settings.redis.history.key_schema +ProjectHistoryKeys = Settings.redis.project_history.key_schema MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" @@ -58,6 +59,11 @@ describe "Applying updates to a doc", -> result.should.equal 1 done() + it "should push the applied updates to the project history changes api", (done) -> + rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + throw error if error? + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() describe "when the document is loaded", -> before (done) -> @@ -89,6 +95,12 @@ describe "Applying updates to a doc", -> result.should.equal 1 done() + it "should push the applied updates to the project history changes api", (done) -> + rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + + describe "when the document has been deleted", -> describe "when the ops come in a single linear order", -> before (done) -> diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee index 291b627a3e..2be5b01245 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee @@ -3,6 +3,7 @@ chai = require("chai") chai.should() MockTrackChangesApi = require "./helpers/MockTrackChangesApi" +MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -18,11 +19,13 @@ describe "Deleting a document", -> }] v: @version @result = ["one", "one and a half", "two", "three"] - + sinon.spy MockTrackChangesApi, "flushDoc" - + sinon.spy MockProjectHistoryApi, "flushProject" + after -> MockTrackChangesApi.flushDoc.restore() + MockProjectHistoryApi.flushProject.restore() describe "when the updated doc exists in the doc updater", -> before (done) -> @@ -60,10 +63,13 @@ describe "Deleting a document", -> .calledWith(@project_id, @doc_id) .should.equal true done() - + it "should flush track changes", -> MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true + it "should flush project history", -> + MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true + describe "when the doc is not in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] @@ -93,9 +99,9 @@ describe "Deleting a document", -> .calledWith(@project_id, @doc_id) .should.equal true done() - + it "should flush track changes", -> MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true - - + it "should flush project history", -> + MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 27d241d97d..3b889f79ea 100644 --- 
a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -4,6 +4,7 @@ chai.should() async = require "async" MockTrackChangesApi = require "./helpers/MockTrackChangesApi" +MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -38,11 +39,13 @@ describe "Deleting a project", -> lines: doc.lines version: doc.update.v } - + sinon.spy MockTrackChangesApi, "flushDoc" - + sinon.spy MockProjectHistoryApi, "flushProject" + after -> MockTrackChangesApi.flushDoc.restore() + MockProjectHistoryApi.flushProject.restore() describe "with documents which have been updated", -> before (done) -> @@ -84,9 +87,10 @@ describe "Deleting a project", -> ), () -> MockWebApi.getDocument.restore() done() - + it "should flush each doc in track changes", -> for doc in @docs MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal true - + it "should flush each doc in project history", -> + MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 97fae5cf14..68ed5483c9 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -7,6 +7,7 @@ rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupd Keys = Settings.redis.documentupdater.key_schema MockTrackChangesApi = require "./helpers/MockTrackChangesApi" +MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" @@ -25,14 +26,16 @@ describe "Setting a document", -> @newLines = ["these", "are", "the", "new", "lines"] @source = "dropbox" @user_id = "user-id-123" - + sinon.spy MockTrackChangesApi, "flushDoc" + sinon.spy MockProjectHistoryApi, "flushProject" sinon.spy MockWebApi, "setDocument" - + after -> - MockWebApi.setDocument.restore() MockTrackChangesApi.flushDoc.restore() - + MockProjectHistoryApi.flushProject.restore() + MockWebApi.setDocument.restore() + describe "when the updated doc exists in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] @@ -64,13 +67,13 @@ describe "Setting a document", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.version.should.equal @version + 2 done() - + it "should leave the document in redis", (done) -> rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) => throw error if error? 
        expect(JSON.parse(lines)).to.deep.equal @newLines
        done()
-
+
  describe "when the updated doc does not exist in the doc updater", ->
    before (done) ->
      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
@@ -78,7 +81,7 @@
      DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) =>
        @statusCode = res.statusCode
        setTimeout done, 200
-
+
    it "should return a 204 status code", ->
      @statusCode.should.equal 204
@@ -86,16 +89,19 @@
      MockWebApi.setDocument
        .calledWith(@project_id, @doc_id, @newLines)
        .should.equal true
-
+
    it "should flush track changes", ->
      MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true
-
+
+    it "should flush project history", ->
+      MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true
+
    it "should remove the document from redis", (done) ->
      rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) =>
        throw error if error?
        expect(lines).to.not.exist
        done()
-
+
  describe "with track changes", ->
    before ->
      @lines = ["one", "one and a half", "two", "three"]
@@ -123,14 +129,14 @@
        DocUpdaterClient.setDocLines @project_id, @doc_id, @lines, @source, @user_id, true, (error, res, body) =>
          @statusCode = res.statusCode
          setTimeout done, 200
-
+
      it "should undo the tracked changes", (done) ->
        DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) =>
          throw error if error?
          ranges = data.ranges
          expect(ranges.changes).to.be.undefined
          done()
-
+
    describe "without the undo flag", ->
      before (done) ->
        [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
@@ -143,7 +149,7 @@
        DocUpdaterClient.setDocLines @project_id, @doc_id, @lines, @source, @user_id, false, (error, res, body) =>
          @statusCode = res.statusCode
          setTimeout done, 200
-
+
      it "should not undo the tracked changes", (done) ->
        DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) =>
          throw error if error?
          ranges = data.ranges
          expect(ranges.changes.length).to.equal 1
          done()
-
+

diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee
new file mode 100644
index 0000000000..2a0c8603a4
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee
@@ -0,0 +1,19 @@
+express = require("express")
+app = express()
+
+module.exports = MockProjectHistoryApi =
+  flushProject: (project_id, callback = (error) ->) ->
+    callback()
+
+  run: () ->
+    app.post "/project/:project_id/flush", (req, res, next) =>
+      @flushProject req.params.project_id, (error) ->
+        if error?
+          res.send 500
+        else
+          res.send 204
+
+    app.listen 3054, (error) ->
+      throw error if error?
+
+MockProjectHistoryApi.run()

From d9d58393d78d47c2f3029754692ada76ed30de85 Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Tue, 24 Oct 2017 10:39:00 +0100
Subject: [PATCH 357/769] remove unused ioredis package

it is loaded from redis-sharelatex, not here.
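As a quick check, the MockProjectHistoryApi helper added above can be exercised directly; a hypothetical smoke test, with the port and route taken from the helper itself:

    request = require "request"

    # The mock listens on 3054 and answers its flush endpoint with 204 on success:
    request.post "http://localhost:3054/project/some-project-id/flush", (error, res) ->
      throw error if error?
      console.log res.statusCode  # => 204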
--- services/document-updater/package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 3854e78158..84f32cb9ff 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -10,7 +10,6 @@ "async": "^2.5.0", "coffee-script": "1.4.0", "express": "3.3.4", - "ioredis": "^3.1.4", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", From 56fd6e3d4805307fc88e013bdce4e1845b1c4fd4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 23 Oct 2017 16:56:34 +0100 Subject: [PATCH 358/769] upgrade to node 6.9.5 --- services/document-updater/.nvmrc | 2 +- services/document-updater/Jenkinsfile | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index d87edbfc10..26ec038c18 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -4.2.1 \ No newline at end of file +6.9.5 \ No newline at end of file diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 4ffd642356..80f552dbea 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -11,7 +11,7 @@ pipeline { stage('Install') { agent { docker { - image 'node:4.2.1' + image 'node:6.9.5' args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp" reuseNode true } @@ -28,7 +28,7 @@ pipeline { stage('Compile and Test') { agent { docker { - image 'node:4.2.1' + image 'node:6.9.5' reuseNode true } } From f3098f7470063fc7ae6c7a860e5bef5048b71e69 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 24 Oct 2017 12:20:14 +0100 Subject: [PATCH 359/769] upgrade ioredis to 3.2.1 via redis-sharelatex --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 84f32cb9ff..269e9f42f6 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -13,7 +13,7 @@ "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", - "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.3", + "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4", "request": "2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", From b8052e7612387e3097b5cb6992f07f887e10a917 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Wed, 25 Oct 2017 13:56:38 +0100 Subject: [PATCH 360/769] allow settings.redis.project_history to be undefined --- services/document-updater/app/coffee/RedisManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 82125310b4..d2ab35a3ee 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -31,7 +31,7 @@ MAX_RANGES_SIZE = 3 * MEGABYTES keys = Settings.redis.documentupdater.key_schema historyKeys = Settings.redis.history.key_schema -projectHistoryKeys = Settings.redis.project_history.key_schema +projectHistoryKeys = 
Settings.redis?.project_history?.key_schema module.exports = RedisManager = rclient: rclient From 6d571e6d2367bbd68ecf788a2d9a2c34198cd99c Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Wed, 1 Nov 2017 19:16:49 +0000 Subject: [PATCH 361/769] version document renames --- services/document-updater/app.coffee | 1 + .../app/coffee/DocumentManager.coffee | 13 +++++ .../app/coffee/HttpController.coffee | 14 ++++- .../app/coffee/ProjectManager.coffee | 12 +++++ .../app/coffee/RedisManager.coffee | 16 ++++++ .../DocumentManagerTests.coffee | 18 +++++++ .../HttpController/HttpControllerTests.coffee | 37 +++++++++++++ .../ProjectManager/updateProjectTests.coffee | 54 +++++++++++++++++++ .../RedisManager/RedisManagerTests.coffee | 42 +++++++++++++++ 9 files changed, 205 insertions(+), 2 deletions(-) create mode 100644 services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 41cab59680..b4188292da 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -47,6 +47,7 @@ app.post '/project/:project_id/doc/:doc_id', HttpCont app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject +app.post '/project/:project_id', HttpController.updateProject app.post '/project/:project_id/flush', HttpController.flushProject app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 5ddca2e6a8..c557db3f54 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -7,6 +7,7 @@ HistoryManager = require "./HistoryManager" RealTimeRedisManager = require "./RealTimeRedisManager" Errors = require "./Errors" RangesManager = require "./RangesManager" +async = require "async" MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change @@ -155,6 +156,14 @@ module.exports = DocumentManager = return callback(error) if error? callback() + renameDoc: (project_id, doc_id, user_id, update, _callback = (error) ->) -> + timer = new Metrics.Timer("docManager.updateProject") + callback = (args...) -> + timer.done() + _callback(args...) + + RedisManager.renameDoc project_id, doc_id, user_id, update, callback + getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> return callback(error) if error? 
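Bridging these hunks: a hypothetical request against the `POST /project/:project_id` route this patch adds in app.coffee. The body shape matches the HttpController change that follows; the host, port, and all ids are assumptions for illustration:

    request = require "request"

    # One rename per entry; each is applied under the doc's update lock.
    docUpdates = [ {id: "doc-id-123", pathname: "/old.tex", newPathname: "/new.tex"} ]
    request.post {
      url: "http://localhost:3003/project/project-id-123"  # port assumed
      json: {userId: "user-id-123", docUpdates}
    }, (error, res) ->
      throw error if error?
      console.log res.statusCode  # => 204 once the renames are queued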
@@ -197,3 +206,7 @@ module.exports = DocumentManager = deleteCommentWithLock: (project_id, doc_id, thread_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.deleteComment, project_id, doc_id, thread_id, callback + + renameDocWithLock: (project_id, doc_id, user_id, update, callback = (error) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, callback diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 0c03a4f7bd..78de5fb765 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -141,7 +141,7 @@ module.exports = HttpController = return next(error) if error? logger.log {project_id, doc_id}, "accepted #{ change_ids.length } changes via http" res.send 204 # No Content - + deleteComment: (req, res, next = (error) ->) -> {project_id, doc_id, comment_id} = req.params logger.log {project_id, doc_id, comment_id}, "deleting comment via http" @@ -151,5 +151,15 @@ module.exports = HttpController = return next(error) if error? logger.log {project_id, doc_id, comment_id}, "deleted comment via http" res.send 204 # No Content - + updateProject: (req, res, next = (error) ->) -> + timer = new Metrics.Timer("http.updateProject") + project_id = req.params.project_id + {userId, docUpdates} = req.body + logger.log {project_id, docUpdates}, "updating project via http" + + ProjectManager.updateProjectWithLocks project_id, userId, docUpdates, (error) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, "updated project via http" + res.send 204 # No Content diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 26b6e79b0d..6b320e6e28 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -93,3 +93,15 @@ module.exports = ProjectManager = clearProjectState: (project_id, callback = (error) ->) -> RedisManager.clearProjectState project_id, callback + + updateProjectWithLocks: (project_id, user_id, updates, _callback = (error) ->) -> + timer = new Metrics.Timer("projectManager.updateProject") + callback = (args...) -> + timer.done() + _callback(args...) + + handleUpdate = (update, cb) -> + doc_id = update.id + DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, cb + + async.each updates, handleUpdate, callback diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index d2ab35a3ee..cde2ccddc9 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -272,6 +272,22 @@ module.exports = RedisManager = else callback null, docUpdateCount + renameDoc: (project_id, doc_id, user_id, update, callback = (error) ->) -> + update = + doc: doc_id + pathname: update.pathname + new_pathname: update.newPathname + meta: + user_id: user_id + ts: new Date() + jsonUpdate = JSON.stringify(update) + + RedisManager.getDoc project_id, doc_id, (error, lines, version) -> + return callback(error) if error? + if lines? and version? 
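      # Only a doc that is loaded in redis has a cached Pathname: key to refresh;
      # the rename op is queued to project-history either way below.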
+ rclient.set keys.pathname(doc_id:doc_id), update.new_pathname + + rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback clearUnflushedTime: (doc_id, callback = (error) ->) -> rclient.del keys.unflushedTime(doc_id:doc_id), callback diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index ac0601b34b..6c7b051f7e 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -23,6 +23,7 @@ describe "DocumentManager", -> "./RangesManager": @RangesManager = {} @project_id = "project-id-123" @doc_id = "doc-id-123" + @user_id = 1234 @callback = sinon.stub() @lines = ["one", "two", "three"] @version = 42 @@ -439,3 +440,20 @@ describe "DocumentManager", -> it "should call the callback with the lines and versions", -> @callback.calledWith(null, @lines, @version).should.equal true + + describe "renameDoc", -> + beforeEach -> + @update = 'some-update' + @RedisManager.renameDoc = sinon.stub().yields() + + describe "successfully", -> + beforeEach -> + @DocumentManager.renameDoc @project_id, @doc_id, @user_id, @update, @callback + + it "should rename the document", -> + @RedisManager.renameDoc + .calledWith(@project_id, @doc_id, @user_id, @update) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 17b5d10304..5ddefefaa3 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -492,3 +492,40 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true + + describe "updateProject", -> + beforeEach -> + @userId = "user-id-123" + @docUpdates = sinon.stub() + @req = + body: {@userId, @docUpdates} + params: + project_id: @project_id + + describe "successfully", -> + beforeEach -> + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(3) + @HttpController.updateProject(@req, @res, @next) + + it "should accept the change", -> + @ProjectManager.updateProjectWithLocks + .calledWith(@project_id, @userId, @docUpdates) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + it "should time the request", -> + @Metrics.Timer::done.called.should.equal true + + describe "when an errors occurs", -> + beforeEach -> + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(3, new Error("oops")) + @HttpController.updateProject(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee new file mode 100644 index 0000000000..fc81834782 --- /dev/null +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -0,0 +1,54 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/ProjectManager.js" 
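# (The unit tests load the compiled build, hence app/js rather than app/coffee.)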
+SandboxedModule = require('sandboxed-module') + +describe "ProjectManager", -> + beforeEach -> + @ProjectManager = SandboxedModule.require modulePath, requires: + "./RedisManager": @RedisManager = {} + "./DocumentManager": @DocumentManager = {} + "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } + "./Metrics": @Metrics = + Timer: class Timer + done: sinon.stub() + + @project_id = "project-id-123" + @user_id = "user-id-123" + @callback = sinon.stub() + + describe "updateProjectWithLocks", -> + beforeEach -> + @firstUpdate = + id: 1 + update: 'foo' + @secondUpdate = + id: 2 + update: 'bar' + @updates = [ @firstUpdate, @secondUpdate ] + + describe "successfully", -> + beforeEach -> + @DocumentManager.renameDocWithLock = sinon.stub().yields() + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @updates, @callback + + it "should rename the documents in the updates", -> + @DocumentManager.renameDocWithLock + .calledWith(@project_id, @firstUpdate.id, @user_id, @firstUpdate) + .should.equal true + @DocumentManager.renameDocWithLock + .calledWith(@project_id, @secondUpdate.id, @user_id, @secondUpdate) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + describe "when renaming a doc fails", -> + beforeEach -> + @error = new Error('error') + @DocumentManager.renameDocWithLock = sinon.stub().yields(@error) + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @updates, @callback + + it "should call the callback with the error", -> + @callback.calledWith(@error).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index f5bf3843fa..2b81c18a18 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -673,3 +673,45 @@ describe "RedisManager", -> @rclient.del .calledWith("ProjectState:#{@project_id}") .should.equal true + + describe "renameDoc", -> + beforeEach () -> + @rclient.rpush = sinon.stub().callsArg(2) + @rclient.set = sinon.stub() + @update = + id: @doc_id + pathname: @pathname = 'pathname' + newPathname: @newPathname = 'new-pathname' + + describe "the document is cached in redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version') + @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback + + it "update the cached pathname", -> + @rclient.set + .calledWith("Pathname:#{@doc_id}", @newPathname) + .should.equal true + + it "should queue an update", -> + update = + doc: @doc_id + pathname: @pathname + new_pathname: @newPathname + meta: + user_id: @userId + ts: new Date() + @rclient.rpush + .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update)) + .should.equal true + + it "should call the callback", -> + @callback.calledWith().should.equal true + + describe "the document is not cached in redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) + @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback + + it "does not update the cached pathname", -> + @rclient.set.called.should.equal false From 7e86afe55e1bbd24d33035092f3519522159e794 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Mon, 6 Nov 2017 16:14:27 +0000 Subject: [PATCH 362/769] version file renames --- .../app/coffee/HttpController.coffee | 6 
+-- .../app/coffee/ProjectManager.coffee | 12 ++++-- .../app/coffee/RedisManager.coffee | 25 +++++++++---- .../HttpController/HttpControllerTests.coffee | 9 +++-- .../ProjectManager/updateProjectTests.coffee | 37 ++++++++++++++----- .../RedisManager/RedisManagerTests.coffee | 30 +++++++++++++-- 6 files changed, 90 insertions(+), 29 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 78de5fb765..38e82fb04e 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -155,10 +155,10 @@ module.exports = HttpController = updateProject: (req, res, next = (error) ->) -> timer = new Metrics.Timer("http.updateProject") project_id = req.params.project_id - {userId, docUpdates} = req.body - logger.log {project_id, docUpdates}, "updating project via http" + {userId, docUpdates, fileUpdates} = req.body + logger.log {project_id, docUpdates, fileUpdates}, "updating project via http" - ProjectManager.updateProjectWithLocks project_id, userId, docUpdates, (error) -> + ProjectManager.updateProjectWithLocks project_id, userId, docUpdates, fileUpdates, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, "updated project via http" diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 6b320e6e28..2f85173b4c 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -94,14 +94,20 @@ module.exports = ProjectManager = clearProjectState: (project_id, callback = (error) ->) -> RedisManager.clearProjectState project_id, callback - updateProjectWithLocks: (project_id, user_id, updates, _callback = (error) ->) -> + updateProjectWithLocks: (project_id, user_id, docUpdates, fileUpdates, _callback = (error) ->) -> timer = new Metrics.Timer("projectManager.updateProject") callback = (args...) -> timer.done() _callback(args...) - handleUpdate = (update, cb) -> + handleDocUpdate = (update, cb) -> doc_id = update.id DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, cb - async.each updates, handleUpdate, callback + handleFileUpdate = (update, cb) -> + file_id = update.id + RedisManager.renameFile project_id, file_id, user_id, update, cb + + async.each docUpdates, handleDocUpdate, (error) -> + return callback(error) if error? + async.each fileUpdates, handleFileUpdate, callback diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index cde2ccddc9..56ce0fb9f9 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -273,21 +273,32 @@ module.exports = RedisManager = callback null, docUpdateCount renameDoc: (project_id, doc_id, user_id, update, callback = (error) ->) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version) -> + return callback(error) if error? + + if lines? and version? + rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) -> + return callback(error) if error? 
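          # Pathname cache refreshed; now queue the rename op itself.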
+ RedisManager._renameEntity project_id, 'doc', doc_id, user_id, update, callback + else + RedisManager._renameEntity project_id, 'doc', doc_id, user_id, update, callback + + renameFile: (project_id, file_id, user_id, update, callback = (error) ->) -> + RedisManager._renameEntity project_id, 'file', file_id, user_id, update, callback + + _renameEntity: (project_id, entity_type, entity_id, user_id, update, callback = (error) ->) -> update = - doc: doc_id pathname: update.pathname new_pathname: update.newPathname meta: user_id: user_id ts: new Date() + update[entity_type] = entity_id + + logger.log {project_id, update}, "queue rename operation to project-history" jsonUpdate = JSON.stringify(update) - RedisManager.getDoc project_id, doc_id, (error, lines, version) -> - return callback(error) if error? - if lines? and version? - rclient.set keys.pathname(doc_id:doc_id), update.new_pathname - - rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback + rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback clearUnflushedTime: (doc_id, callback = (error) ->) -> rclient.del keys.unflushedTime(doc_id:doc_id), callback diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 5ddefefaa3..b5ebe02339 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -497,19 +497,20 @@ describe "HttpController", -> beforeEach -> @userId = "user-id-123" @docUpdates = sinon.stub() + @fileUpdates = sinon.stub() @req = - body: {@userId, @docUpdates} + body: {@userId, @docUpdates, @fileUpdates} params: project_id: @project_id describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(3) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(4) @HttpController.updateProject(@req, @res, @next) it "should accept the change", -> @ProjectManager.updateProjectWithLocks - .calledWith(@project_id, @userId, @docUpdates) + .calledWith(@project_id, @userId, @docUpdates, @fileUpdates) .should.equal true it "should return a successful No Content response", -> @@ -522,7 +523,7 @@ describe "HttpController", -> describe "when an errors occurs", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(3, new Error("oops")) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(4, new Error("oops")) @HttpController.updateProject(@req, @res, @next) it "should call next with the error", -> diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index fc81834782..7009405842 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -20,25 +20,35 @@ describe "ProjectManager", -> describe "updateProjectWithLocks", -> beforeEach -> - @firstUpdate = + @firstDocUpdate = id: 1 update: 'foo' - @secondUpdate = + @secondDocUpdate = id: 2 update: 'bar' - @updates = [ @firstUpdate, @secondUpdate ] + @docUpdates = [ @firstDocUpdate, @secondDocUpdate ] + @firstFileUpdate = + id: 2 + update: 'bar' + @fileUpdates = [ @firstFileUpdate ] + 
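      # Stub both rename paths so doc and file updates can be asserted separately: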
@DocumentManager.renameDocWithLock = sinon.stub().yields() + @RedisManager.renameFile = sinon.stub().yields() describe "successfully", -> beforeEach -> - @DocumentManager.renameDocWithLock = sinon.stub().yields() - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @updates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback - it "should rename the documents in the updates", -> + it "should rename the docs in the updates", -> @DocumentManager.renameDocWithLock - .calledWith(@project_id, @firstUpdate.id, @user_id, @firstUpdate) + .calledWith(@project_id, @firstDocUpdate.id, @user_id, @firstDocUpdate) .should.equal true @DocumentManager.renameDocWithLock - .calledWith(@project_id, @secondUpdate.id, @user_id, @secondUpdate) + .calledWith(@project_id, @secondDocUpdate.id, @user_id, @secondDocUpdate) + .should.equal true + + it "should rename the files in the updates", -> + @RedisManager.renameFile + .calledWith(@project_id, @firstFileUpdate.id, @user_id, @firstFileUpdate) .should.equal true it "should call the callback", -> @@ -48,7 +58,16 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @DocumentManager.renameDocWithLock = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @updates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + + it "should call the callback with the error", -> + @callback.calledWith(@error).should.equal true + + describe "when renaming a file fails", -> + beforeEach -> + @error = new Error('error') + @RedisManager.renameFile = sinon.stub().yields(@error) + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 2b81c18a18..157c315b63 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -676,8 +676,8 @@ describe "RedisManager", -> describe "renameDoc", -> beforeEach () -> - @rclient.rpush = sinon.stub().callsArg(2) - @rclient.set = sinon.stub() + @rclient.rpush = sinon.stub().yields() + @rclient.set = sinon.stub().yields() @update = id: @doc_id pathname: @pathname = 'pathname' @@ -695,12 +695,12 @@ describe "RedisManager", -> it "should queue an update", -> update = - doc: @doc_id pathname: @pathname new_pathname: @newPathname meta: user_id: @userId ts: new Date() + doc: @doc_id @rclient.rpush .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update)) .should.equal true @@ -715,3 +715,27 @@ describe "RedisManager", -> it "does not update the cached pathname", -> @rclient.set.called.should.equal false + + describe "renameFile", -> + beforeEach () -> + @rclient.rpush = sinon.stub().yields() + @file_id = 1234 + + @update = + pathname: @pathname = '/old' + newPathname: @newPathname = '/new' + + @RedisManager.renameFile @project_id, @file_id, @userId, @update + + it "should queue an update", -> + update = + pathname: @pathname + new_pathname: @newPathname + meta: + user_id: @userId + ts: new Date() + file: @file_id + + @rclient.rpush + .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update)) + 
 
From 944e633bac4301f116014c4dafbeed22da607abd Mon Sep 17 00:00:00 2001
From: Hayden Faulds
Date: Mon, 6 Nov 2017 17:18:28 +0000
Subject: [PATCH 363/769] add acceptance test for entity renaming

---
 .../app/coffee/DocumentManager.coffee         | 10 +-
 .../app/coffee/HttpController.coffee          |  3 +-
 ...lyingUpdatesToProjectStructureTests.coffee | 98 +++++++++++++++++++
 .../coffee/helpers/DocUpdaterClient.coffee    |  7 ++
 .../DocumentManagerTests.coffee               |  8 +-
 .../HttpController/HttpControllerTests.coffee |  4 +-
 6 files changed, 119 insertions(+), 11 deletions(-)
 create mode 100644 services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee

diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee
index c557db3f54..8e69989d09 100644
--- a/services/document-updater/app/coffee/DocumentManager.coffee
+++ b/services/document-updater/app/coffee/DocumentManager.coffee
@@ -31,20 +31,20 @@ module.exports = DocumentManager =
       else
         callback null, lines, version, ranges, pathname, unflushedTime, true
 
-  getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, recentOps, ranges) ->) ->
+  getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname) ->) ->
     timer = new Metrics.Timer("docManager.getDocAndRecentOps")
     callback = (args...) ->
       timer.done()
       _callback(args...)
 
-    DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) ->
+    DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) ->
       return callback(error) if error?
       if fromVersion == -1
-        callback null, lines, version, [], ranges
+        callback null, lines, version, [], ranges, pathname
       else
         RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) ->
           return callback(error) if error?
-          callback null, lines, version, ops, ranges
+          callback null, lines, version, ops, ranges, pathname
 
   setDoc: (project_id, doc_id, newLines, source, user_id, undoing, _callback = (error) ->) ->
     timer = new Metrics.Timer("docManager.setDoc")
@@ -179,7 +179,7 @@ module.exports = DocumentManager =
     UpdateManager = require "./UpdateManager"
     UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback
 
-  getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version) ->) ->
+  getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version, ops, ranges, pathname) ->) ->
     UpdateManager = require "./UpdateManager"
     UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback
 
diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee
index 38e82fb04e..ef9f860552 100644
--- a/services/document-updater/app/coffee/HttpController.coffee
+++ b/services/document-updater/app/coffee/HttpController.coffee
@@ -18,7 +18,7 @@ module.exports = HttpController =
     else
       fromVersion = -1
 
-    DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges) ->
+    DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges, pathname) ->
       timer.done()
       return next(error) if error?
       logger.log project_id: project_id, doc_id: doc_id, "got doc via http"
@@ -30,6 +30,7 @@ module.exports = HttpController =
           version: version
           ops: ops
           ranges: ranges
+          pathname: pathname
 
   _getTotalSizeOfLines: (lines) ->
     size = 0
diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
new file mode 100644
index 0000000000..21657793a8
--- /dev/null
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
@@ -0,0 +1,98 @@
+sinon = require "sinon"
+chai = require("chai")
+chai.should()
+Settings = require('settings-sharelatex')
+rclient_history = require("redis-sharelatex").createClient(Settings.redis.history)
+ProjectHistoryKeys = Settings.redis.project_history.key_schema
+
+MockWebApi = require "./helpers/MockWebApi"
+DocUpdaterClient = require "./helpers/DocUpdaterClient"
+
+describe "Applying updates to a project's structure", ->
+  before ->
+    @user_id = 'user-id-123'
+
+  describe "renaming a file", ->
+    before (done) ->
+      @project_id = DocUpdaterClient.randomId()
+      @fileUpdate =
+        id: DocUpdaterClient.randomId()
+        pathname: '/file-path'
+        newPathname: '/new-file-path'
+      @fileUpdates = [ @fileUpdate ]
+      DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, (error) ->
+        throw error if error?
+        setTimeout done, 200
+
+    it "should push the applied file renames to the project history changes api", (done) ->
+      rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+        throw error if error?
+
+        update = JSON.parse(updates[0])
+        update.file.should.equal @fileUpdate.id
+        update.pathname.should.equal '/file-path'
+        update.new_pathname.should.equal '/new-file-path'
+        update.meta.user_id.should.equal @user_id
+        update.meta.ts.should.be.a('string')
+
+        done()
+
+  describe "renaming a document", ->
+    before ->
+      @docUpdate =
+        id: DocUpdaterClient.randomId()
+        pathname: '/doc-path'
+        newPathname: '/new-doc-path'
+      @docUpdates = [ @docUpdate ]
+
+    describe "when the document is not loaded", ->
+      before (done) ->
+        @project_id = DocUpdaterClient.randomId()
+        DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], (error) ->
+          throw error if error?
+          setTimeout done, 200
+
+      it "should push the applied doc renames to the project history changes api", (done) ->
+        rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+          throw error if error?
+
+          update = JSON.parse(updates[0])
+          update.doc.should.equal @docUpdate.id
+          update.pathname.should.equal '/doc-path'
+          update.new_pathname.should.equal '/new-doc-path'
+          update.meta.user_id.should.equal @user_id
+          update.meta.ts.should.be.a('string')
+
+          done()
+
+    describe "when the document is loaded", ->
+      before (done) ->
+        @project_id = DocUpdaterClient.randomId()
+        MockWebApi.insertDoc @project_id, @docUpdate.id, {}
+        DocUpdaterClient.preloadDoc @project_id, @docUpdate.id, (error) =>
+          throw error if error?
+          sinon.spy MockWebApi, "getDocument"
+          DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], (error) ->
+            throw error if error?
+            setTimeout done, 200
+
+      after ->
+        MockWebApi.getDocument.restore()
+
+      it "should update the doc", (done) ->
+        DocUpdaterClient.getDoc @project_id, @docUpdate.id, (error, res, doc) =>
+          doc.pathname.should.equal @docUpdate.newPathname
+          done()
+
+      it "should push the applied doc renames to the project history changes api", (done) ->
+        rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+          throw error if error?
+
+          update = JSON.parse(updates[0])
+          update.doc.should.equal @docUpdate.id
+          update.pathname.should.equal '/doc-path'
+          update.new_pathname.should.equal '/new-doc-path'
+          update.meta.user_id.should.equal @user_id
+          update.meta.ts.should.be.a('string')
+
+          done()
diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee
index 6b2a5ac2fb..f70271021b 100644
--- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee
+++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee
@@ -86,3 +86,10 @@ module.exports = DocUpdaterClient =
       if body? and res.statusCode >= 200 and res.statusCode < 300
         body = JSON.parse(body)
       callback error, res, body
+
+  sendProjectUpdate: (project_id, userId, docUpdates, fileUpdates, callback = (error) ->) ->
+    request.post {
+      url: "http://localhost:3003/project/#{project_id}"
+      json: { userId, docUpdates, fileUpdates }
+    }, (error, res, body) ->
+      callback error, res, body
diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee
index 6c7b051f7e..702617f7ae 100644
--- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee
@@ -109,7 +109,7 @@ describe "DocumentManager", ->
   describe "getDocAndRecentOps", ->
     describe "with a previous version specified", ->
       beforeEach ->
-        @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges)
+        @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname)
         @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops)
         @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback
 
@@ -124,14 +124,14 @@ describe "DocumentManager", ->
           .should.equal true
 
       it "should call the callback with the doc info", ->
-        @callback.calledWith(null, @lines, @version, @ops, @ranges).should.equal true
+        @callback.calledWith(null, @lines, @version, @ops, @ranges, @pathname).should.equal true
 
       it "should time the execution", ->
        @Metrics.Timer::done.called.should.equal true
 
    describe "with no previous version specified", ->
      beforeEach ->
-        @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges)
+        @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname)
        @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops)
        @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback
 
      it "should not need to get the doc ops", ->
        @RedisManager.getPreviousDocOps.called.should.equal false
 
      it "should call the callback with the doc info", ->
-        @callback.calledWith(null, @lines, @version, [], @ranges).should.equal true
+        @callback.calledWith(null, @lines, @version, [], @ranges, @pathname).should.equal true
 
      it "should time the execution", ->
        @Metrics.Timer::done.called.should.equal true
diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee
index b5ebe02339..d52956635d 100644
--- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee
+++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee
@@ -28,6 +28,7 @@ describe "HttpController", ->
     @version = 42
     @fromVersion = 42
     @ranges = { changes: "mock", comments: "mock" }
+    @pathname = '/a/b/c'
     @req =
       params:
         project_id: @project_id
@@ -35,7 +36,7 @@ describe "HttpController", ->
 
   describe "when the document exists and no recent ops are requested", ->
     beforeEach ->
-      @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, [], @ranges)
+      @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, [], @ranges, @pathname)
      @HttpController.getDoc(@req, @res, @next)
 
    it "should get the doc", ->
@@ -51,6 +52,7 @@ describe "HttpController", ->
          version: @version
          ops: []
          ranges: @ranges
+          pathname: @pathname
        }))
        .should.equal true

From a3420b12369273a4b76f3ec5f3ddbb970b309418 Mon Sep 17 00:00:00 2001
From: Hayden Faulds
Date: Fri, 10 Nov 2017 14:54:56 +0000
Subject: [PATCH 364/769] version entity additions

---
 .../app/coffee/ProjectManager.coffee          |  10 +-
 .../app/coffee/RedisManager.coffee            |  15 ++
 .../ProjectManager/updateProjectTests.coffee  | 144 ++++++++++++------
 .../RedisManager/RedisManagerTests.coffee     |  27 ++++
 4 files changed, 151 insertions(+), 45 deletions(-)

diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
index 2f85173b4c..fe7867319d 100644
--- a/services/document-updater/app/coffee/ProjectManager.coffee
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -102,11 +102,17 @@ module.exports = ProjectManager =
 
     handleDocUpdate = (update, cb) ->
       doc_id = update.id
-      DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, cb
+      if update.docLines?
+        RedisManager.addEntity project_id, 'doc', doc_id, user_id, update, cb
+      else
+        DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, cb
 
     handleFileUpdate = (update, cb) ->
       file_id = update.id
-      RedisManager.renameFile project_id, file_id, user_id, update, cb
+      if update.url?
+        RedisManager.addEntity project_id, 'file', file_id, user_id, update, cb
+      else
+        RedisManager.renameFile project_id, file_id, user_id, update, cb
 
     async.each docUpdates, handleDocUpdate, (error) ->
       return callback(error) if error?
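# The dispatch above keys off the shape of each update object: an update
# carrying docLines or url is an addition and goes to RedisManager.addEntity,
# anything else is treated as a rename. A minimal sketch of the payload shapes
# involved (field names come from this commit's tests; ids and paths below are
# illustrative only):
#
#   renameDocUpdate =
#     id: 'doc-id-1'
#     pathname: '/old.tex'
#     newPathname: '/new.tex'                 # no docLines => DocumentManager.renameDocWithLock
#
#   addDocUpdate =
#     id: 'doc-id-2'
#     pathname: '/chapter.tex'
#     docLines: "a\nb"                        # docLines present => RedisManager.addEntity as 'doc'
#
#   addFileUpdate =
#     id: 'file-id-1'
#     pathname: '/figure.png'
#     url: 'filestore.example.com/file-id-1'  # url present => RedisManager.addEntity as 'file'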
diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee
index 56ce0fb9f9..df84947a22 100644
--- a/services/document-updater/app/coffee/RedisManager.coffee
+++ b/services/document-updater/app/coffee/RedisManager.coffee
@@ -300,6 +300,21 @@ module.exports = RedisManager =
 
     rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback
 
+  addEntity: (project_id, entity_type, entity_id, user_id, update, callback = (error) ->) ->
+    update =
+      pathname: update.pathname
+      docLines: update.docLines
+      url: update.url
+      meta:
+        user_id: user_id
+        ts: new Date()
+    update[entity_type] = entity_id
+
+    logger.log {project_id, update}, "queue add operation to project-history"
+    jsonUpdate = JSON.stringify(update)
+
+    rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback
+
   clearUnflushedTime: (doc_id, callback = (error) ->) ->
     rclient.del keys.unflushedTime(doc_id:doc_id), callback
 
diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee
index 7009405842..898479a8c7 100644
--- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee
+++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee
@@ -19,55 +19,113 @@ describe "ProjectManager", ->
     @callback = sinon.stub()
 
   describe "updateProjectWithLocks", ->
-    beforeEach ->
-      @firstDocUpdate =
-        id: 1
-        update: 'foo'
-      @secondDocUpdate =
-        id: 2
-        update: 'bar'
-      @docUpdates = [ @firstDocUpdate, @secondDocUpdate ]
-      @firstFileUpdate =
-        id: 2
-        update: 'bar'
-      @fileUpdates = [ @firstFileUpdate ]
-      @DocumentManager.renameDocWithLock = sinon.stub().yields()
-      @RedisManager.renameFile = sinon.stub().yields()
-
-    describe "successfully", ->
+    describe "rename operations", ->
       beforeEach ->
+        @firstDocUpdate =
+          id: 1
+          pathname: 'foo'
+          newPathname: 'foo'
+        @secondDocUpdate =
+          id: 2
+          pathname: 'bar'
+          newPathname: 'bar2'
+        @docUpdates = [ @firstDocUpdate, @secondDocUpdate ]
+        @firstFileUpdate =
+          id: 2
+          pathname: 'bar'
+          newPathname: 'bar2'
+        @fileUpdates = [ @firstFileUpdate ]
+        @DocumentManager.renameDocWithLock = sinon.stub().yields()
+        @RedisManager.renameFile = sinon.stub().yields()
 
      describe "successfully", ->
        beforeEach ->
-          @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
+          @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
 
-      it "should rename the docs in the updates", ->
+        it "should rename the docs in the updates", ->
          @DocumentManager.renameDocWithLock
-            .calledWith(@project_id, @firstDocUpdate.id, @user_id, @firstDocUpdate)
+            .calledWith(@project_id, @firstDocUpdate.id, @user_id, @firstDocUpdate)
            .should.equal true
          @DocumentManager.renameDocWithLock
-            .calledWith(@project_id, @secondDocUpdate.id, @user_id, @secondDocUpdate)
+            .calledWith(@project_id, @secondDocUpdate.id, @user_id, @secondDocUpdate)
            .should.equal true
 
        it "should rename the files in the updates", ->
          @RedisManager.renameFile
            .calledWith(@project_id, @firstFileUpdate.id, @user_id, @firstFileUpdate)
            .should.equal true
 
        it "should call the callback", ->
          @callback.called.should.equal true
 
      describe "when renaming a doc fails", ->
        beforeEach ->
          @error = new Error('error')
          @DocumentManager.renameDocWithLock = sinon.stub().yields(@error)
          @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
 
        it "should call the callback with the error", ->
          @callback.calledWith(@error).should.equal true
 
      describe "when renaming a file fails", ->
        beforeEach ->
          @error = new Error('error')
          @RedisManager.renameFile = sinon.stub().yields(@error)
          @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
 
        it "should call the callback with the error", ->
          @callback.calledWith(@error).should.equal true
 
+    describe "add operations", ->
+      beforeEach ->
+        @firstDocUpdate =
+          id: 1
+          docLines: "a\nb"
+        @secondDocUpdate =
+          id: 2
+          docLines: "a\nb"
+        @docUpdates = [ @firstDocUpdate, @secondDocUpdate ]
+        @firstFileUpdate =
+          id: 2
+          url: 'filestore.example.com/2'
+        @fileUpdates = [ @firstFileUpdate ]
+        @RedisManager.addEntity = sinon.stub().yields()
+
+      describe "successfully", ->
+        beforeEach ->
+          @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
+
+        it "should add the docs in the updates", ->
+          @RedisManager.addEntity
+            .calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, @firstDocUpdate)
+            .should.equal true
+          @RedisManager.addEntity
+            .calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, @secondDocUpdate)
+            .should.equal true
+
+        it "should add the files in the updates", ->
+          @RedisManager.addEntity
+            .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, @firstFileUpdate)
+            .should.equal true
+
+        it "should call the callback", ->
+          @callback.called.should.equal true
+
+      describe "when adding a doc fails", ->
+        beforeEach ->
+          @error = new Error('error')
+          @RedisManager.addEntity = sinon.stub().yields(@error)
+          @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
+
+        it "should call the callback with the error", ->
+          @callback.calledWith(@error).should.equal true
+
+      describe "when adding a file fails", ->
+        beforeEach ->
+          @error = new Error('error')
+          @RedisManager.addEntity = sinon.stub().yields(@error)
+          @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
+
+        it "should call the callback with the error", ->
+          @callback.calledWith(@error).should.equal true
 
diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee
index 157c315b63..4cbac611b6 100644
--- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee
@@ -739,3 +739,30 @@ describe "RedisManager", ->
       @rclient.rpush
         .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update))
         .should.equal true
+
+  describe "addEntity", ->
+    beforeEach (done) ->
+      @rclient.rpush = sinon.stub().yields()
+      @entity_id = 1234
+      @entity_type = 'type'
+
+      @update =
+        pathname: @pathname = '/old'
+        docLines: @docLines = 'a\nb'
+        url: @url = 'filestore.example.com'
+
+      @RedisManager.addEntity @project_id, @entity_type, @entity_id, @userId, @update, done
+
+    it "should queue an update", ->
+      update =
+        pathname: @pathname
+        docLines: @docLines
+        url: @url
+        meta:
+          user_id: @userId
+          ts: new Date()
+      update[@entity_type] = @entity_id
+
+      @rclient.rpush
+        .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update))
+        .should.equal true

From d0e56e505441487cbd475579815ce51a7b99ef75 Mon Sep 17 00:00:00 2001
From: Hayden Faulds
Date: Fri, 10 Nov 2017 15:01:37 +0000
Subject: [PATCH 365/769] add acceptance tests for entity additions

---
 ...lyingUpdatesToProjectStructureTests.coffee | 57 ++++++++++++++++++-
 1 file changed, 54 insertions(+), 3 deletions(-)

diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
index 21657793a8..5526a0b40f 100644
--- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
@@ -24,7 +24,7 @@ describe "Applying updates to a project's structure", ->
         throw error if error?
         setTimeout done, 200
 
-    it "should push the applied file renames to the project history changes api", (done) ->
+    it "should push the applied file renames to the project history api", (done) ->
       rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
         throw error if error?
 
@@ -52,7 +52,7 @@ describe "Applying updates to a project's structure", ->
           throw error if error?
           setTimeout done, 200
 
-      it "should push the applied doc renames to the project history changes api", (done) ->
+      it "should push the applied doc renames to the project history api", (done) ->
         rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
           throw error if error?
 
@@ -84,7 +84,7 @@ describe "Applying updates to a project's structure", ->
           doc.pathname.should.equal @docUpdate.newPathname
           done()
 
-      it "should push the applied doc renames to the project history changes api", (done) ->
+      it "should push the applied doc renames to the project history api", (done) ->
         rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
           throw error if error?
 
@@ -96,3 +96,54 @@ describe "Applying updates to a project's structure", ->
           update.meta.ts.should.be.a('string')
 
           done()
+
+  describe "adding a file", ->
+    before (done) ->
+      @project_id = DocUpdaterClient.randomId()
+      @fileUpdate =
+        id: DocUpdaterClient.randomId()
+        pathname: '/file-path'
+        url: 'filestore.example.com'
+      @fileUpdates = [ @fileUpdate ]
+      DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, (error) ->
+        throw error if error?
+        setTimeout done, 200
+
+    it "should push the file addition to the project history api", (done) ->
+      rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+        throw error if error?
+
+        update = JSON.parse(updates[0])
+        update.file.should.equal @fileUpdate.id
+        update.pathname.should.equal '/file-path'
+        update.url.should.equal 'filestore.example.com'
+        update.meta.user_id.should.equal @user_id
+        update.meta.ts.should.be.a('string')
+
+        done()
+
+  describe "adding a doc", ->
+    before (done) ->
+      @project_id = DocUpdaterClient.randomId()
+      @docUpdate =
+        id: DocUpdaterClient.randomId()
+        pathname: '/file-path'
+        docLines: 'a\nb'
+      @docUpdates = [ @docUpdate ]
+      DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], (error) ->
+        throw error if error?
+        setTimeout done, 200
+
+    it "should push the doc addition to the project history api", (done) ->
+      rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+        throw error if error?
+
+        update = JSON.parse(updates[0])
+        update.doc.should.equal @docUpdate.id
+        update.pathname.should.equal '/file-path'
+        update.docLines.should.equal 'a\nb'
+        update.meta.user_id.should.equal @user_id
+        update.meta.ts.should.be.a('string')
+
+        done()
+

From a4bbf5cf52098dfa37ad5429046912a0754b2bc6 Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Mon, 13 Nov 2017 11:53:39 +0000
Subject: [PATCH 366/769] use a separate parameter for doc and project flush

---
 .../document-updater/app/coffee/HistoryManager.coffee | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee
index daf558c2e2..bf19a34e2f 100644
--- a/services/document-updater/app/coffee/HistoryManager.coffee
+++ b/services/document-updater/app/coffee/HistoryManager.coffee
@@ -39,13 +39,15 @@ module.exports = HistoryManager =
       else if res.statusCode < 200 and res.statusCode >= 300
         logger.error { project_id }, "project history api returned a failure status code: #{res.statusCode}"
 
-  FLUSH_EVERY_N_OPS: 100
+  FLUSH_DOC_EVERY_N_OPS: 100
+  FLUSH_PROJECT_EVERY_N_OPS: 500
+
   recordAndFlushHistoryOps: (project_id, doc_id, ops = [], doc_ops_length, project_ops_length, callback = (error) ->) ->
     if ops.length == 0
       return callback()
 
     if Settings.apis?.project_history?.enabled
-      if HistoryManager._shouldFlushHistoryOps(project_ops_length, ops, HistoryManager.FLUSH_EVERY_N_OPS)
+      if HistoryManager._shouldFlushHistoryOps(project_ops_length, ops, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
         # Do this in the background since it uses HTTP and so may be too
         # slow to wait for when processing a doc update.
         logger.log { project_ops_length, project_id }, "flushing project history api"
         HistoryManager._flushProjectChangesAsync project_id
 
     HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) ->
       return callback(error) if error?
-      if HistoryManager._shouldFlushHistoryOps(doc_ops_length, ops, HistoryManager.FLUSH_EVERY_N_OPS)
+      if HistoryManager._shouldFlushHistoryOps(doc_ops_length, ops, HistoryManager.FLUSH_DOC_EVERY_N_OPS)
         # Do this in the background since it uses HTTP and so may be too
         # slow to wait for when processing a doc update.
         logger.log { doc_ops_length, doc_id, project_id }, "flushing track changes api"

From 0bb9f147aa3b411236a0114fe888581b118e5b7f Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Mon, 13 Nov 2017 11:56:08 +0000
Subject: [PATCH 367/769] fix whitespace

---
 .../document-updater/app/coffee/HistoryManager.coffee | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee
index daf558c2e2..19d4c933c0 100644
--- a/services/document-updater/app/coffee/HistoryManager.coffee
+++ b/services/document-updater/app/coffee/HistoryManager.coffee
@@ -18,15 +18,9 @@ module.exports = HistoryManager =
       if error?
-        logger.error(
-          { error, doc_id, project_id},
-          "track changes doc to track changes api"
-        )
+        logger.error { error, doc_id, project_id}, "track changes doc to track changes api"
       else if res.statusCode < 200 and res.statusCode >= 300
-        logger.error(
-          { doc_id, project_id },
-          "track changes api returned a failure status code: #{res.statusCode}"
-        )
+        logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}"
 
   _flushProjectChangesAsync: (project_id) ->
     return if !Settings.apis?.project_history?

From 6abf9c227d7da4d341869f20eb862c796358d306 Mon Sep 17 00:00:00 2001
From: James Allen
Date: Mon, 11 Dec 2017 17:26:32 +0000
Subject: [PATCH 368/769] Calculate and send doc length before each update

---
 .../app/coffee/UpdateManager.coffee           | 26 ++++++++++++++-----
 .../UpdateManager/UpdateManagerTests.coffee   | 18 ++++++++++---
 2 files changed, 34 insertions(+), 10 deletions(-)

diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee
index 5064725aa0..56c0faa165 100644
--- a/services/document-updater/app/coffee/UpdateManager.coffee
+++ b/services/document-updater/app/coffee/UpdateManager.coffee
@@ -79,7 +79,7 @@ module.exports = UpdateManager =
         profile.log("sharejs.applyUpdate")
         return callback(error) if error?
         RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) ->
-          UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, updatedDocLines)
+          UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, lines)
           profile.log("RangesManager.applyUpdate")
           return callback(error) if error?
           RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) ->
@@ -129,12 +129,26 @@ module.exports = UpdateManager =
         op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD")
     return update
 
-  _addProjectHistoryMetadataToOps: (ops, pathname, lines) ->
+  _addProjectHistoryMetadataToOps: (updates, pathname, lines) ->
     doc_length = _.reduce lines, (chars, line) -> chars + line.length, 0
     doc_length += lines.length - 1 # count newline characters
-    ops.forEach (op) ->
-      op.meta ||= {}
-      op.meta.pathname = pathname
-      op.meta.doc_length = doc_length
+    updates.forEach (update) ->
+      update.meta ||= {}
+      update.meta.pathname = pathname
+      update.meta.doc_length = doc_length
+      # Each update may contain multiple ops, i.e.
+      # [{
+      #   op: [{i: "foo", p: 4}, {d: "bar", p:8}]
+      # }, {
+      #   op: [{d: "baz", p: 40}, {i: "qux", p:8}]
+      # }]
+      # We want to include the doc_length at the start of each update,
+      # before its ops are applied. However, we need to track any
+      # changes to it for the next update.
+      for op in update.op
+        if op.i?
+          doc_length += op.i.length
+        if op.d?
+          doc_length -= op.d.length

diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
index 946f5d9fbe..1d933bc5d4 100644
--- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
@@ -245,20 +245,30 @@ describe "UpdateManager", ->
       lines = [
         'test'
         'data'
       ]
-      appliedOps = [ {v: 42, op: "mock-op-42"}, { v: 45, op: "mock-op-45" }]
+      appliedOps = [
+        { v: 42, op: [{i: "foo", p: 4}, { i: "bar", p: 6 }] },
+        { v: 45, op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] },
+        { v: 49, op: [{i: "penguin", p: 18}] }
+      ]
       @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, lines)
       appliedOps.should.deep.equal [{
         v: 42
-        op: "mock-op-42"
+        op: [{i: "foo", p: 4}, { i: "bar", p: 6 }]
         meta:
           pathname: @pathname
           doc_length: 14
       }, {
         v: 45
-        op: "mock-op-45"
+        op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }]
         meta:
           pathname: @pathname
-          doc_length: 14
+          doc_length: 20 # 14 + 'foo' + 'bar'
+      }, {
+        v: 49
+        op: [{i: "penguin", p: 18}]
+        meta:
+          pathname: @pathname
+          doc_length: 23 # 20 - 'qux' + 'bazbaz'
       }]
 
   describe "lockUpdatesAndDo", ->

From 41f15c4fa898ab586977f53d939622f99d88e38b Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Thu, 14 Dec 2017 14:46:27 +0000
Subject: [PATCH 369/769] fix unit test

---
 .../test/unit/coffee/UpdateManager/UpdateManagerTests.coffee | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
index 1d933bc5d4..e91c35f7e6 100644
--- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
@@ -194,9 +194,9 @@ describe "UpdateManager", ->
         .calledWith(@project_id, @doc_id, @updatedDocLines, @version, @appliedOps, @updated_ranges)
         .should.equal true
 
-    it "shoould add metadata to the ops" , ->
+    it "should add metadata to the ops" , ->
       @UpdateManager._addProjectHistoryMetadataToOps
-        .calledWith(@appliedOps, @pathname, @updatedDocLines)
+        .calledWith(@appliedOps, @pathname, @lines)
        .should.equal true
 
    it "should push the applied ops into the history queue", ->

From a8b14552c0069f22026e794412a87cd920174f42 Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Tue, 19 Dec 2017 12:27:57 +0000
Subject: [PATCH 370/769] allow flush to succeed when doc is already deleted

---
 .../document-updater/app/coffee/ProjectManager.coffee | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
index fe7867319d..03b57acf53 100644
--- a/services/document-updater/app/coffee/ProjectManager.coffee
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -20,10 +20,15 @@ module.exports = ProjectManager =
       do (doc_id) ->
         jobs.push (callback) ->
           DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) ->
-          if error?
+          if error? and error instanceof Errors.NotFoundError
+            logger.warn err: error, project_id: project_id, doc_id: doc_id, "found deleted doc when flushing, removing from redis"
+            callback()
+          else if error?
             logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc"
             errors.push(error)
-          callback()
+            callback()
+          else
+            callback()
 
     logger.log project_id: project_id, doc_ids: doc_ids, "flushing docs"
     async.series jobs, () ->

From 0b583dd4e6bc699ad5d25a976122d275cb7d42bb Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Wed, 20 Dec 2017 13:26:57 +0000
Subject: [PATCH 371/769] fix log message

---
 services/document-updater/app/coffee/ProjectManager.coffee | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
index 03b57acf53..abe5818e34 100644
--- a/services/document-updater/app/coffee/ProjectManager.coffee
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -21,7 +21,7 @@ module.exports = ProjectManager =
       jobs.push (callback) ->
         DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) ->
           if error? and error instanceof Errors.NotFoundError
-            logger.warn err: error, project_id: project_id, doc_id: doc_id, "found deleted doc when flushing, removing from redis"
+            logger.warn err: error, project_id: project_id, doc_id: doc_id, "found deleted doc when flushing"
             callback()
           else if error?

From ccf6cb1a18f2d2d68faee5b5e0c55fff87e5e358 Mon Sep 17 00:00:00 2001
From: James Allen
Date: Fri, 29 Dec 2017 08:13:16 +0000
Subject: [PATCH 372/769] Provide hosts as environment settings, add npm run
 start script and update coffeescript

---
 .../app/coffee/RedisManager.coffee            |  6 ++---
 .../config/settings.defaults.coffee           | 23 ++++++++++---------
 services/document-updater/package.json        |  6 ++++-
 3 files changed, 20 insertions(+), 15 deletions(-)

diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee
index df84947a22..cd12b497a1 100644
--- a/services/document-updater/app/coffee/RedisManager.coffee
+++ b/services/document-updater/app/coffee/RedisManager.coffee
@@ -44,7 +44,7 @@ module.exports = RedisManager =
     docLines = JSON.stringify(docLines)
     if docLines.indexOf("\u0000") != -1
       error = new Error("null bytes found in doc lines")
-      logger.error err: error, doc_id: doc_id, docLines: docLines, error.message
+      logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message
       return callback(error)
     docHash = RedisManager._computeHash(docLines)
     logger.log project_id:project_id, doc_id:doc_id, version: version, hash:docHash, "putting doc in redis"
@@ -216,13 +216,13 @@ module.exports = RedisManager =
       for op in jsonOps
         if op.indexOf("\u0000") != -1
           error = new Error("null bytes found in jsonOps")
-          logger.error err: error, doc_id: doc_id, jsonOps: jsonOps, error.message
+          logger.error {err: error, doc_id: doc_id, jsonOps: jsonOps}, error.message
           return callback(error)
 
       newDocLines = JSON.stringify(docLines)
       if newDocLines.indexOf("\u0000") != -1
         error = new Error("null bytes found in doc lines")
-        logger.error err: error, doc_id: doc_id, newDocLines: newDocLines, error.message
+        logger.error {err: error, doc_id: doc_id, newDocLines: newDocLines}, error.message
         return callback(error)
       newHash = RedisManager._computeHash(newDocLines)
 
diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee
index fee3d614c2..96ae5d5735 100755
--- a/services/document-updater/config/settings.defaults.coffee
+++ b/services/document-updater/config/settings.defaults.coffee
@@ -5,23 +5,24 @@ http.globalAgent.maxSockets = 300
 module.exports =
   internal:
     documentupdater:
+      host: process.env["LISTEN_ADDRESS"] or "localhost"
       port: 3003
 
   apis:
     web:
-      url: "http://localhost:3000"
+      url: "http://#{process.env["WEB_HOST"] or "localhost"}:3000"
       user: "sharelatex"
       pass: "password"
     trackchanges:
-      url: "http://localhost:3015"
+      url: "http://#{process.env["TRACK_CHANGES_HOST"] or "localhost"}:3015"
     project_history:
       enabled: process.env.SHARELATEX_ENABLE_PROJECT_HISTORY == 'true'
-      url: "http://localhost:3054"
+      url: "http://#{process.env["PROJECT_HISTORY_HOST"] or "localhost"}:3054"
 
   redis:
     realtime:
-      port:"6379"
-      host:"localhost"
+      port: "6379"
+      host: process.env["REDIS_HOST"] or "localhost"
       password:""
       key_schema:
         pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
@@ -33,7 +34,7 @@ module.exports =
       #   pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}"
     documentupdater:
       port: "6379"
-      host: "localhost"
+      host: process.env["REDIS_HOST"] or "localhost"
       password: ""
       key_schema:
         blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
@@ -62,8 +63,8 @@ module.exports =
      #   ranges: ({doc_id}) -> "Ranges:{#{doc_id}}"
      #   projectState: ({project_id}) -> "ProjectState:{#{project_id}}"
     history:
-      port:"6379"
-      host:"localhost"
+      port: "6379"
+      host: process.env["REDIS_HOST"] or "localhost"
       password:""
       key_schema:
         uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
@@ -80,8 +81,8 @@ module.exports =
      #   uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}"
      #   docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}"
     lock:
-      port:"6379"
-      host:"localhost"
+      port: "6379"
+      host: process.env["REDIS_HOST"] or "localhost"
       password:""
       key_schema:
         blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
@@ -95,4 +96,4 @@ module.exports =
   max_doc_length: 2 * 1024 * 1024 # 2mb
 
   mongo:
-    url: 'mongodb://127.0.0.1/sharelatex'
+    url: "mongodb://#{process.env["MONGO_HOST"] or "localhost"}/sharelatex"
diff --git a/services/document-updater/package.json b/services/document-updater/package.json
index 269e9f42f6..7ef7cccc82 100644
--- a/services/document-updater/package.json
+++ b/services/document-updater/package.json
@@ -6,9 +6,13 @@
     "type": "git",
     "url": "https://github.com/sharelatex/document-updater-sharelatex.git"
   },
+  "scripts": {
+    "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee",
+    "start": "npm run compile:app && node app.js"
+  },
   "dependencies": {
     "async": "^2.5.0",
-    "coffee-script": "1.4.0",
+    "coffee-script": "~1.7.0",
     "express": "3.3.4",
     "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6",
     "lynx": "0.0.11",

From 3d313ebc53c78237a0bc56bb94dc539064d5b7d6 Mon Sep 17 00:00:00 2001
From: Michael Walker
Date: Tue, 23 Jan 2018 11:10:54 +0000
Subject: [PATCH 373/769] Add method to get number of queued project updates

---
 services/document-updater/app/coffee/RedisManager.coffee | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee
index cd12b497a1..1d16f54fa1 100644
--- a/services/document-updater/app/coffee/RedisManager.coffee
+++ b/services/document-updater/app/coffee/RedisManager.coffee
@@ -315,6 +315,9 @@ module.exports = RedisManager =
     rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback
 
+  numQueuedProjectUpdates: (project_id, callback = (error, length) ->) ->
+    rclient.llen projectHistoryKeys.projectHistoryOps({project_id}), callback
+
   clearUnflushedTime: (doc_id, callback = (error) ->) ->
     rclient.del keys.unflushedTime(doc_id:doc_id), callback

From 24c74db0dc354b178273a18d18ba360b5e253d0c Mon Sep 17 00:00:00 2001
From: Michael Walker
Date: Tue, 23 Jan 2018 11:11:15 +0000
Subject: [PATCH 374/769] Flush project changes when queue hits limit

---
 .../document-updater/app/coffee/HistoryManager.coffee | 11 +++++++----
 .../document-updater/app/coffee/ProjectManager.coffee |  9 ++++++++-
 2 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee
index 2f6372faa2..5ebef79fee 100644
--- a/services/document-updater/app/coffee/HistoryManager.coffee
+++ b/services/document-updater/app/coffee/HistoryManager.coffee
@@ -42,10 +42,7 @@ module.exports = HistoryManager =
     if Settings.apis?.project_history?.enabled
       if HistoryManager._shouldFlushHistoryOps(project_ops_length, ops, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
-        # Do this in the background since it uses HTTP and so may be too
-        # slow to wait for when processing a doc update.
-        logger.log { project_ops_length, project_id }, "flushing project history api"
-        HistoryManager._flushProjectChangesAsync project_id
+        HistoryManager.flushProjectChanges project_id, project_ops_length
 
     HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) ->
       return callback(error) if error?
@@ -56,6 +53,12 @@ module.exports = HistoryManager =
         HistoryManager._flushDocChangesAsync project_id, doc_id
       callback()
 
+  flushProjectChanges: (project_id, project_ops_length) ->
+    # Do this in the background since it uses HTTP and so may be too
+    # slow to wait for when processing a doc update.
+    logger.log { project_ops_length, project_id }, "flushing project history api"
+    HistoryManager._flushProjectChangesAsync project_id
+
   _shouldFlushHistoryOps: (length, ops, threshold) ->
     return false if !length # don't flush unless we know the length
     # We want to flush every 100 ops, i.e. 100, 200, 300, etc
diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
index abe5818e34..2493719007 100644
--- a/services/document-updater/app/coffee/ProjectManager.coffee
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -1,5 +1,6 @@
 RedisManager = require "./RedisManager"
 DocumentManager = require "./DocumentManager"
+HistoryManager = require "./HistoryManager"
 async = require "async"
 logger = require "logger-sharelatex"
 Metrics = require "./Metrics"
@@ -121,4 +122,10 @@ module.exports = ProjectManager =
 
     async.each docUpdates, handleDocUpdate, (error) ->
       return callback(error) if error?
-      async.each fileUpdates, handleFileUpdate, callback
+      async.each fileUpdates, handleFileUpdate, (error) ->
+        return callback(error) if error?
+        RedisManager.numQueuedProjectUpdates project_id, (error, length) ->
+          return callback(error) if error?
+          if length >= HistoryManager.FLUSH_PROJECT_EVERY_N_OPS
+            HistoryManager.flushProjectChanges project_id, length
+          callback()

From 3028fb9c3de47fce1d5b1fbbf240e2bd3f6f35da Mon Sep 17 00:00:00 2001
From: Michael Walker
Date: Wed, 24 Jan 2018 11:37:28 +0000
Subject: [PATCH 375/769] Only flush project updates when crossing the
 threshold

---
 .../app/coffee/HistoryManager.coffee          | 20 +++++++++----------
 .../app/coffee/ProjectManager.coffee          | 11 +++++-----
 .../app/coffee/RedisManager.coffee            |  3 ---
 .../HistoryManager/HistoryManagerTests.coffee | 16 +++++++-------
 4 files changed, 23 insertions(+), 27 deletions(-)

diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee
index 5ebef79fee..cc3da22b11 100644
--- a/services/document-updater/app/coffee/HistoryManager.coffee
+++ b/services/document-updater/app/coffee/HistoryManager.coffee
@@ -7,7 +7,7 @@ module.exports = HistoryManager =
   flushChangesAsync: (project_id, doc_id) ->
     HistoryManager._flushDocChangesAsync project_id, doc_id
     if Settings.apis?.project_history?.enabled
-      HistoryManager._flushProjectChangesAsync project_id
+      HistoryManager.flushProjectChangesAsync project_id
 
   _flushDocChangesAsync: (project_id, doc_id) ->
     if !Settings.apis?.trackchanges?
@@ -22,7 +22,7 @@ module.exports = HistoryManager =
       else if res.statusCode < 200 and res.statusCode >= 300
         logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}"
 
-  _flushProjectChangesAsync: (project_id) ->
+  flushProjectChangesAsync: (project_id) ->
     return if !Settings.apis?.project_history?
 
     url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush"
@@ -42,7 +42,10 @@ module.exports = HistoryManager =
 
     if Settings.apis?.project_history?.enabled
       if HistoryManager._shouldFlushHistoryOps(project_ops_length, ops, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
-        HistoryManager.flushProjectChanges project_id, project_ops_length
+        # Do this in the background since it uses HTTP and so may be too
+        # slow to wait for when processing a doc update.
+        logger.log { project_ops_length, project_id }, "flushing project history api"
+        HistoryManager.flushProjectChangesAsync project_id
 
     HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) ->
       return callback(error) if error?
@@ -53,19 +56,16 @@ module.exports = HistoryManager =
         HistoryManager._flushDocChangesAsync project_id, doc_id
       callback()
 
-  flushProjectChanges: (project_id, project_ops_length) ->
-    # Do this in the background since it uses HTTP and so may be too
-    # slow to wait for when processing a doc update.
-    logger.log { project_ops_length, project_id }, "flushing project history api"
-    HistoryManager._flushProjectChangesAsync project_id
-
   _shouldFlushHistoryOps: (length, ops, threshold) ->
+    return HistoryManager.shouldFlushHistoryOps(length, ops.length, threshold)
+
+  shouldFlushHistoryOps: (length, ops_length, threshold) ->
     return false if !length # don't flush unless we know the length
     # We want to flush every 100 ops, i.e. 100, 200, 300, etc
     # Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these
     # ops. If we've changed, then we've gone over a multiple of 100 and should flush.
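+    # e.g. with a threshold of 100 (illustrative numbers): going from length 98
+    # to 103 (ops_length 5) crosses from block 0 into block 1, so we flush;
+    # going from 103 to 110 stays inside block 1, so we do not.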
     # (Most of the time, we will only hit 100 and then flushing will put us back to 0)
-    previousLength = length - ops.length
+    previousLength = length - ops_length
     prevBlock = Math.floor(previousLength / threshold)
     newBlock = Math.floor(length / threshold)
     return newBlock != prevBlock
diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
index 2493719007..d45d770440 100644
--- a/services/document-updater/app/coffee/ProjectManager.coffee
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -122,10 +122,9 @@ module.exports = ProjectManager =
 
     async.each docUpdates, handleDocUpdate, (error) ->
       return callback(error) if error?
-      async.each fileUpdates, handleFileUpdate, (error) ->
+      async.each fileUpdates, handleFileUpdate, (error, project_ops_length) ->
         return callback(error) if error?
-        RedisManager.numQueuedProjectUpdates project_id, (error, length) ->
-          return callback(error) if error?
-          if length >= HistoryManager.FLUSH_PROJECT_EVERY_N_OPS
-            HistoryManager.flushProjectChanges project_id, length
-          callback()
+        if HistoryManager.shouldFlushHistoryOps(project_ops_length, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
+          logger.log { project_ops_length, project_id }, "flushing project history api"
+          HistoryManager.flushProjectChangesAsync project_id
+        callback()
diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee
index 1d16f54fa1..cd12b497a1 100644
--- a/services/document-updater/app/coffee/RedisManager.coffee
+++ b/services/document-updater/app/coffee/RedisManager.coffee
@@ -315,9 +315,6 @@ module.exports = RedisManager =
     rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback
 
-  numQueuedProjectUpdates: (project_id, callback = (error, length) ->) ->
-    rclient.llen projectHistoryKeys.projectHistoryOps({project_id}), callback
-
   clearUnflushedTime: (doc_id, callback = (error) ->) ->
     rclient.del keys.unflushedTime(doc_id:doc_id), callback
 
diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
index 4956a410b2..b4f622c7b5 100644
--- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
@@ -24,7 +24,7 @@ describe "HistoryManager", ->
   describe "flushChangesAsync", ->
     beforeEach ->
       @HistoryManager._flushDocChangesAsync = sinon.stub()
-      @HistoryManager._flushProjectChangesAsync = sinon.stub()
+      @HistoryManager.flushProjectChangesAsync = sinon.stub()
 
       @HistoryManager.flushChangesAsync(@project_id, @doc_id)
 
@@ -34,7 +34,7 @@ describe "HistoryManager", ->
         .should.equal true
 
     it "flushes project changes", ->
-      @HistoryManager._flushProjectChangesAsync
+      @HistoryManager.flushProjectChangesAsync
         .calledWith(@project_id)
         .should.equal true
 
@@ -49,11 +49,11 @@ describe "HistoryManager", ->
         .calledWith("#{@Settings.apis.trackchanges.url}/project/#{@project_id}/doc/#{@doc_id}/flush")
         .should.equal true
 
-  describe "_flushProjectChangesAsync", ->
+  describe "flushProjectChangesAsync", ->
     beforeEach ->
       @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204)
 
-      @HistoryManager._flushProjectChangesAsync @project_id
+      @HistoryManager.flushProjectChangesAsync @project_id
 
     it "should send a request to the project history api", ->
       @request.post
         .calledWith("#{@Settings.apis.project_history.url}/project/#{@project_id}/flush")
         .should.equal true
 
@@ -66,7 +66,7 @@ describe "HistoryManager", ->
       @project_ops_length = 10
       @doc_ops_length = 5
 
-      @HistoryManager._flushProjectChangesAsync = sinon.stub()
+      @HistoryManager.flushProjectChangesAsync = sinon.stub()
       @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3)
       @HistoryManager._flushDocChangesAsync = sinon.stub()
 
     it "should not flush project changes", ->
-      @HistoryManager._flushProjectChangesAsync.called.should.equal false
+      @HistoryManager.flushProjectChangesAsync.called.should.equal false
 
     it "should not record doc has history ops", ->
       @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false
 
     it "should flush project changes", ->
-      @HistoryManager._flushProjectChangesAsync
+      @HistoryManager.flushProjectChangesAsync
         .calledWith(@project_id)
         .should.equal true
 
     it "should not flush project changes", ->
-      @HistoryManager._flushProjectChangesAsync.called.should.equal false
+      @HistoryManager.flushProjectChangesAsync.called.should.equal false
 
     it "should record doc has history ops", ->
       @HistoryRedisManager.recordDocHasHistoryOps

From d2865f0650909eca1106ad708b14bd3b08e96ab7 Mon Sep 17 00:00:00 2001
From: Michael Walker
Date: Fri, 26 Jan 2018 14:41:10 +0000
Subject: [PATCH 376/769] Correctly count project_ops_length

---
 .../app/coffee/ProjectManager.coffee          | 21 +++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
index d45d770440..af2e27c8e5 100644
--- a/services/document-updater/app/coffee/ProjectManager.coffee
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -106,25 +106,34 @@ module.exports = ProjectManager =
       timer.done()
       _callback(args...)
 
+    project_ops_length = 0
+
     handleDocUpdate = (update, cb) ->
       doc_id = update.id
       if update.docLines?
-        RedisManager.addEntity project_id, 'doc', doc_id, user_id, update, cb
+        RedisManager.addEntity project_id, 'doc', doc_id, user_id, update, (error, count) =>
+          project_ops_length = count
+          cb(error)
       else
-        DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, cb
+        DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, (error, count) =>
+          project_ops_length = count
+          cb(error)
 
     handleFileUpdate = (update, cb) ->
       file_id = update.id
       if update.url?
-        RedisManager.addEntity project_id, 'file', file_id, user_id, update, cb
+        RedisManager.addEntity project_id, 'file', file_id, user_id, update, (error, count) =>
+          project_ops_length = count
+          cb(error)
       else
-        RedisManager.renameFile project_id, file_id, user_id, update, cb
+        RedisManager.renameFile project_id, file_id, user_id, update, (error, count) =>
+          project_ops_length = count
+          cb(error)
 
     async.each docUpdates, handleDocUpdate, (error) ->
       return callback(error) if error?
       if HistoryManager.shouldFlushHistoryOps(project_ops_length, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
-        logger.log { project_ops_length, project_id }, "flushing project history api"
         HistoryManager.flushProjectChangesAsync project_id
       callback()

From bdaa2ffa77cb80befcfff65dc71989ca5efa4dbf Mon Sep 17 00:00:00 2001
From: Michael Walker
Date: Fri, 26 Jan 2018 11:53:49 +0000
Subject: [PATCH 377/769] Add an acceptance test for flushing project changes

---
 ...lyingUpdatesToProjectStructureTests.coffee | 41 +++++++++++++++++++
 1 file changed, 41 insertions(+)

diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
index 5526a0b40f..d4521b6b18 100644
--- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
@@ -4,6 +4,10 @@ chai.should()
 Settings = require('settings-sharelatex')
 rclient_history = require("redis-sharelatex").createClient(Settings.redis.history)
 ProjectHistoryKeys = Settings.redis.project_history.key_schema
+SandboxedModule = require('sandboxed-module')
+HistoryManagerModulePath = require('path').join __dirname, '../../../app/js/HistoryManager.js'
+ProjectManagerModulePath = require('path').join __dirname, '../../../app/js/ProjectManager.js'
+RedisManagerModulePath = require('path').join __dirname, '../../../app/js/RedisManager.js'
 
 MockWebApi = require "./helpers/MockWebApi"
 DocUpdaterClient = require "./helpers/DocUpdaterClient"
@@ -147,3 +151,40 @@ describe "Applying updates to a project's structure", ->
 
           done()
 
+  describe "with enough updates to flush to the history service", ->
+    before (done) ->
+      @RedisManager = SandboxedModule.require RedisManagerModulePath, requires:
+        "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+      @HistoryManager = SandboxedModule.require HistoryManagerModulePath, requires:
+        "request": {}
+        "settings-sharelatex": {}
+        "logger-sharelatex": @logger
+        "./HistoryRedisManager": {}
+      @HistoryManager.flushProjectChangesAsync = sinon.stub()
+      @ProjectManager = SandboxedModule.require ProjectManagerModulePath, requires:
+        './HistoryManager': @HistoryManager
+        'logger-sharelatex' : @logger
+        './Metrics': @Metrics = {}
+        './RedisManager': @RedisManager
+      @Metrics.Timer = class Timer
+        done: sinon.stub()
+
+      @project_id = DocUpdaterClient.randomId()
+      @user_id = DocUpdaterClient.randomId()
+
+      updates = []
+      for v in [0..599] # Should flush after 500 ops
+        updates.push
+          id: DocUpdaterClient.randomId(),
+          pathname: '/file-' + v
+          docLines: 'a\nb'
+
+      # Send updates in chunks to cause multiple flushes
+      @ProjectManager.updateProjectWithLocks @project_id, @user_id, updates.slice(0, 250), [], (error) =>
+        throw error if error?
+        @ProjectManager.updateProjectWithLocks @project_id, @user_id, updates.slice(250), [], (error) =>
+          throw error if error?
+          setTimeout done, 2000
+
+    it "should flush project history", ->
+      @HistoryManager.flushProjectChangesAsync.calledWith(@project_id).should.equal true

From 772ee4083a4976a108e2e59aad3561b1e72c6468 Mon Sep 17 00:00:00 2001
From: Michael Walker
Date: Wed, 31 Jan 2018 11:17:56 +0000
Subject: [PATCH 378/769] Turn a unit test into an acceptance test

---
 ...lyingUpdatesToProjectStructureTests.coffee | 34 ++++++------------
 1 file changed, 11 insertions(+), 23 deletions(-)

diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
index d4521b6b18..d006890bed 100644
--- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
@@ -4,11 +4,8 @@ chai.should()
 Settings = require('settings-sharelatex')
 rclient_history = require("redis-sharelatex").createClient(Settings.redis.history)
 ProjectHistoryKeys = Settings.redis.project_history.key_schema
-SandboxedModule = require('sandboxed-module')
-HistoryManagerModulePath = require('path').join __dirname, '../../../app/js/HistoryManager.js'
-ProjectManagerModulePath = require('path').join __dirname, '../../../app/js/ProjectManager.js'
-RedisManagerModulePath = require('path').join __dirname, '../../../app/js/RedisManager.js'
 
+MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi"
 MockWebApi = require "./helpers/MockWebApi"
 DocUpdaterClient = require "./helpers/DocUpdaterClient"
 
@@ -153,22 +150,6 @@ describe "Applying updates to a project's structure", ->
 
   describe "with enough updates to flush to the history service", ->
     before (done) ->
-      @RedisManager = SandboxedModule.require RedisManagerModulePath, requires:
-        "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
-      @HistoryManager = SandboxedModule.require HistoryManagerModulePath, requires:
-        "request": {}
-        "settings-sharelatex": {}
-        "logger-sharelatex": @logger
-        "./HistoryRedisManager": {}
-      @HistoryManager.flushProjectChangesAsync = sinon.stub()
-      @ProjectManager = SandboxedModule.require ProjectManagerModulePath, requires:
-        './HistoryManager': @HistoryManager
-        'logger-sharelatex' : @logger
-        './Metrics': @Metrics = {}
-        './RedisManager': @RedisManager
-      @Metrics.Timer = class Timer
-        done: sinon.stub()
-
       @project_id = DocUpdaterClient.randomId()
       @user_id = DocUpdaterClient.randomId()
 
@@ -179,12 +160,19 @@ describe "Applying updates to a project's structure", ->
           pathname: '/file-' + v
           docLines: 'a\nb'
 
+      sinon.spy MockProjectHistoryApi, "flushProject"
+
       # Send updates in chunks to cause multiple flushes
-      @ProjectManager.updateProjectWithLocks @project_id, @user_id, updates.slice(0, 250), [], (error) =>
+      projectId = @project_id
+      userId = @user_id
+      DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(0, 250), [], (error) ->
         throw error if error?
-        @ProjectManager.updateProjectWithLocks @project_id, @user_id, updates.slice(250), [], (error) =>
+        DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(250), [], (error) ->
           throw error if error?
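#   The flush itself is fire-and-forget: flushProjectChangesAsync posts to
#   the project-history service without anything waiting on the response
#   (see the HistoryManager code later in this series), which is why the
#   test settles for two seconds before asserting. Roughly:
#
#     flushProjectChangesAsync: (project_id) ->
#       url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush"
#       request.post url, (error, res, body) ->
#         # the outcome is only logged; no callback is threaded through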
setTimeout done, 2000 + after -> + MockProjectHistoryApi.flushProject.restore() + it "should flush project history", -> - @HistoryManager.flushProjectChangesAsync.calledWith(@project_id).should.equal true + MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true From 241d1b27d5c0de1a2ea700b36784d3b8b7c6749a Mon Sep 17 00:00:00 2001 From: Michael Walker Date: Wed, 31 Jan 2018 11:27:40 +0000 Subject: [PATCH 379/769] Remove _shouldFlushHistoryOps wrapper --- .../app/coffee/HistoryManager.coffee | 7 ++---- .../HistoryManager/HistoryManagerTests.coffee | 22 +++++++++---------- 2 files changed, 13 insertions(+), 16 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index cc3da22b11..9ec5db2aa3 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -41,7 +41,7 @@ module.exports = HistoryManager = return callback() if Settings.apis?.project_history?.enabled - if HistoryManager._shouldFlushHistoryOps(project_ops_length, ops, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS) + if HistoryManager.shouldFlushHistoryOps(project_ops_length, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS) # Do this in the background since it uses HTTP and so may be too # slow to wait for when processing a doc update. logger.log { project_ops_length, project_id }, "flushing project history api" @@ -49,16 +49,13 @@ module.exports = HistoryManager = HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) -> return callback(error) if error? - if HistoryManager._shouldFlushHistoryOps(doc_ops_length, ops, HistoryManager.FLUSH_DOC_EVERY_N_OPS) + if HistoryManager.shouldFlushHistoryOps(doc_ops_length, ops.length, HistoryManager.FLUSH_DOC_EVERY_N_OPS) # Do this in the background since it uses HTTP and so may be too # slow to wait for when processing a doc update. logger.log { doc_ops_length, doc_id, project_id }, "flushing track changes api" HistoryManager._flushDocChangesAsync project_id, doc_id callback() - _shouldFlushHistoryOps: (length, ops, threshold) -> - return HistoryManager.shouldFlushHistoryOps(length, ops.length, threshold) - shouldFlushHistoryOps: (length, ops_length, threshold) -> return false if !length # don't flush unless we know the length # We want to flush every 100 ops, i.e. 
100, 200, 300, etc
diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
index b4f622c7b5..75327a7ae9 100644
--- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
@@ -90,9 +90,9 @@ describe "HistoryManager", ->
 
     describe "with enough ops to flush project changes", ->
       beforeEach ->
-        @HistoryManager._shouldFlushHistoryOps = sinon.stub()
-        @HistoryManager._shouldFlushHistoryOps.withArgs(@project_ops_length).returns(true)
-        @HistoryManager._shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(false)
+        @HistoryManager.shouldFlushHistoryOps = sinon.stub()
+        @HistoryManager.shouldFlushHistoryOps.withArgs(@project_ops_length).returns(true)
+        @HistoryManager.shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(false)
 
         @HistoryManager.recordAndFlushHistoryOps(
           @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback
@@ -115,9 +115,9 @@ describe "HistoryManager", ->
 
     describe "with enough ops to flush doc changes", ->
       beforeEach ->
-        @HistoryManager._shouldFlushHistoryOps = sinon.stub()
-        @HistoryManager._shouldFlushHistoryOps.withArgs(@project_ops_length).returns(false)
-        @HistoryManager._shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(true)
+        @HistoryManager.shouldFlushHistoryOps = sinon.stub()
+        @HistoryManager.shouldFlushHistoryOps.withArgs(@project_ops_length).returns(false)
+        @HistoryManager.shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(true)
 
         @HistoryManager.recordAndFlushHistoryOps(
           @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback
@@ -154,24 +154,24 @@ describe "HistoryManager", ->
     it "should call the callback with the error", ->
       @callback.calledWith(@error).should.equal true
 
-  describe "_shouldFlushHistoryOps", ->
+  describe "shouldFlushHistoryOps", ->
     it "should return false if the number of ops is not known", ->
-      @HistoryManager._shouldFlushHistoryOps(null, ['a', 'b', 'c'], 1).should.equal false
+      @HistoryManager.shouldFlushHistoryOps(null, ['a', 'b', 'c'].length, 1).should.equal false
 
     it "should return false if the updates didn't take us past the threshold", ->
       # Currently there are 14 ops
       # Previously we were on 11 ops
       # We didn't pass over a multiple of 5
-      @HistoryManager._shouldFlushHistoryOps(14, ['a', 'b', 'c'], 5).should.equal false
+      @HistoryManager.shouldFlushHistoryOps(14, ['a', 'b', 'c'].length, 5).should.equal false
 
     it "should return true if the updates took us to the threshold", ->
       # Currently there are 15 ops
      # Previously we were on 12 ops
       # We've reached a new multiple of 5
-      @HistoryManager._shouldFlushHistoryOps(15, ['a', 'b', 'c'], 5).should.equal true
+      @HistoryManager.shouldFlushHistoryOps(15, ['a', 'b', 'c'].length, 5).should.equal true
 
     it "should return true if the updates took us past the threshold", ->
       # Currently there are 17 ops
       # Previously we were on 14 ops
       # We passed over a multiple of 5 (15)
-      @HistoryManager._shouldFlushHistoryOps(17, ['a', 'b', 'c'], 5).should.equal true
+      @HistoryManager.shouldFlushHistoryOps(17, ['a', 'b', 'c'].length, 5).should.equal true

From 6c57317f8db8c6f489efe6c9fd4c6ce324a35c7d Mon Sep 17 00:00:00 2001
From: Michael Walker
Date: Wed, 31 Jan 2018 11:41:08 +0000
Subject: [PATCH 380/769] Add a test that sending too few updates does not flush history

---
 ...lyingUpdatesToProjectStructureTests.coffee | 29 +++++++++++++++++++
 1 file changed, 29 insertions(+)

diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
index d006890bed..3762936add 100644
--- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee
@@ -176,3 +176,32 @@ describe "Applying updates to a project's structure", ->
 
     it "should flush project history", ->
       MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true
+
+  describe "with too few updates to flush to the history service", ->
+    before (done) ->
+      @project_id = DocUpdaterClient.randomId()
+      @user_id = DocUpdaterClient.randomId()
+
+      updates = []
+      for v in [0..42] # Well below the 500-op flush threshold
+        updates.push
+          id: DocUpdaterClient.randomId(),
+          pathname: '/file-' + v
+          docLines: 'a\nb'
+
+      sinon.spy MockProjectHistoryApi, "flushProject"
+
+      # Send updates in chunks
+      projectId = @project_id
+      userId = @user_id
+      DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(0, 10), [], (error) ->
+        throw error if error?
+        DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(10), [], (error) ->
+          throw error if error?
+          setTimeout done, 2000
+
+    after ->
+      MockProjectHistoryApi.flushProject.restore()
+
+    it "should not flush project history", ->
+      MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal false

From f6c79f3203ec43b96f5d5e7e03df3485b1baa7b9 Mon Sep 17 00:00:00 2001
From: James Allen
Date: Thu, 15 Feb 2018 15:55:12 +0000
Subject: [PATCH 381/769] Fix and extend unit tests

---
 .../flushAndDeleteProjectTests.coffee         |  1 +
 .../ProjectManager/flushProjectTests.coffee   |  1 +
 .../ProjectManager/getProjectDocsTests.coffee |  1 +
 .../ProjectManager/updateProjectTests.coffee  | 34 +++++++++++++++++++
 4 files changed, 37 insertions(+)

diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee
index fc2ea998f6..74161ca4a2 100644
--- a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee
+++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee
@@ -10,6 +10,7 @@ describe "ProjectManager - flushAndDeleteProject", ->
       "./RedisManager": @RedisManager = {}
       "./DocumentManager": @DocumentManager = {}
       "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+      "./HistoryManager": @HistoryManager = {}
       "./Metrics": @Metrics =
         Timer: class Timer
           done: sinon.stub()
diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee
index 301740c015..613b25ae4d 100644
--- a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee
+++ b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee
@@ -10,6 +10,7 @@ describe "ProjectManager - flushProject", ->
       "./RedisManager": @RedisManager = {}
       "./DocumentManager": @DocumentManager = {}
       "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+      "./HistoryManager": @HistoryManager = {}
       "./Metrics": @Metrics =
         Timer: class Timer
           done: sinon.stub()
diff --git
a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee index 8e3bc2206d..41870f41ad 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -11,6 +11,7 @@ describe "ProjectManager - getProjectDocsAndFlushIfOld", -> "./RedisManager": @RedisManager = {} "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } + "./HistoryManager": @HistoryManager = {} "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index 898479a8c7..5753b7f2f1 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -10,17 +10,21 @@ describe "ProjectManager", -> "./RedisManager": @RedisManager = {} "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } + "./HistoryManager": @HistoryManager = {} "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() @project_id = "project-id-123" @user_id = "user-id-123" + @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false) + @HistoryManager.flushProjectChangesAsync = sinon.stub() @callback = sinon.stub() describe "updateProjectWithLocks", -> describe "rename operations", -> beforeEach -> + @firstDocUpdate = id: 1 pathname: 'foo' @@ -55,6 +59,11 @@ describe "ProjectManager", -> .calledWith(@project_id, @firstFileUpdate.id, @user_id, @firstFileUpdate) .should.equal true + it "should not flush the history", -> + @HistoryManager.flushProjectChangesAsync + .calledWith(@project_id) + .should.equal false + it "should call the callback", -> @callback.called.should.equal true @@ -76,6 +85,16 @@ describe "ProjectManager", -> it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true + describe "with enough ops to flush", -> + beforeEach -> + @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + + it "should flush the history", -> + @HistoryManager.flushProjectChangesAsync + .calledWith(@project_id) + .should.equal true + describe "add operations", -> beforeEach -> @firstDocUpdate = @@ -108,6 +127,11 @@ describe "ProjectManager", -> .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, @firstFileUpdate) .should.equal true + it "should not flush the history", -> + @HistoryManager.flushProjectChangesAsync + .calledWith(@project_id) + .should.equal false + it "should call the callback", -> @callback.called.should.equal true @@ -129,3 +153,13 @@ describe "ProjectManager", -> it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true + describe "with enough ops to flush", -> + beforeEach -> + @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + + it "should flush the history", -> + @HistoryManager.flushProjectChangesAsync + .calledWith(@project_id) + .should.equal true + From 
5d1659457eeed3ec2457ee90a153553a6a8c39c3 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 15 Feb 2018 16:28:40 +0000 Subject: [PATCH 382/769] Update to build 1.0.0 build scripts --- services/document-updater/.nvmrc | 2 +- services/document-updater/Gruntfile.coffee | 137 ------------------ services/document-updater/Jenkinsfile | 57 ++++---- services/document-updater/Makefile | 29 ++++ services/document-updater/app.coffee | 6 +- .../config/settings.defaults.coffee | 2 +- .../document-updater/docker-compose.ci.yml | 33 +++++ services/document-updater/docker-compose.yml | 33 +++++ services/document-updater/nodemon.json | 15 ++ services/document-updater/package.json | 20 +-- .../coffee/ApplyingUpdatesToADocTests.coffee | 4 +- ...lyingUpdatesToProjectStructureTests.coffee | 7 +- .../coffee/DeletingADocumentTests.coffee | 4 +- .../coffee/DeletingAProjectTests.coffee | 4 +- .../coffee/FlushingAProjectTests.coffee | 4 +- .../coffee/FlushingDocsTests.coffee | 4 +- .../coffee/GettingADocumentTests.coffee | 3 +- .../coffee/GettingProjectDocsTests.coffee | 3 +- .../test/acceptance/coffee/RangesTests.coffee | 10 +- .../coffee/SettingADocumentTests.coffee | 4 +- .../coffee/helpers/DocUpdaterApp.coffee | 20 +++ .../test/acceptance/scripts/full-test.sh | 23 --- 22 files changed, 211 insertions(+), 213 deletions(-) delete mode 100644 services/document-updater/Gruntfile.coffee create mode 100644 services/document-updater/Makefile create mode 100644 services/document-updater/docker-compose.ci.yml create mode 100644 services/document-updater/docker-compose.yml create mode 100644 services/document-updater/nodemon.json create mode 100644 services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee delete mode 100755 services/document-updater/test/acceptance/scripts/full-test.sh diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index 26ec038c18..e1e5d1369a 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -6.9.5 \ No newline at end of file +6.9.5 diff --git a/services/document-updater/Gruntfile.coffee b/services/document-updater/Gruntfile.coffee deleted file mode 100644 index 698ee9c733..0000000000 --- a/services/document-updater/Gruntfile.coffee +++ /dev/null @@ -1,137 +0,0 @@ -module.exports = (grunt) -> - grunt.initConfig - forever: - app: - options: - index: "app.js" - - execute: - app: - src: "app.js" - - bunyan: - strict: false - - coffee: - app_dir: - expand: true, - flatten: false, - cwd: 'app/coffee', - src: ['**/*.coffee'], - dest: 'app/js/', - ext: '.js' - - app: - src: 'app.coffee' - dest: 'app.js' - - acceptance_tests: - expand: true, - flatten: false, - cwd: 'test/acceptance/coffee', - src: ['**/*.coffee'], - dest: 'test/acceptance/js/', - ext: '.js' - - unit_tests: - expand: true, - flatten: false, - cwd: 'test/unit/coffee', - src: ['**/*.coffee'], - dest: 'test/unit/js/', - ext: '.js' - - clean: - app: ["app/js"] - acceptance_tests: ["test/acceptance/js"] - unit_tests: ["test/unit/js"] - - mochaTest: - unit: - src: ["test/unit/js/#{grunt.option('feature') or '**'}/*.js"] - options: - reporter: grunt.option('reporter') or 'spec' - grep: grunt.option("grep") - acceptance: - src: ["test/acceptance/js/#{grunt.option('feature') or '*'}.js"] - options: - reporter: grunt.option('reporter') or 'spec' - grep: grunt.option("grep") - timeout: 50000 - - shell: - fullAcceptanceTests: - command: "bash ./test/acceptance/scripts/full-test.sh" - dockerTests: - command: 'docker run -v "$(pwd):/app" --rm 
sl-acceptance-test-runner' - - availabletasks: - tasks: - options: - filter: 'exclude', - tasks: [ - 'coffee' - 'clean' - 'mochaTest' - 'availabletasks' - 'execute' - 'bunyan' - ] - groups: - "Compile tasks": [ - "compile:server" - "compile:tests" - "compile" - "compile:unit_tests" - "compile:acceptance_tests" - "install" - ] - "Test tasks": [ - "test:unit" - "test:acceptance" - ] - "Run tasks": [ - "run" - "default" - ] - "Misc": [ - "help" - ] - - grunt.loadNpmTasks 'grunt-contrib-coffee' - grunt.loadNpmTasks 'grunt-contrib-clean' - grunt.loadNpmTasks 'grunt-mocha-test' - grunt.loadNpmTasks 'grunt-available-tasks' - grunt.loadNpmTasks 'grunt-execute' - grunt.loadNpmTasks 'grunt-bunyan' - grunt.loadNpmTasks 'grunt-forever' - grunt.loadNpmTasks 'grunt-shell' - - grunt.registerTask 'help', 'Display this help list', 'availabletasks' - - grunt.registerTask 'compile:server', 'Compile the server side coffee script', ['clean:app', 'coffee:app', 'coffee:app_dir'] - grunt.registerTask 'compile:unit_tests', 'Compile the unit tests', ['clean:unit_tests', 'coffee:unit_tests'] - grunt.registerTask 'compile:acceptance_tests', 'Compile the acceptance tests', ['clean:acceptance_tests', 'coffee:acceptance_tests'] - grunt.registerTask 'compile:tests', 'Compile all the tests', ['compile:acceptance_tests', 'compile:unit_tests'] - grunt.registerTask 'compile', 'Compiles everything need to run document-updater-sharelatex', ['compile:server'] - - grunt.registerTask 'install', "Compile everything when installing as an npm module", ['compile'] - - grunt.registerTask 'test:unit', 'Run the unit tests (use --grep= for individual tests)', ['compile:server', 'compile:unit_tests', 'mochaTest:unit'] - - grunt.registerTask( - 'test:acceptance:full', - "Start server and run acceptance tests", - ['shell:fullAcceptanceTests'] - ) - - grunt.registerTask( - 'test:acceptance:docker', - "Run acceptance tests inside docker container", - ['shell:dockerTests'] - ) - - grunt.registerTask 'test:acceptance', 'Run the acceptance tests (use --grep= for individual tests)', ['compile:acceptance_tests', 'mochaTest:acceptance'] - - grunt.registerTask 'run', "Compile and run the document-updater-sharelatex server", ['compile', 'bunyan', 'execute'] - grunt.registerTask 'default', 'run' diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 3d51dfcb43..ab90aaae29 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -1,10 +1,11 @@ -pipeline { +String cron_string = BRANCH_NAME == "master" ? "@daily" : "" +pipeline { agent any - + triggers { pollSCM('* * * * *') - cron('@daily') + cron(cron_string) } stages { @@ -17,15 +18,17 @@ pipeline { } } steps { - // we need to disable logallrefupdates, else git clones during the npm install will require git to lookup the user id - // which does not exist in the container's /etc/passwd file, causing the clone to fail. + // we need to disable logallrefupdates, else git clones + // during the npm install will require git to lookup the + // user id which does not exist in the container's + // /etc/passwd file, causing the clone to fail. 
sh 'git config --global core.logallrefupdates false' - sh 'rm -fr node_modules' + sh 'rm -rf node_modules' sh 'npm install && npm rebuild' - sh 'npm install --quiet grunt-cli' } } - stage('Compile and Test') { + + stage('Compile') { agent { docker { image 'node:6.9.5' @@ -33,47 +36,49 @@ pipeline { } } steps { - sh 'node_modules/.bin/grunt compile' - sh 'node_modules/.bin/grunt compile:acceptance_tests' - sh 'NODE_ENV=development node_modules/.bin/grunt test:unit' + sh 'npm run compile:all' } } + + stage('Unit Tests') { + steps { + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit' + } + } + stage('Acceptance Tests') { steps { - sh 'docker pull sharelatex/acceptance-test-runner' - sh 'docker run --rm -e SHARELATEX_ENABLE_PROJECT_HISTORY=true -v $(pwd):/app sharelatex/acceptance-test-runner' + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance' } } - stage('Package') { + + stage('Package and publish build') { steps { sh 'echo ${BUILD_NUMBER} > build_number.txt' sh 'touch build.tar.gz' // Avoid tar warning about files changing during read sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .' - } - } - stage('Publish') { - steps { withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") - // The deployment process uses this file to figure out the latest build - s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") } } } - stage('Sync OSS') { - when { - branch 'master' - } + stage('Publish build number') { steps { - sshagent (credentials: ['GIT_DEPLOY_KEY']) { - sh 'git push git@github.com:sharelatex/document-updater-sharelatex.git HEAD:master' + sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt' + withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { + // The deployment process uses this file to figure out the latest build + s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") } } } } post { + always { + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean' + } + failure { mail(from: "${EMAIL_ALERT_FROM}", to: "${EMAIL_ALERT_TO}", diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile new file mode 100644 index 0000000000..cc0f89bd47 --- /dev/null +++ b/services/document-updater/Makefile @@ -0,0 +1,29 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.0.0 + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = document-updater +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := docker-compose ${DOCKER_COMPOSE_FLAGS} + +clean: + rm -f app.js + rm -rf app/js + rm -rf test/unit/js + rm -rf test/acceptance/js + +test: test_unit test_acceptance + +test_unit: + @[ -d test/unit ] && $(DOCKER_COMPOSE) run --rm test_unit -- ${MOCHA_ARGS} || echo "document-updater has no unit tests" + +test_acceptance: test_clean # clear the database before each acceptance test run + @[ -d test/acceptance ] && $(DOCKER_COMPOSE) run --rm test_acceptance -- ${MOCHA_ARGS} || echo "document-updater has no acceptance tests" + +test_clean: + $(DOCKER_COMPOSE) down + +.PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index b4188292da..7c8792aa29 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -103,8 +103,10 @@ shutdownCleanly = (signal) -> port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 3003 host = Settings.internal.documentupdater.host or "localhost" -app.listen port, host, -> - logger.info "Document-updater starting up, listening on #{host}:#{port}" +if !module.parent # Called directly + app.listen port, host, -> + logger.info "Document-updater starting up, listening on #{host}:#{port}" +module.exports = app for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT'] process.on signal, shutdownCleanly(signal) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 96ae5d5735..1c7ebf283e 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -16,7 +16,7 @@ module.exports = trackchanges: url: "http://#{process.env["TRACK_CHANGES_HOST"] or "localhost"}:3015" project_history: - enabled: process.env.SHARELATEX_ENABLE_PROJECT_HISTORY == 'true' + enabled: true url: "http://#{process.env["PROJECT_HISTORY_HOST"] or "localhost"}:3054" redis: diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml new file mode 100644 index 0000000000..e2fd21d74f --- /dev/null +++ b/services/document-updater/docker-compose.ci.yml @@ -0,0 +1,33 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.0.0 + +version: "2" + +services: + test_unit: + image: node:6.9.5 + volumes: + - .:/app + working_dir: /app + entrypoint: npm run test:unit:_run + + test_acceptance: + image: node:6.9.5 + volumes: + - .:/app + working_dir: /app + environment: + REDIS_HOST: redis + MONGO_HOST: mongo + depends_on: + - redis + - mongo + entrypoint: npm run test:acceptance:_run + + redis: + image: redis + + mongo: + image: mongo:3.4 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml new file mode 100644 index 0000000000..10991d010e --- /dev/null +++ b/services/document-updater/docker-compose.yml @@ -0,0 +1,33 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.0.0 + +version: "2" + +services: + test_unit: + image: node:6.9.5 + volumes: + - .:/app + working_dir: /app + entrypoint: npm run test:unit + + test_acceptance: + image: node:6.9.5 + volumes: + - .:/app + environment: + REDIS_HOST: redis + MONGO_HOST: mongo + depends_on: + - redis + - mongo + working_dir: /app + entrypoint: npm run test:acceptance + + redis: + image: redis + + mongo: + image: mongo:3.4 diff --git a/services/document-updater/nodemon.json b/services/document-updater/nodemon.json new file mode 100644 index 0000000000..9044f921c6 --- /dev/null +++ b/services/document-updater/nodemon.json @@ -0,0 +1,15 @@ +{ + "ignore": [ + ".git", + "node_modules/" + ], + "verbose": true, + "execMap": { + "js": "npm run start" + }, + "watch": [ + "app/coffee/", + "app.coffee" + ], + "ext": "coffee" +} diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 7ef7cccc82..ce5aa9d18c 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -8,7 +8,15 @@ }, "scripts": { "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee", - "start": "npm run compile:app && node app.js" + "start": "npm run compile:app && node app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- $@", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- $@", + "compile:unit_tests": "[ -e test/unit ] && coffee -o test/unit/js -c test/unit/coffee || echo 'No unit tests to compile'", + "compile:acceptance_tests": "[ -e test/acceptance ] && coffee -o test/acceptance/js -c test/acceptance/coffee || echo 'No acceptance tests to compile'", + "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", + "nodemon": "nodemon --config nodemon.json" }, "dependencies": { "async": "^2.5.0", @@ -30,15 +38,7 @@ "chai": "^3.5.0", "chai-spies": "^0.7.1", "cluster-key-slot": "^1.0.5", - "grunt": "~0.4.2", - "grunt-available-tasks": "~0.4.1", - "grunt-bunyan": "~0.5.0", - "grunt-contrib-clean": "~0.5.0", - "grunt-contrib-coffee": "~0.10.0", - "grunt-execute": "~0.1.5", - "grunt-forever": "^0.4.7", - "grunt-mocha-test": "~0.9.0", - "grunt-shell": "^1.3.0", + "mocha": "^5.0.1", "timekeeper": "^2.0.0" } } diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 29a17c04ea..cb560a26bb 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -13,9 +13,10 @@ ProjectHistoryKeys = Settings.redis.project_history.key_schema MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Applying updates to a doc", -> - before -> + before (done) -> @lines = ["one", "two", "three"] @version = 42 @update = @@ -26,6 +27,7 @@ describe "Applying updates to a doc", -> }] v: @version @result = ["one", "one and 
a half", "two", "three"] + DocUpdaterApp.ensureRunning(done) describe "when the document is not loaded", -> before (done) -> diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee index 5526a0b40f..b2b08836d1 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee @@ -7,6 +7,7 @@ ProjectHistoryKeys = Settings.redis.project_history.key_schema MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Applying updates to a project's structure", -> before -> @@ -20,9 +21,11 @@ describe "Applying updates to a project's structure", -> pathname: '/file-path' newPathname: '/new-file-path' @fileUpdates = [ @fileUpdate ] - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, (error) -> + DocUpdaterApp.ensureRunning (error) => throw error if error? - setTimeout done, 200 + DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, (error) -> + throw error if error? + setTimeout done, 200 it "should push the applied file renames to the project history api", (done) -> rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee index 2be5b01245..c2c4462d31 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee @@ -6,9 +6,10 @@ MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Deleting a document", -> - before -> + before (done) -> @lines = ["one", "two", "three"] @version = 42 @update = @@ -22,6 +23,7 @@ describe "Deleting a document", -> sinon.spy MockTrackChangesApi, "flushDoc" sinon.spy MockProjectHistoryApi, "flushProject" + DocUpdaterApp.ensureRunning(done) after -> MockTrackChangesApi.flushDoc.restore() diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 3b889f79ea..7a5eed5691 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -7,9 +7,10 @@ MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Deleting a project", -> - before -> + before (done) -> @project_id = DocUpdaterClient.randomId() @docs = [{ id: doc_id0 = DocUpdaterClient.randomId() @@ -42,6 +43,7 @@ describe "Deleting a project", -> sinon.spy MockTrackChangesApi, "flushDoc" sinon.spy MockProjectHistoryApi, "flushProject" + DocUpdaterApp.ensureRunning(done) 
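#   Each acceptance suite now funnels startup through
#   DocUpdaterApp.ensureRunning (the helper itself is added at the end of
#   this patch): the first caller boots the app, callers arriving during
#   the boot are queued, and everyone afterwards returns immediately. The
#   core of the pattern, condensed:
#
#     running = false
#     initing = false
#     callbacks = []
#     ensureRunning = (callback) ->
#       return callback() if running
#       callbacks.push callback
#       return if initing              # a boot is already in flight
#       initing = true
#       app.listen 3003, "localhost", ->
#         running = true
#         cb() for cb in callbacks     # release every queued caller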
after -> MockTrackChangesApi.flushDoc.restore() diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee index f6f7818990..c32b6b4001 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee @@ -5,9 +5,10 @@ async = require "async" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Flushing a project", -> - before -> + before (done) -> @project_id = DocUpdaterClient.randomId() @docs = [{ id: doc_id0 = DocUpdaterClient.randomId() @@ -37,6 +38,7 @@ describe "Flushing a project", -> lines: doc.lines version: doc.update.v } + DocUpdaterApp.ensureRunning(done) describe "with documents which have been updated", -> before (done) -> diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index 4c9c893e44..709159ccfb 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -6,9 +6,10 @@ async = require "async" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Flushing a doc to Mongo", -> - before -> + before (done) -> @lines = ["one", "two", "three"] @version = 42 @update = @@ -19,6 +20,7 @@ describe "Flushing a doc to Mongo", -> }] v: @version @result = ["one", "one and a half", "two", "three"] + DocUpdaterApp.ensureRunning(done) describe "when the updated doc exists in the doc updater", -> before (done) -> diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee index 67bbd6ea80..f3aa6ef875 100644 --- a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee @@ -5,12 +5,13 @@ expect = chai.expect MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Getting a document", -> before (done) -> @lines = ["one", "two", "three"] @version = 42 - setTimeout done, 200 # Give MockWebApi a chance to start + DocUpdaterApp.ensureRunning(done) describe "when the document is not loaded", -> before (done) -> diff --git a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee index 708176ea69..3483d170fa 100644 --- a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee @@ -5,12 +5,13 @@ expect = chai.expect MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Getting documents for project", -> before (done) -> @lines = ["one", "two", "three"] @version = 42 - setTimeout done, 200 # Give MockWebApi a chance to start + DocUpdaterApp.ensureRunning(done) describe "when project state hash does not match", -> before 
(done) -> diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index e3ec097e2e..95c80440c0 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -6,6 +6,7 @@ async = require "async" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Ranges", -> describe "tracking changes from ops", -> @@ -41,11 +42,14 @@ describe "Ranges", -> for update in @updates do (update) => jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => + + DocUpdaterApp.ensureRunning (error) => throw error if error? - async.series jobs, (error) -> + DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => throw error if error? - setTimeout done, 200 + async.series jobs, (error) -> + throw error if error? + done() it "should update the ranges", (done) -> DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 68ed5483c9..eb71179b3b 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -10,9 +10,10 @@ MockTrackChangesApi = require "./helpers/MockTrackChangesApi" MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Setting a document", -> - before -> + before (done) -> @lines = ["one", "two", "three"] @version = 42 @update = @@ -30,6 +31,7 @@ describe "Setting a document", -> sinon.spy MockTrackChangesApi, "flushDoc" sinon.spy MockProjectHistoryApi, "flushProject" sinon.spy MockWebApi, "setDocument" + DocUpdaterApp.ensureRunning(done) after -> MockTrackChangesApi.flushDoc.restore() diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee new file mode 100644 index 0000000000..9819f9f99e --- /dev/null +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee @@ -0,0 +1,20 @@ +app = require('../../../../app') +require("logger-sharelatex").logger.level("fatal") + +module.exports = + running: false + initing: false + callbacks: [] + ensureRunning: (callback = (error) ->) -> + if @running + return callback() + else if @initing + @callbacks.push callback + else + @initing = true + @callbacks.push callback + app.listen 3003, "localhost", (error) => + throw error if error? + @running = true + for callback in @callbacks + callback() \ No newline at end of file diff --git a/services/document-updater/test/acceptance/scripts/full-test.sh b/services/document-updater/test/acceptance/scripts/full-test.sh deleted file mode 100755 index 8584cd17d0..0000000000 --- a/services/document-updater/test/acceptance/scripts/full-test.sh +++ /dev/null @@ -1,23 +0,0 @@ -#! /usr/bin/env bash - -# npm rebuild - -echo ">> Starting server..." 
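# The forever-based start/test/stop dance below is what the Makefile and
# docker-compose files earlier in this patch replace. The rough
# equivalents afterwards (assuming the generated targets shown above):
#
#   make test_unit                                      # unit tests in a container
#   make test_acceptance                                # starts redis/mongo, runs mocha, tears down
#   MOCHA_ARGS="--grep Ranges" make test_acceptance     # narrow the run to one suite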
- -grunt --no-color forever:app:start - -echo ">> Server started" - -sleep 5 - -echo ">> Running acceptance tests..." -grunt --no-color mochaTest:acceptance -_test_exit_code=$? - -echo ">> Killing server" - -grunt --no-color forever:app:stop - -echo ">> Done" - -exit $_test_exit_code From 657da70d45414abb0765ecaaefffa5b4c7334f9b Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 15 Feb 2018 16:28:58 +0000 Subject: [PATCH 383/769] Fix unit test namespacing from mocha upgrade --- .../test/unit/coffee/LockManager/ReleasingTheLock.coffee | 1 + .../test/unit/coffee/ProjectManager/getProjectDocsTests.coffee | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee index 06dd2aa6bf..28fb02059e 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee @@ -31,6 +31,7 @@ describe 'LockManager - releasing the lock', ()-> end: sinon.stub() @LockManager = SandboxedModule.require(modulePath, requires: mocks) @lockValue = "lock-value-stub" + @callback = sinon.stub() describe "when the lock is current", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee index 8e3bc2206d..217e23edc7 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -16,11 +16,11 @@ describe "ProjectManager - getProjectDocsAndFlushIfOld", -> done: sinon.stub() @project_id = "project-id-123" @callback = sinon.stub() + @doc_versions = [111, 222, 333] describe "successfully", -> beforeEach (done) -> @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] - @doc_versions = [111, 222, 333] @doc_lines = [["aaa","aaa"],["bbb","bbb"],["ccc","ccc"]] @docs = [ {_id: @doc_ids[0], lines: @doc_lines[0], v: @doc_versions[0]} From 5425fdef8a48cc8f4f58864bf6beddf3508a3b8b Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 15 Feb 2018 16:54:21 +0000 Subject: [PATCH 384/769] update to 1.0.1 build scripts --- services/document-updater/Makefile | 4 ++-- services/document-updater/docker-compose.ci.yml | 2 +- services/document-updater/docker-compose.yml | 2 +- services/document-updater/nodemon.json | 1 + 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index cc0f89bd47..10adbe3866 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.0 +# Version: 1.0.1 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -24,6 +24,6 @@ test_acceptance: test_clean # clear the database before each acceptance test run @[ -d test/acceptance ] && $(DOCKER_COMPOSE) run --rm test_acceptance -- ${MOCHA_ARGS} || echo "document-updater has no acceptance tests" test_clean: - $(DOCKER_COMPOSE) down + $(DOCKER_COMPOSE) down -t 0 .PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index e2fd21d74f..9f40ba879d 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.0 +# Version: 1.0.1 version: "2" diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 10991d010e..d55ca56580 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.0 +# Version: 1.0.1 version: "2" diff --git a/services/document-updater/nodemon.json b/services/document-updater/nodemon.json index 9044f921c6..9a3be8d966 100644 --- a/services/document-updater/nodemon.json +++ b/services/document-updater/nodemon.json @@ -4,6 +4,7 @@ "node_modules/" ], "verbose": true, + "legacyWatch": true, "execMap": { "js": "npm run start" }, From ea0dd9700b6b92bbe853f48c185088cfaa5e01ee Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Thu, 22 Feb 2018 10:01:05 +0000 Subject: [PATCH 385/769] reduce the number of times we flush project history --- .../app/coffee/DocumentManager.coffee | 9 ++++++++- .../app/coffee/HistoryManager.coffee | 6 ++---- .../app/coffee/HttpController.coffee | 8 ++++++++ .../app/coffee/ProjectManager.coffee | 6 ++++++ .../DocumentManagerTests.coffee | 18 ++++++++++++++---- .../HistoryManager/HistoryManagerTests.coffee | 17 ----------------- .../HttpController/HttpControllerTests.coffee | 12 ++++++++++++ .../flushAndDeleteProjectTests.coffee | 19 ++++++++++++++----- 8 files changed, 64 insertions(+), 31 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 8e69989d09..6574c4c4b1 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -89,6 +89,13 @@ module.exports = DocumentManager = callback null else DocumentManager.flushAndDeleteDoc project_id, doc_id, (error) -> + # Flush in the background since it requires a http request. We + # want to flush project history if the previous call only failed + # to delete the doc from Redis. There is no harm in flushing + # project history if the previous call failed to flush at all. So + # do this before checking errors. + HistoryManager.flushProjectChangesAsync project_id + return callback(error) if error? callback null @@ -118,7 +125,7 @@ module.exports = DocumentManager = return callback(error) if error? 
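#   From here on the doc-level and project-level flushes are separate
#   concerns: flushDocChangesAsync talks to track-changes, while
#   flushProjectChangesAsync posts to project-history. A condensed sketch
#   of the pair as they stand after this patch, assuming the
#   track-changes flush endpoint mirrors the project-history one:
#
#     flushDocChangesAsync: (project_id, doc_id) ->
#       url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush"
#       request.post url, (error, res) ->   # result is logged, never awaited
#
#     flushProjectChangesAsync: (project_id) ->
#       return if !Settings.apis?.project_history?.enabled
#       url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush"
#       request.post url, (error, res) ->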
      # Flush in the background since it requires a http request
-      HistoryManager.flushChangesAsync project_id, doc_id
+      HistoryManager.flushDocChangesAsync project_id, doc_id
 
       RedisManager.removeDocFromMemory project_id, doc_id, (error) ->
         return callback(error) if error?
diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee
index 9ec5db2aa3..6d567234da 100644
--- a/services/document-updater/app/coffee/HistoryManager.coffee
+++ b/services/document-updater/app/coffee/HistoryManager.coffee
@@ -4,10 +4,8 @@ logger = require "logger-sharelatex"
 HistoryRedisManager = require "./HistoryRedisManager"
 
 module.exports = HistoryManager =
-  flushChangesAsync: (project_id, doc_id) ->
+  flushDocChangesAsync: (project_id, doc_id) ->
     HistoryManager._flushDocChangesAsync project_id, doc_id
-    if Settings.apis?.project_history?.enabled
-      HistoryManager.flushProjectChangesAsync project_id
 
   _flushDocChangesAsync: (project_id, doc_id) ->
     if !Settings.apis?.trackchanges?
@@ -23,7 +21,7 @@ module.exports = HistoryManager =
         logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}"
 
   flushProjectChangesAsync: (project_id) ->
-    return if !Settings.apis?.project_history?
+    return if !Settings.apis?.project_history?.enabled
 
     url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush"
     logger.log { project_id, url }, "flushing doc in project history api"
diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee
index ef9f860552..069cfc889e 100644
--- a/services/document-updater/app/coffee/HttpController.coffee
+++ b/services/document-updater/app/coffee/HttpController.coffee
@@ -1,4 +1,5 @@
 DocumentManager = require "./DocumentManager"
+HistoryManager = require "./HistoryManager"
 ProjectManager = require "./ProjectManager"
 Errors = require "./Errors"
 logger = require "logger-sharelatex"
@@ -106,6 +107,13 @@ module.exports = HttpController =
     timer = new Metrics.Timer("http.deleteDoc")
     DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) ->
       timer.done()
+      # Flush in the background since it requires a http request. We
+      # want to flush project history if the previous call only failed
+      # to delete the doc from Redis. There is no harm in flushing
+      # project history if the previous call failed to flush at all. So
+      # do this before checking errors.
+      HistoryManager.flushProjectChangesAsync project_id
+
       return next(error) if error?
       logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http"
       res.send 204 # No Content
diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee
index af2e27c8e5..a82d88b4a6 100644
--- a/services/document-updater/app/coffee/ProjectManager.coffee
+++ b/services/document-updater/app/coffee/ProjectManager.coffee
@@ -59,6 +59,12 @@ module.exports = ProjectManager =
     logger.log project_id: project_id, doc_ids: doc_ids, "deleting docs"
 
     async.series jobs, () ->
+      # Flush in the background since it requires a http request. If we
+      # flushed and deleted only some docs successfully then we should still
+      # flush project history. If no docs succeeded then there is still no
+      # harm flushing project history. So do this before checking errors.
+      HistoryManager.flushProjectChangesAsync project_id
+
       if errors.length > 0
         callback new Error("Errors deleting docs. See log for details")
       else
diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee
index 702617f7ae..c390138cf9 100644
--- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee
@@ -11,7 +11,9 @@ describe "DocumentManager", ->
     @DocumentManager = SandboxedModule.require modulePath, requires:
       "./RedisManager": @RedisManager = {}
       "./PersistenceManager": @PersistenceManager = {}
-      "./HistoryManager": @HistoryManager = {}
+      "./HistoryManager": @HistoryManager =
+        flushDocChangesAsync: sinon.stub()
+        flushProjectChangesAsync: sinon.stub()
       "logger-sharelatex": @logger = {log: sinon.stub()}
       "./DocOpsManager": @DocOpsManager = {}
       "./Metrics": @Metrics =
@@ -36,7 +38,6 @@ describe "DocumentManager", ->
     beforeEach ->
       @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2)
       @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2)
-      @HistoryManager.flushChangesAsync = sinon.stub()
      @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback
 
     it "should flush the doc", ->
@@ -56,8 +57,8 @@ describe "DocumentManager", ->
       @Metrics.Timer::done.called.should.equal true
 
     it "should flush to the history api", ->
-      @HistoryManager.flushChangesAsync
-        .calledWith(@project_id, @doc_id)
+      @HistoryManager.flushDocChangesAsync
+        .calledWithExactly(@project_id, @doc_id)
         .should.equal true
 
   describe "flushDocIfLoaded", ->
@@ -243,6 +244,10 @@ describe "DocumentManager", ->
         .calledWith(@project_id, @doc_id)
         .should.equal true
 
+    it "should not flush the project history", ->
+      @HistoryManager.flushProjectChangesAsync
+        .called.should.equal false
+
     it "should call the callback", ->
       @callback.calledWith(null).should.equal true
 
@@ -259,6 +264,11 @@ describe "DocumentManager", ->
         .calledWith(@project_id, @doc_id)
         .should.equal true
 
+    it "should flush the project history", ->
+      @HistoryManager.flushProjectChangesAsync
+        .calledWithExactly(@project_id)
+        .should.equal true
+
     describe "without new lines", ->
       beforeEach ->
         @DocumentManager.setDoc @project_id, @doc_id, null, @source, @user_id, false, @callback
diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
index 75327a7ae9..161b7afd44 100644
--- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee
@@ -21,23 +21,6 @@ describe "HistoryManager", ->
     @doc_id = "mock-doc-id"
     @callback = sinon.stub()
 
-  describe "flushChangesAsync", ->
-    beforeEach ->
-      @HistoryManager._flushDocChangesAsync = sinon.stub()
-      @HistoryManager.flushProjectChangesAsync = sinon.stub()
-
-      @HistoryManager.flushChangesAsync(@project_id, @doc_id)
-
-    it "flushes doc changes", ->
-      @HistoryManager._flushDocChangesAsync
-        .calledWith(@project_id, @doc_id)
-        .should.equal true
-
-    it "flushes project changes", ->
-      @HistoryManager.flushProjectChangesAsync
-        .calledWith(@project_id)
-        .should.equal true
-
   describe "_flushDocChangesAsync", ->
     beforeEach ->
       @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204)
diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee
b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index d52956635d..99496332fc 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -9,6 +9,8 @@ describe "HttpController", -> beforeEach -> @HttpController = SandboxedModule.require modulePath, requires: "./DocumentManager": @DocumentManager = {} + "./HistoryManager": @HistoryManager = + flushProjectChangesAsync: sinon.stub() "./ProjectManager": @ProjectManager = {} "logger-sharelatex" : @logger = { log: sinon.stub() } "./Metrics": @Metrics = {} @@ -275,6 +277,11 @@ describe "HttpController", -> .calledWith(@project_id, @doc_id) .should.equal true + it "should flush project history", -> + @HistoryManager.flushProjectChangesAsync + .calledWithExactly(@project_id) + .should.equal true + it "should return a successful No Content response", -> @res.send .calledWith(204) @@ -293,6 +300,11 @@ describe "HttpController", -> @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2, new Error("oops")) @HttpController.flushAndDeleteDoc(@req, @res, @next) + it "should flush project history", -> + @HistoryManager.flushProjectChangesAsync + .calledWithExactly(@project_id) + .should.equal true + it "should call next with the error", -> @next .calledWith(new Error("oops")) diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee index 74161ca4a2..50a2679953 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee @@ -10,7 +10,8 @@ describe "ProjectManager - flushAndDeleteProject", -> "./RedisManager": @RedisManager = {} "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./HistoryManager": @HistoryManager = {} + "./HistoryManager": @HistoryManager = + flushProjectChangesAsync: sinon.stub() "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() @@ -30,13 +31,18 @@ describe "ProjectManager - flushAndDeleteProject", -> @RedisManager.getDocIdsInProject .calledWith(@project_id) .should.equal true - + it "should delete each doc in the project", -> for doc_id in @doc_ids @DocumentManager.flushAndDeleteDocWithLock .calledWith(@project_id, doc_id) .should.equal true + it "should flush project history", -> + @HistoryManager.flushProjectChangesAsync + .calledWithExactly(@project_id) + .should.equal true + it "should call the callback without error", -> @callback.calledWith(null).should.equal true @@ -55,13 +61,18 @@ describe "ProjectManager - flushAndDeleteProject", -> @ProjectManager.flushAndDeleteProjectWithLocks @project_id, (error) => @callback(error) done() - + it "should still flush each doc in the project", -> for doc_id in @doc_ids @DocumentManager.flushAndDeleteDocWithLock .calledWith(@project_id, doc_id) .should.equal true + it "should still flush project history", -> + @HistoryManager.flushProjectChangesAsync + .calledWithExactly(@project_id) + .should.equal true + it "should record the error", -> @logger.error .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-1", "error deleting doc") @@ -72,5 +83,3 @@ describe "ProjectManager - flushAndDeleteProject", -> it "should time the execution", -> 
@Metrics.Timer::done.called.should.equal true - - From 0f87ae1f742a7f6ababa59e2d89205edf2e79bf2 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Thu, 22 Feb 2018 10:16:29 +0000 Subject: [PATCH 386/769] simplify comments --- .../document-updater/app/coffee/DocumentManager.coffee | 7 ++----- services/document-updater/app/coffee/HttpController.coffee | 7 ++----- services/document-updater/app/coffee/ProjectManager.coffee | 6 ++---- 3 files changed, 6 insertions(+), 14 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 6574c4c4b1..3d3f690b5c 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -89,11 +89,8 @@ module.exports = DocumentManager = callback null else DocumentManager.flushAndDeleteDoc project_id, doc_id, (error) -> - # Flush in the background since it requires a http request. We - # want to flush project history if the previous call only failed - # to delete the doc from Redis. There is no harm in flushing - # project history if the previous call failed to flush at all. So - # do this before checking errors. + # There is no harm in flushing project history if the previous + # call failed and sometimes it is required HistoryManager.flushProjectChangesAsync project_id return callback(error) if error? diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 069cfc889e..650ee07ae2 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -107,11 +107,8 @@ module.exports = HttpController = timer = new Metrics.Timer("http.deleteDoc") DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) -> timer.done() - # Flush in the background since it requires a http request. We - # want to flush project history if the previous call only failed - # to delete the doc from Redis. There is no harm in flushing - # project history if the previous call failed to flush at all. So - # do this before checking errors. + # There is no harm in flushing project history if the previous call + # failed and sometimes it is required HistoryManager.flushProjectChangesAsync project_id return next(error) if error? diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index a82d88b4a6..64293e6985 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -59,10 +59,8 @@ module.exports = ProjectManager = logger.log project_id: project_id, doc_ids: doc_ids, "deleting docs" async.series jobs, () -> - # Flush in the background since it requires a htpt request. If we - # flushed and deleted only some docs successfully then we should still - # flush project history. If no docs succeeded then there is still no - # harm flushing project history. So do this before checking errors. 
+ # There is no harm in flushing project history if the previous call + # failed and sometimes it is required HistoryManager.flushProjectChangesAsync project_id if errors.length > 0 From 5a11332aa3609f031b3f51d72f7ac7eead747fad Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Thu, 22 Feb 2018 10:16:41 +0000 Subject: [PATCH 387/769] remove unnecessary method wrapping --- .../app/coffee/HistoryManager.coffee | 5 +---- .../HistoryManager/HistoryManagerTests.coffee | 14 +++++++------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 6d567234da..3ccc42e97f 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -5,9 +5,6 @@ HistoryRedisManager = require "./HistoryRedisManager" module.exports = HistoryManager = flushDocChangesAsync: (project_id, doc_id) -> - HistoryManager._flushDocChangesAsync project_id, doc_id - - _flushDocChangesAsync: (project_id, doc_id) -> if !Settings.apis?.trackchanges? logger.warn { doc_id }, "track changes API is not configured, so not flushing" return @@ -51,7 +48,7 @@ module.exports = HistoryManager = # Do this in the background since it uses HTTP and so may be too # slow to wait for when processing a doc update. logger.log { doc_ops_length, doc_id, project_id }, "flushing track changes api" - HistoryManager._flushDocChangesAsync project_id, doc_id + HistoryManager.flushDocChangesAsync project_id, doc_id callback() shouldFlushHistoryOps: (length, ops_length, threshold) -> diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 161b7afd44..6e5010d89c 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -21,11 +21,11 @@ describe "HistoryManager", -> @doc_id = "mock-doc-id" @callback = sinon.stub() - describe "_flushDocChangesAsync", -> + describe "flushDocChangesAsync", -> beforeEach -> @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) - @HistoryManager._flushDocChangesAsync @project_id, @doc_id + @HistoryManager.flushDocChangesAsync @project_id, @doc_id it "should send a request to the track changes api", -> @request.post @@ -51,7 +51,7 @@ describe "HistoryManager", -> @HistoryManager.flushProjectChangesAsync = sinon.stub() @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3) - @HistoryManager._flushDocChangesAsync = sinon.stub() + @HistoryManager.flushDocChangesAsync = sinon.stub() describe "with no ops", -> beforeEach -> @@ -66,7 +66,7 @@ describe "HistoryManager", -> @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false it "should not flush doc changes", -> - @HistoryManager._flushDocChangesAsync.called.should.equal false + @HistoryManager.flushDocChangesAsync.called.should.equal false it "should call the callback", -> @callback.called.should.equal true @@ -91,7 +91,7 @@ describe "HistoryManager", -> .calledWith(@project_id, @doc_id, @ops) it "should not flush doc changes", -> - @HistoryManager._flushDocChangesAsync.called.should.equal false + @HistoryManager.flushDocChangesAsync.called.should.equal false it "should call the callback", -> @callback.called.should.equal true @@ -114,7 +114,7 @@
.calledWith(@project_id, @doc_id, @ops) it "should flush doc changes", -> - @HistoryManager._flushDocChangesAsync + @HistoryManager.flushDocChangesAsync .calledWith(@project_id, @doc_id) .should.equal true @@ -132,7 +132,7 @@ describe "HistoryManager", -> ) it "should not flush doc changes", -> - @HistoryManager._flushDocChangesAsync.called.should.equal false + @HistoryManager.flushDocChangesAsync.called.should.equal false it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true From 939eaa2d4bfd34c9474740e74953f9c42a39217b Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 2 Mar 2018 10:02:49 +0000 Subject: [PATCH 388/769] Don't allow a document to be loaded without a pathname --- .../app/coffee/DocumentManager.coffee | 2 +- .../app/coffee/PersistenceManager.coffee | 2 ++ .../app/coffee/RedisManager.coffee | 2 +- .../PersistenceManagerTests.coffee | 29 ++++++++++++++----- 4 files changed, 25 insertions(+), 10 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 3d3f690b5c..dedb5f63aa 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -24,7 +24,7 @@ module.exports = DocumentManager = logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> return callback(error) if error? - logger.log {project_id, doc_id, lines, version}, "got doc from persistence API" + logger.log {project_id, doc_id, lines, version, pathname}, "got doc from persistence API" RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, (error) -> return callback(error) if error? callback null, lines, version, ranges, pathname, null, false diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 974dec5a2c..bd5ce5239c 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -42,6 +42,8 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no doc lines")) if !body.version? or not body.version instanceof Number return callback(new Error("web API response had no valid doc version")) + if !body.pathname? + return callback(new Error("web API response had no valid doc pathname")) return callback null, body.lines, body.version, body.ranges, body.pathname else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index cd12b497a1..b14c00ae66 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -47,7 +47,7 @@ module.exports = RedisManager = logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message return callback(error) docHash = RedisManager._computeHash(docLines) - logger.log project_id:project_id, doc_id:doc_id, version: version, hash:docHash, "putting doc in redis" + logger.log {project_id, doc_id, version, docHash, pathname}, "putting doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> if error? 
logger.error {err: error, doc_id, project_id}, error.message diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index 925274ac2f..937dcf3a77 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -30,15 +30,17 @@ describe "PersistenceManager", -> pass: @pass = "password" describe "getDoc", -> + beforeEach -> + @webResponse = { + lines: @lines, + version: @version, + ranges: @ranges + pathname: @pathname, + } describe "with a successful response from the web api", -> beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify({ - lines: @lines, - version: @version, - ranges: @ranges - pathname: @pathname, - })) + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(@webResponse)) @PersistenceManager.getDoc(@project_id, @doc_id, @callback) it "should call the web api", -> @@ -98,7 +100,8 @@ describe "PersistenceManager", -> describe "when request returns a doc without lines", -> beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(version: @version)) + delete @webResponse.lines + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(@webResponse)) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) it "should return an error", -> @@ -106,12 +109,22 @@ describe "PersistenceManager", -> describe "when request returns a doc without a version", -> beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(lines: @lines)) + delete @webResponse.version + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(@webResponse)) @PersistenceManager.getDoc(@project_id, @doc_id, @callback) it "should return an error", -> @callback.calledWith(new Error("web API response had no valid doc version")).should.equal true + describe "when request returns a doc without a pathname", -> + beforeEach -> + delete @webResponse.pathname + @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(@webResponse)) + @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + + it "should return an error", -> + @callback.calledWith(new Error("web API response had no valid doc pathname")).should.equal true + describe "setDoc", -> describe "with a successful response from the web api", -> beforeEach -> From 52d6b710bd1f2a327e7ad3dd4b243dd725441cee Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Wed, 28 Feb 2018 17:31:43 +0000 Subject: [PATCH 389/769] add ability to resync project structure --- services/document-updater/app.coffee | 3 ++- .../app/coffee/HistoryManager.coffee | 12 ++++++++++++ .../app/coffee/HttpController.coffee | 8 ++++++++ .../document-updater/app/coffee/RedisManager.coffee | 8 ++++++++ 4 files changed, 30 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 7c8792aa29..2408b164f0 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -47,7 +47,8 @@ app.post '/project/:project_id/doc/:doc_id', HttpCont app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject -app.post '/project/:project_id',
HttpController.updateProject +app.post '/project/:project_id', HttpController.updateProject +app.post '/project/:project_id/resync', HttpController.resyncProject app.post '/project/:project_id/flush', HttpController.flushProject app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 3ccc42e97f..15b44475a2 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -2,6 +2,7 @@ Settings = require "settings-sharelatex" request = require "request" logger = require "logger-sharelatex" HistoryRedisManager = require "./HistoryRedisManager" +RedisManager = require "./RedisManager" module.exports = HistoryManager = flushDocChangesAsync: (project_id, doc_id) -> @@ -61,3 +62,14 @@ module.exports = HistoryManager = prevBlock = Math.floor(previousLength / threshold) newBlock = Math.floor(length / threshold) return newBlock != prevBlock + + resyncProject: (project_id, docs, files, callback) -> + RedisManager.resyncProjectStructure project_id, docs, files, (error) -> + return callback(error) if error? + callback null + + #jobs = _.union + #_.map docs, (doc) -> RedisManager.resyncDoc project_id, doc + #_.map files, (files) -> RedisManager.resyncFile project_id, file + + #async.series jobs, callback diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 650ee07ae2..3793146f38 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -169,3 +169,11 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, "updated project via http" res.send 204 # No Content + + resyncProject: (req, res, next = (error) ->) -> + project_id = req.params.project_id + {docs, files} = req.body + + HistoryManager.resyncProject project_id, docs, files, (error) -> + return next(error) if error? + res.send 204 diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index cd12b497a1..3de65ef768 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -321,6 +321,14 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback + resyncProjectStructure: (project_id, docs, files, callback) -> + update = + projectStructure: { docs, files } + meta: + ts: new Date() + jsonUpdate = JSON.stringify update + rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback + _serializeRanges: (ranges, callback = (error, serializedRanges) ->) -> jsonRanges = JSON.stringify(ranges) if jsonRanges? 
and jsonRanges.length > MAX_RANGES_SIZE From d46217be52278b96b7703ea0e5cf733f3214b724 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 2 Mar 2018 15:02:27 +0000 Subject: [PATCH 390/769] queue doc content sync updates --- .../app/coffee/DocumentManager.coffee | 15 +++++++++++++++ .../app/coffee/HistoryManager.coffee | 18 ++++++++---------- .../app/coffee/RedisManager.coffee | 16 ++++++++++++++-- 3 files changed, 37 insertions(+), 12 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 3d3f690b5c..38992af237 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -179,6 +179,17 @@ module.exports = DocumentManager = else callback(null, lines, version) + resyncDocContents: (project_id, doc_id, callback) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + return callback(error) if error? + + if !lines? or !version? + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + return callback(error) if error? + RedisManager.queueResyncDocContents project_id, doc_id, lines, version, pathname, callback + else + RedisManager.queueResyncDocContents project_id, doc_id, lines, version, pathname, callback + getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback @@ -214,3 +225,7 @@ module.exports = DocumentManager = renameDocWithLock: (project_id, doc_id, user_id, update, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, callback + + resyncDocContentsWithLock: (project_id, doc_id, callback = (error) ->) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo DocumentManager.resyncDocContents, project_id, doc_id, callback diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 15b44475a2..03fb8b31f3 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -1,6 +1,7 @@ -Settings = require "settings-sharelatex" -request = require "request" +async = require "async" logger = require "logger-sharelatex" +request = require "request" +Settings = require "settings-sharelatex" HistoryRedisManager = require "./HistoryRedisManager" RedisManager = require "./RedisManager" @@ -64,12 +65,9 @@ module.exports = HistoryManager = return newBlock != prevBlock resyncProject: (project_id, docs, files, callback) -> - RedisManager.resyncProjectStructure project_id, docs, files, (error) -> + RedisManager.queueResyncProjectStructure project_id, docs, files, (error) -> return callback(error) if error? 
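+ # Illustration only, matching the RedisManager queue code added below in this patch: each queued op is a JSON string, e.g. + # {"resyncProjectStructure": {"docs": [...], "files": [...]}, "meta": {"ts": ...}} for the structure, + # followed by one {"resyncDocContents": {"content": ..., "version": ..., "path": ..., "doc": ...}, "meta": {"ts": ...}} per doc.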
- callback null - - #jobs = _.union - #_.map docs, (doc) -> RedisManager.resyncDoc project_id, doc - #_.map files, (files) -> RedisManager.resyncFile project_id, file - - #async.series jobs, callback + DocumentManager = require "./DocumentManager" + resyncDoc = (doc, cb) -> + DocumentManager.resyncDocContentsWithLock project_id, doc.doc, cb + async.each docs, resyncDoc, callback diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 3de65ef768..8f0b161bc0 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -321,9 +321,21 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback - resyncProjectStructure: (project_id, docs, files, callback) -> + queueResyncProjectStructure: (project_id, docs, files, callback) -> update = - projectStructure: { docs, files } + resyncProjectStructure: { docs, files } + meta: + ts: new Date() + jsonUpdate = JSON.stringify update + rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback + + queueResyncDocContents: (project_id, doc_id, lines, version, pathname, callback) -> + update = + resyncDocContents: + content: lines.join("\n"), + version: version + path: pathname + doc: doc_id meta: ts: new Date() jsonUpdate = JSON.stringify update From 039f997e80d321f66d8a095497e8dc085fe6ecfb Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Wed, 7 Mar 2018 11:37:26 +0000 Subject: [PATCH 391/769] update project history resync url --- services/document-updater/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 2408b164f0..3768146b34 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -48,7 +48,7 @@ app.post '/project/:project_id/doc/:doc_id/flush', HttpCont app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject app.post '/project/:project_id', HttpController.updateProject -app.post '/project/:project_id/resync', HttpController.resyncProject +app.post '/project/:project_id/history/resync', HttpController.resyncProject app.post '/project/:project_id/flush', HttpController.flushProject app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges From 152a25e96dd86e25b40e238488e32a4df958a90b Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Wed, 7 Mar 2018 13:29:09 +0000 Subject: [PATCH 392/769] rename resyncProject -> resyncProjectHistory --- services/document-updater/app.coffee | 2 +- services/document-updater/app/coffee/HistoryManager.coffee | 2 +- services/document-updater/app/coffee/HttpController.coffee | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 3768146b34..1d6a57239e 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -48,7 +48,7 @@ app.post '/project/:project_id/doc/:doc_id/flush', HttpCont app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject app.post 
'/project/:project_id', HttpController.updateProject -app.post '/project/:project_id/history/resync', HttpController.resyncProject +app.post '/project/:project_id/history/resync', HttpController.resyncProjectHistory app.post '/project/:project_id/flush', HttpController.flushProject app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 03fb8b31f3..b4f74d6170 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -64,7 +64,7 @@ module.exports = HistoryManager = newBlock = Math.floor(length / threshold) return newBlock != prevBlock - resyncProject: (project_id, docs, files, callback) -> + resyncProjectHistory: (project_id, docs, files, callback) -> RedisManager.queueResyncProjectStructure project_id, docs, files, (error) -> return callback(error) if error? DocumentManager = require "./DocumentManager" diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 3793146f38..4a7ff3b302 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -170,10 +170,10 @@ module.exports = HttpController = logger.log project_id: project_id, "updated project via http" res.send 204 # No Content - resyncProject: (req, res, next = (error) ->) -> + resyncProjectHistory: (req, res, next = (error) ->) -> project_id = req.params.project_id {docs, files} = req.body - HistoryManager.resyncProject project_id, docs, files, (error) -> + HistoryManager.resyncProjectHistory project_id, docs, files, (error) -> return next(error) if error? 
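+ # Nothing is written to history synchronously here: the resync ops are + # only queued for the project history service to process, so the 204 + # below just acknowledges the queueing.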
res.send 204 From a72d2283543fe6e78cd5bcc966895f189344d611 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Wed, 7 Mar 2018 13:29:53 +0000 Subject: [PATCH 393/769] add unit tests --- .../DocumentManagerTests.coffee | 39 +++++++++++++++++++ .../HistoryManager/HistoryManagerTests.coffee | 31 +++++++++++++++ .../HttpController/HttpControllerTests.coffee | 36 +++++++++++++++++ 3 files changed, 106 insertions(+) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index c390138cf9..69ab704094 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -467,3 +467,42 @@ describe "DocumentManager", -> it "should call the callback", -> @callback.called.should.equal true + + describe "resyncDocContents", -> + describe "when doc is loaded in redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @RedisManager.queueResyncDocContents = sinon.stub() + @DocumentManager.resyncDocContents @project_id, @doc_id, @callback + + it "gets the doc contents from redis", -> + @RedisManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "queues a resync doc content update", -> + @RedisManager.queueResyncDocContents + .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) + .should.equal true + + describe "when doc is not loaded in redis", -> + beforeEach -> + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @RedisManager.queueResyncDocContents = sinon.stub() + @DocumentManager.resyncDocContents @project_id, @doc_id, @callback + + it "tries to get the doc contents from redis", -> + @RedisManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "gets the doc contents from web", -> + @PersistenceManager.getDoc + .calledWith(@project_id, @doc_id) + .should.equal true + + it "queues a resync doc content update", -> + @RedisManager.queueResyncDocContents + .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) + .should.equal true diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 6e5010d89c..c268a86ba7 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -16,7 +16,9 @@ describe "HistoryManager", -> url: "http://trackchanges.example.com" } "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } + "./DocumentManager": @DocumentManager = {} "./HistoryRedisManager": @HistoryRedisManager = {} + "./RedisManager": @RedisManager = {} @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -158,3 +160,32 @@ describe "HistoryManager", -> # Previously we were on 16 ops # We didn't pass over a multiple of 5 @HistoryManager.shouldFlushHistoryOps(17, ['a', 'b', 'c'].length, 5).should.equal true + + describe "resyncProjectHistory", -> + beforeEach -> + @docs = [ + doc: @doc_id + path: 'main.tex' + ] + @files = [ + file: 'mock-file-id' + path: 'universe.png' + url: 
"www.filestore.test/#{@project_id}/mock-file-id" + ] + @RedisManager.queueResyncProjectStructure = sinon.stub().yields() + @DocumentManager.resyncDocContentsWithLock = sinon.stub().yields() + @HistoryManager.resyncProjectHistory @project_id, @docs, @files, @callback + + it "should queue a project structure reync", -> + @RedisManager.queueResyncProjectStructure + .calledWith(@project_id, @docs, @files) + .should.equal true + + it "should queue doc content reyncs", -> + @DocumentManager + .resyncDocContentsWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should call the callback", -> + @callback.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 99496332fc..b79659cada 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -544,3 +544,39 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true + + describe "resyncProjectHistory", -> + beforeEach -> + @docs = sinon.stub() + @files = sinon.stub() + @fileUpdates = sinon.stub() + @req = + body: + {@docs, @files} + params: + project_id: @project_id + + describe "successfully", -> + beforeEach -> + @HistoryManager.resyncProjectHistory = sinon.stub().callsArg(3) + @HttpController.resyncProjectHistory(@req, @res, @next) + + it "should accept the change", -> + @HistoryManager.resyncProjectHistory + .calledWith(@project_id, @docs, @files) + .should.equal true + + it "should return a successful No Content response", -> + @res.send + .calledWith(204) + .should.equal true + + describe "when an errors occurs", -> + beforeEach -> + @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(3, new Error("oops")) + @HttpController.resyncProjectHistory(@req, @res, @next) + + it "should call next with the error", -> + @next + .calledWith(new Error("oops")) + .should.equal true From 64d1fe13bcac46d97a0371b61124b4a51a7b016a Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Wed, 7 Mar 2018 16:16:19 +0000 Subject: [PATCH 394/769] additional logging for project history resync --- services/document-updater/app.coffee | 2 ++ .../app/coffee/HttpController.coffee | 2 ++ .../app/coffee/LoggerSerializers.coffee | 14 ++++++++++++++ .../app/coffee/RedisManager.coffee | 2 ++ 4 files changed, 20 insertions(+) create mode 100644 services/document-updater/app/coffee/LoggerSerializers.coffee diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 1d6a57239e..a1a18d5889 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -3,6 +3,8 @@ http = require("http") Settings = require('settings-sharelatex') logger = require('logger-sharelatex') logger.initialize("documentupdater") +logger.logger.serializers.docs = require("./app/js/LoggerSerializers").docs +logger.logger.serializers.files = require("./app/js/LoggerSerializers").files if Settings.sentry?.dsn? 
logger.initializeErrorReporting(Settings.sentry.dsn) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 4a7ff3b302..2f3f8d7d66 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -174,6 +174,8 @@ module.exports = HttpController = project_id = req.params.project_id {docs, files} = req.body + logger.log {project_id}, "queuing project history resync via http" HistoryManager.resyncProjectHistory project_id, docs, files, (error) -> return next(error) if error? + logger.log {project_id}, "queued project history resync via http" res.send 204 diff --git a/services/document-updater/app/coffee/LoggerSerializers.coffee b/services/document-updater/app/coffee/LoggerSerializers.coffee new file mode 100644 index 0000000000..3d4bfc42c2 --- /dev/null +++ b/services/document-updater/app/coffee/LoggerSerializers.coffee @@ -0,0 +1,14 @@ +module.exports = + docs: (docs) -> + docs.map (doc) -> + { + path: doc.path + id: doc.doc + } + + files: (files) -> + files.map (file) -> + { + path: file.path + id: file.file + } diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 8f0b161bc0..a959b1dd41 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -322,6 +322,7 @@ module.exports = RedisManager = rclient.smembers keys.docsInProject(project_id: project_id), callback queueResyncProjectStructure: (project_id, docs, files, callback) -> + logger.log {project_id, docs, files}, "queue project structure resync" update = resyncProjectStructure: { docs, files } meta: @@ -330,6 +331,7 @@ module.exports = RedisManager = rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback queueResyncDocContents: (project_id, doc_id, lines, version, pathname, callback) -> + logger.log {project_id, doc_id, pathname}, "queue doc content resync" update = resyncDocContents: content: lines.join("\n"), From 088ab6e7ba826fac846b10c028b6bd68ee70e247 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 9 Mar 2018 12:21:37 +0000 Subject: [PATCH 395/769] rename queueResyncDocContents -> queueResyncDocContent --- services/document-updater/app/coffee/DocumentManager.coffee | 4 ++-- services/document-updater/app/coffee/RedisManager.coffee | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 38992af237..fde56a7c3b 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -186,9 +186,9 @@ module.exports = DocumentManager = if !lines? or !version? PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> return callback(error) if error? 
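+ # The doc is not loaded in Redis, so queue the last persisted content instead.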
- RedisManager.queueResyncDocContents project_id, doc_id, lines, version, pathname, callback + RedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback else - RedisManager.queueResyncDocContents project_id, doc_id, lines, version, pathname, callback + RedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index a959b1dd41..4d3fafeb60 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -330,10 +330,10 @@ module.exports = RedisManager = jsonUpdate = JSON.stringify update rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback - queueResyncDocContents: (project_id, doc_id, lines, version, pathname, callback) -> + queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) -> logger.log {project_id, doc_id, pathname}, "queue doc content resync" update = - resyncDocContents: + resyncDocContent: content: lines.join("\n"), version: version path: pathname From fc6ef6ea7a3c5ece8deaa862b76eb7705f3f3ab7 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 9 Mar 2018 12:21:48 +0000 Subject: [PATCH 396/769] improve logging --- services/document-updater/app/coffee/HttpController.coffee | 2 +- services/document-updater/app/coffee/RedisManager.coffee | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 2f3f8d7d66..9918339d6e 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -174,7 +174,7 @@ module.exports = HttpController = project_id = req.params.project_id {docs, files} = req.body - logger.log {project_id}, "queuing project history resync via http" + logger.log {project_id, docs, files}, "queuing project history resync via http" HistoryManager.resyncProjectHistory project_id, docs, files, (error) -> return next(error) if error? 
logger.log {project_id}, "queued project history resync via http" diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 4d3fafeb60..572aaa44a6 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -331,7 +331,7 @@ module.exports = RedisManager = rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) -> - logger.log {project_id, doc_id, pathname}, "queue doc content resync" + logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync" update = resyncDocContent: content: lines.join("\n"), From 1226f96fc3e6ffbcd877ed2359236282fb7982a9 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 9 Mar 2018 14:14:14 +0000 Subject: [PATCH 397/769] extract ProjectHistoryRedisManager --- .../app/coffee/DocumentManager.coffee | 5 +- .../app/coffee/HistoryManager.coffee | 3 +- .../coffee/ProjectHistoryRedisManager.coffee | 59 ++++++++++ .../app/coffee/ProjectManager.coffee | 9 +- .../app/coffee/RedisManager.coffee | 62 +---------- .../DocumentManagerTests.coffee | 9 +- .../HistoryManager/HistoryManagerTests.coffee | 5 +- .../ProjectHistoryRedisManagerTests.coffee | 101 ++++++++++++++++++ .../flushAndDeleteProjectTests.coffee | 1 + .../ProjectManager/flushProjectTests.coffee | 3 +- .../ProjectManager/getProjectDocsTests.coffee | 1 + .../ProjectManager/updateProjectTests.coffee | 22 ++-- .../RedisManager/RedisManagerTests.coffee | 82 +++----------- 13 files changed, 210 insertions(+), 152 deletions(-) create mode 100644 services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee create mode 100644 services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index fde56a7c3b..7ec501250e 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -1,4 +1,5 @@ RedisManager = require "./RedisManager" +ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" PersistenceManager = require "./PersistenceManager" DiffCodec = require "./DiffCodec" logger = require "logger-sharelatex" @@ -186,9 +187,9 @@ module.exports = DocumentManager = if !lines? or !version? PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> return callback(error) if error? 
- RedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback + ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback else - RedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback + ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index b4f74d6170..c1371615f7 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -3,6 +3,7 @@ logger = require "logger-sharelatex" request = require "request" Settings = require "settings-sharelatex" HistoryRedisManager = require "./HistoryRedisManager" +ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" RedisManager = require "./RedisManager" module.exports = HistoryManager = @@ -65,7 +66,7 @@ module.exports = HistoryManager = return newBlock != prevBlock resyncProjectHistory: (project_id, docs, files, callback) -> - RedisManager.queueResyncProjectStructure project_id, docs, files, (error) -> + ProjectHistoryRedisManager.queueResyncProjectStructure project_id, docs, files, (error) -> return callback(error) if error? DocumentManager = require "./DocumentManager" resyncDoc = (doc, cb) -> diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee new file mode 100644 index 0000000000..6de1d8efc9 --- /dev/null +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -0,0 +1,59 @@ +Settings = require('settings-sharelatex') +projectHistoryKeys = Settings.redis?.project_history?.key_schema +rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +logger = require('logger-sharelatex') + +module.exports = ProjectHistoryRedisManager = + queueOps: (project_id, ops..., callback) -> + rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback + + queueRenameEntity: (project_id, entity_type, entity_id, user_id, update, callback) -> + update = + pathname: update.pathname + new_pathname: update.newPathname + meta: + user_id: user_id + ts: new Date() + update[entity_type] = entity_id + + logger.log {project_id, update}, "queue rename operation to project-history" + jsonUpdate = JSON.stringify(update) + + ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback + + queueAddEntity: (project_id, entity_type, entity_id, user_id, update, callback = (error) ->) -> + update = + pathname: update.pathname + docLines: update.docLines + url: update.url + meta: + user_id: user_id + ts: new Date() + update[entity_type] = entity_id + + logger.log {project_id, update}, "queue add operation to project-history" + jsonUpdate = JSON.stringify(update) + + ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback + + queueResyncProjectStructure: (project_id, docs, files, callback) -> + logger.log {project_id, docs, files}, "queue project structure resync" + update = + resyncProjectStructure: { docs, files } + meta: + ts: new Date() + jsonUpdate = JSON.stringify update + ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback + + queueResyncDocContent: (project_id, doc_id,
lines, version, pathname, callback) -> + logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync" + update = + resyncDocContent: + content: lines.join("\n"), + version: version + path: pathname + doc: doc_id + meta: + ts: new Date() + jsonUpdate = JSON.stringify update + ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 64293e6985..36ae86363d 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -1,4 +1,5 @@ RedisManager = require "./RedisManager" +ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" DocumentManager = require "./DocumentManager" HistoryManager = require "./HistoryManager" async = require "async" @@ -115,22 +116,22 @@ module.exports = ProjectManager = handleDocUpdate = (update, cb) -> doc_id = update.id if update.docLines? - RedisManager.addEntity project_id, 'doc', doc_id, user_id, update, (error, count) => + ProjectHistoryRedisManager.queueAddEntity project_id, 'doc', doc_id, user_id, update, (error, count) -> project_ops_length = count cb(error) else - DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, (error, count) => + DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, (error, count) -> project_ops_length = count cb(error) handleFileUpdate = (update, cb) -> file_id = update.id if update.url? - RedisManager.addEntity project_id, 'file', file_id, user_id, update, (error, count) => + ProjectHistoryRedisManager.queueAddEntity project_id, 'file', file_id, user_id, update, (error, count) -> project_ops_length = count cb(error) else - RedisManager.renameFile project_id, file_id, user_id, update, (error, count) => + ProjectHistoryRedisManager.queueRenameEntity project_id, 'file', file_id, user_id, update, (error, count) -> project_ops_length = count cb(error) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 572aaa44a6..c97bfb65e4 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -5,6 +5,7 @@ logger = require('logger-sharelatex') metrics = require('./Metrics') Errors = require "./Errors" crypto = require "crypto" +ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" # Sometimes Redis calls take an unexpectedly long time. We have to be # quick with Redis calls because we're holding a lock that expires @@ -31,7 +32,6 @@ MAX_RANGES_SIZE = 3 * MEGABYTES keys = Settings.redis.documentupdater.key_schema historyKeys = Settings.redis.history.key_schema -projectHistoryKeys = Settings.redis?.project_history?.key_schema module.exports = RedisManager = rclient: rclient @@ -267,7 +267,7 @@ module.exports = RedisManager = docUpdateCount = result[7] if jsonOps.length > 0 && Settings.apis?.project_history?.enabled - rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonOps..., (error, projectUpdateCount) -> + ProjectHistoryRedisManager.queueOps project_id, jsonOps..., (error, projectUpdateCount) -> callback null, docUpdateCount, projectUpdateCount else callback null, docUpdateCount @@ -279,41 +279,9 @@ module.exports = RedisManager = if lines? and version? rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) -> return callback(error) if error? 
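+ # Route the rename through ProjectHistoryRedisManager so that every + # write to the project history queue goes through the same queueOps path.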
- RedisManager._renameEntity project_id, 'doc', doc_id, user_id, update, callback + ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback else - RedisManager._renameEntity project_id, 'doc', doc_id, user_id, update, callback - - renameFile: (project_id, file_id, user_id, update, callback = (error) ->) -> - RedisManager._renameEntity project_id, 'file', file_id, user_id, update, callback - - _renameEntity: (project_id, entity_type, entity_id, user_id, update, callback = (error) ->) -> - update = - pathname: update.pathname - new_pathname: update.newPathname - meta: - user_id: user_id - ts: new Date() - update[entity_type] = entity_id - - logger.log {project_id, update}, "queue rename operation to project-history" - jsonUpdate = JSON.stringify(update) - - rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback - - addEntity: (project_id, entity_type, entitiy_id, user_id, update, callback = (error) ->) -> - update = - pathname: update.pathname - docLines: update.docLines - url: update.url - meta: - user_id: user_id - ts: new Date() - update[entity_type] = entitiy_id - - logger.log {project_id, update}, "queue add operation to project-history" - jsonUpdate = JSON.stringify(update) - - rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback + ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback clearUnflushedTime: (doc_id, callback = (error) ->) -> rclient.del keys.unflushedTime(doc_id:doc_id), callback @@ -321,28 +289,6 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback - queueResyncProjectStructure: (project_id, docs, files, callback) -> - logger.log {project_id, docs, files}, "queue project structure resync" - update = - resyncProjectStructure: { docs, files } - meta: - ts: new Date() - jsonUpdate = JSON.stringify update - rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback - - queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) -> - logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync" - update = - resyncDocContent: - content: lines.join("\n"), - version: version - path: pathname - doc: doc_id - meta: - ts: new Date() - jsonUpdate = JSON.stringify update - rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback - _serializeRanges: (ranges, callback = (error, serializedRanges) ->) -> jsonRanges = JSON.stringify(ranges) if jsonRanges? 
and jsonRanges.length > MAX_RANGES_SIZE diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 69ab704094..16e58a81a7 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -10,6 +10,7 @@ describe "DocumentManager", -> beforeEach -> @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} "./PersistenceManager": @PersistenceManager = {} "./HistoryManager": @HistoryManager = flushDocChangesAsync: sinon.stub() @@ -472,7 +473,7 @@ describe "DocumentManager", -> describe "when doc is loaded in redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) - @RedisManager.queueResyncDocContents = sinon.stub() + @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() @DocumentManager.resyncDocContents @project_id, @doc_id, @callback it "gets the doc contents from redis", -> @@ -481,7 +482,7 @@ describe "DocumentManager", -> .should.equal true it "queues a resync doc content update", -> - @RedisManager.queueResyncDocContents + @ProjectHistoryRedisManager.queueResyncDocContent .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) .should.equal true @@ -489,7 +490,7 @@ describe "DocumentManager", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null) @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) - @RedisManager.queueResyncDocContents = sinon.stub() + @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() @DocumentManager.resyncDocContents @project_id, @doc_id, @callback it "tries to get the doc contents from redis", -> @@ -503,6 +504,6 @@ describe "DocumentManager", -> .should.equal true it "queues a resync doc content update", -> - @RedisManager.queueResyncDocContents + @ProjectHistoryRedisManager.queueResyncDocContent .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) .should.equal true diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index c268a86ba7..e207cde99d 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -19,6 +19,7 @@ describe "HistoryManager", -> "./DocumentManager": @DocumentManager = {} "./HistoryRedisManager": @HistoryRedisManager = {} "./RedisManager": @RedisManager = {} + "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() @@ -172,12 +173,12 @@ describe "HistoryManager", -> path: 'universe.png' url: "www.filestore.test/#{@project_id}/mock-file-id" ] - @RedisManager.queueResyncProjectStructure = sinon.stub().yields() + @ProjectHistoryRedisManager.queueResyncProjectStructure = sinon.stub().yields() @DocumentManager.resyncDocContentsWithLock = sinon.stub().yields() @HistoryManager.resyncProjectHistory @project_id, @docs, @files, @callback it "should queue a project structure resync", -> -
@ProjectHistoryRedisManager.queueResyncProjectStructure .calledWith(@project_id, @docs, @files) .should.equal true diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee new file mode 100644 index 0000000000..8ad0f53b5b --- /dev/null +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -0,0 +1,101 @@ +sinon = require('sinon') +chai = require('chai') +should = chai.should() +modulePath = "../../../../app/js/ProjectHistoryRedisManager.js" +SandboxedModule = require('sandboxed-module') +tk = require "timekeeper" + +describe "ProjectHistoryRedisManager", -> + beforeEach -> + @project_id = "project-id-123" + @user_id = "user-id-123" + @callback = sinon.stub() + @rclient = {} + tk.freeze(new Date()) + @ProjectHistoryRedisManager = SandboxedModule.require modulePath, + requires: + "settings-sharelatex": @settings = { + redis: + project_history: + key_schema: + projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" + } + "redis-sharelatex": + createClient: () => @rclient + globals: + JSON: @JSON = JSON + + afterEach -> + tk.reset() + + describe "queueOps", -> + beforeEach -> + @ops = ["mock-op-1", "mock-op-2"] + @rclient.rpush = sinon.stub() + @ProjectHistoryRedisManager.queueOps @project_id, @ops..., @callback + + it "should queue an update", -> + @rclient.rpush + .calledWithExactly( + "ProjectHistory:Ops:#{@project_id}" + @ops[0] + @ops[1] + @callback + ).should.equal true + + describe "queueRenameEntity", -> + beforeEach () -> + @file_id = 1234 + + @rawUpdate = + pathname: @pathname = '/old' + newPathname: @newPathname = '/new' + + @ProjectHistoryRedisManager.queueOps = sinon.stub() + @ProjectHistoryRedisManager.queueRenameEntity @project_id, 'file', @file_id, @user_id, @rawUpdate, @callback + + it "should queue an update", -> + update = + pathname: @pathname + new_pathname: @newPathname + meta: + user_id: @user_id + ts: new Date() + file: @file_id + + @ProjectHistoryRedisManager.queueOps + .calledWithExactly(@project_id, @JSON.stringify(update), @callback) + .should.equal true + + describe "queueAddEntity", -> + beforeEach () -> + @rclient.rpush = sinon.stub().yields() + @doc_id = 1234 + + @rawUpdate = + pathname: @pathname = '/old' + docLines: @docLines = 'a\nb' + url: @url = 'filestore.example.com' + + @ProjectHistoryRedisManager.queueOps = sinon.stub() + @ProjectHistoryRedisManager.queueAddEntity @project_id, 'doc', @doc_id, @user_id, @rawUpdate, @callback + + it "should queue an update", -> + update = + pathname: @pathname + docLines: @docLines + url: @url + meta: + user_id: @user_id + ts: new Date() + doc: @doc_id + + @ProjectHistoryRedisManager.queueOps + .calledWithExactly(@project_id, @JSON.stringify(update), @callback) + .should.equal true + + describe "queueResyncProjectStructure", -> + it "should queue an update", -> + + describe "queueResyncDocContent", -> + it "should queue an update", -> diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee index 50a2679953..51e736aa45 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee @@ -8,6 +8,7 
@@ describe "ProjectManager - flushAndDeleteProject", -> beforeEach -> @ProjectManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./HistoryManager": @HistoryManager = diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee index 613b25ae4d..a5fe3805d5 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee @@ -8,6 +8,7 @@ describe "ProjectManager - flushProject", -> beforeEach -> @ProjectManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./HistoryManager": @HistoryManager = {} @@ -72,5 +73,3 @@ describe "ProjectManager - flushProject", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true - - diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee index 5c91f5ea37..11f23bbd4c 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee @@ -9,6 +9,7 @@ describe "ProjectManager - getProjectDocsAndFlushIfOld", -> beforeEach -> @ProjectManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./HistoryManager": @HistoryManager = {} diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index 5753b7f2f1..7bd5c19848 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -8,6 +8,7 @@ describe "ProjectManager", -> beforeEach -> @ProjectManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} + "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./HistoryManager": @HistoryManager = {} @@ -40,7 +41,7 @@ describe "ProjectManager", -> newPathname: 'bar2' @fileUpdates = [ @firstFileUpdate ] @DocumentManager.renameDocWithLock = sinon.stub().yields() - @RedisManager.renameFile = sinon.stub().yields() + @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() describe "successfully", -> beforeEach -> @@ -55,8 +56,8 @@ describe "ProjectManager", -> .should.equal true it "should rename the files in the updates", -> - @RedisManager.renameFile - .calledWith(@project_id, @firstFileUpdate.id, @user_id, @firstFileUpdate) + @ProjectHistoryRedisManager.queueRenameEntity + 
.calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, @firstFileUpdate) .should.equal true it "should not flush the history", -> @@ -79,7 +80,7 @@ describe "ProjectManager", -> describe "when renaming a file fails", -> beforeEach -> @error = new Error('error') - @RedisManager.renameFile = sinon.stub().yields(@error) + @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(@error) @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback it "should call the callback with the error", -> @@ -108,22 +109,22 @@ describe "ProjectManager", -> id: 2 url: 'filestore.example.com/2' @fileUpdates = [ @firstFileUpdate ] - @RedisManager.addEntity = sinon.stub().yields() + @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields() describe "successfully", -> beforeEach -> @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback it "should add the docs in the updates", -> - @RedisManager.addEntity + @ProjectHistoryRedisManager.queueAddEntity .calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, @firstDocUpdate) .should.equal true - @RedisManager.addEntity + @ProjectHistoryRedisManager.queueAddEntity .calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, @secondDocUpdate) .should.equal true it "should add the files in the updates", -> - @RedisManager.addEntity + @ProjectHistoryRedisManager.queueAddEntity .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, @firstFileUpdate) .should.equal true @@ -138,7 +139,7 @@ describe "ProjectManager", -> describe "when adding a doc fails", -> beforeEach -> @error = new Error('error') - @RedisManager.addEntity = sinon.stub().yields(@error) + @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback it "should call the callback with the error", -> @@ -147,7 +148,7 @@ describe "ProjectManager", -> describe "when adding a file fails", -> beforeEach -> @error = new Error('error') - @RedisManager.addEntity = sinon.stub().yields(@error) + @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback it "should call the callback with the error", -> @@ -162,4 +163,3 @@ describe "ProjectManager", -> @HistoryManager.flushProjectChangesAsync .calledWith(@project_id) .should.equal true - diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 4cbac611b6..a3b28d00cb 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -15,6 +15,7 @@ describe "RedisManager", -> @RedisManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} "settings-sharelatex": @settings = { documentupdater: {logHashErrors: {write:true, read:true}} apis: @@ -38,9 +39,6 @@ describe "RedisManager", -> key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" - project_history: - key_schema: - projectHistoryOps: ({project_id}) -> 
"ProjectHistory:Ops:#{project_id}" } "redis-sharelatex": createClient: () => @rclient @@ -337,7 +335,9 @@ describe "RedisManager", -> @multi.exec = sinon.stub().callsArgWith(0, null, [@hash, null, null, null, null, null, null, @doc_update_list_length] ) - @rclient.rpush = sinon.stub().callsArgWith(@ops.length + 1, null, @project_update_list_length) + @ProjectHistoryRedisManager.queueOps = sinon.stub().callsArgWith( + @ops.length + 1, null, @project_update_list_length + ) describe "with a consistent version", -> beforeEach -> @@ -399,8 +399,8 @@ describe "RedisManager", -> .should.equal true it "should push the updates into the project history ops list", -> - @rclient.rpush - .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) + @ProjectHistoryRedisManager.queueOps + .calledWith(@project_id, JSON.stringify(@ops[0])) .should.equal true it "should call the callback", -> @@ -686,6 +686,7 @@ describe "RedisManager", -> describe "the document is cached in redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version') + @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback it "update the cached pathname", -> @@ -694,75 +695,20 @@ describe "RedisManager", -> .should.equal true it "should queue an update", -> - update = - pathname: @pathname - new_pathname: @newPathname - meta: - user_id: @userId - ts: new Date() - doc: @doc_id - @rclient.rpush - .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update)) + @ProjectHistoryRedisManager.queueRenameEntity + .calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback) .should.equal true - it "should call the callback", -> - @callback.calledWith().should.equal true - describe "the document is not cached in redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) + @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback it "does not update the cached pathname", -> @rclient.set.called.should.equal false - describe "renameFile", -> - beforeEach () -> - @rclient.rpush = sinon.stub().yields() - @file_id = 1234 - - @update = - pathname: @pathname = '/old' - newPathname: @newPathname = '/new' - - @RedisManager.renameFile @project_id, @file_id, @userId, @update - - it "should queue an update", -> - update = - pathname: @pathname - new_pathname: @newPathname - meta: - user_id: @userId - ts: new Date() - file: @file_id - - @rclient.rpush - .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update)) - .should.equal true - - describe "addEntity", -> - beforeEach (done) -> - @rclient.rpush = sinon.stub().yields() - @entity_id = 1234 - @entity_type = 'type' - - @update = - pathname: @pathname = '/old' - docLines: @docLines = 'a\nb' - url: @url = 'filestore.example.com' - - @RedisManager.addEntity @project_id, @entity_type, @entity_id, @userId, @update, done - - it "should queue an update", -> - update = - pathname: @pathname - docLines: @docLines - url: @url - meta: - user_id: @user_id - ts: new Date() - update[@entity_type] = @entity_id - - @rclient.rpush - .calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update)) - .should.equal true + it "should queue an update", -> + @ProjectHistoryRedisManager.queueRenameEntity + .calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback) + .should.equal true 
From 0642e3c8c9df79d3247f77b2555c5aff87e751c5 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 5 Mar 2018 12:14:47 +0000 Subject: [PATCH 398/769] support project version on incoming requests --- .../app/coffee/HttpController.coffee | 6 +++--- .../coffee/ProjectHistoryRedisManager.coffee | 2 ++ .../app/coffee/ProjectManager.coffee | 7 ++++++- .../HttpController/HttpControllerTests.coffee | 9 +++++---- .../ProjectManager/updateProjectTests.coffee | 17 +++++++++-------- 5 files changed, 25 insertions(+), 16 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 9918339d6e..ce4d8bf637 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -161,10 +161,10 @@ module.exports = HttpController = updateProject: (req, res, next = (error) ->) -> timer = new Metrics.Timer("http.updateProject") project_id = req.params.project_id - {userId, docUpdates, fileUpdates} = req.body - logger.log {project_id, docUpdates, fileUpdates}, "updating project via http" + {userId, docUpdates, fileUpdates, version} = req.body + logger.log {project_id, docUpdates, fileUpdates, version}, "updating project via http" - ProjectManager.updateProjectWithLocks project_id, userId, docUpdates, fileUpdates, (error) -> + ProjectManager.updateProjectWithLocks project_id, userId, docUpdates, fileUpdates, version, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, "updated project via http" diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 6de1d8efc9..ebf5568317 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -14,6 +14,7 @@ module.exports = ProjectHistoryRedisManager = meta: user_id: user_id ts: new Date() + version: update.version update[entity_type] = entity_id logger.log {project_id, update}, "queue rename operation to project-history" @@ -29,6 +30,7 @@ module.exports = ProjectHistoryRedisManager = meta: user_id: user_id ts: new Date() + version: update.version update[entity_type] = entitiy_id logger.log {project_id, update}, "queue add operation to project-history" diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 36ae86363d..729f1743e8 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -105,16 +105,20 @@ module.exports = ProjectManager = clearProjectState: (project_id, callback = (error) ->) -> RedisManager.clearProjectState project_id, callback - updateProjectWithLocks: (project_id, user_id, docUpdates, fileUpdates, _callback = (error) ->) -> + updateProjectWithLocks: (project_id, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) -> timer = new Metrics.Timer("projectManager.updateProject") callback = (args...) -> timer.done() _callback(args...) + project_version = version + project_subversion = 0 # project versions can have multiple operations + project_ops_length = 0 handleDocUpdate = (update, cb) -> doc_id = update.id + update.version = "#{project_version}.#{project_subversion++}" if update.docLines? 
ProjectHistoryRedisManager.queueAddEntity project_id, 'doc', doc_id, user_id, update, (error, count) -> project_ops_length = count @@ -126,6 +130,7 @@ handleFileUpdate = (update, cb) -> file_id = update.id + update.version = "#{project_version}.#{project_subversion++}" if update.url? ProjectHistoryRedisManager.queueAddEntity project_id, 'file', file_id, user_id, update, (error, count) -> project_ops_length = count diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index b79659cada..fca1614c2d 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -512,19 +512,20 @@ describe "HttpController", -> @userId = "user-id-123" @docUpdates = sinon.stub() @fileUpdates = sinon.stub() + @version = 1234567 @req = - body: {@userId, @docUpdates, @fileUpdates} + body: {@userId, @docUpdates, @fileUpdates, @version} params: project_id: @project_id describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(4) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(5) @HttpController.updateProject(@req, @res, @next) it "should accept the change", -> @ProjectManager.updateProjectWithLocks - .calledWith(@project_id, @userId, @docUpdates, @fileUpdates) + .calledWith(@project_id, @userId, @docUpdates, @fileUpdates, @version) .should.equal true it "should return a successful No Content response", -> @@ -537,7 +538,7 @@ describe "when an error occurs", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(4, new Error("oops")) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(5, new Error("oops")) @HttpController.updateProject(@req, @res, @next) it "should call next with the error", -> diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index 7bd5c19848..b04a8c7a50 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -18,6 +18,7 @@ describe "ProjectManager", -> @project_id = "project-id-123" @user_id = "user-id-123" + @version = 1234567 @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false) @HistoryManager.flushProjectChangesAsync = sinon.stub() @callback = sinon.stub() @@ -46,7 +47,7 @@ describe "ProjectManager", -> describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should rename the docs in the updates", -> @DocumentManager.renameDocWithLock @@ -72,7 +73,7 @@ beforeEach -> @error = new Error('error') @DocumentManager.renameDocWithLock = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal
true @@ -81,7 +82,7 @@ beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -89,7 +90,7 @@ describe "with enough ops to flush", -> beforeEach -> @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should flush the history", -> @HistoryManager.flushProjectChangesAsync .calledWith(@project_id) .should.equal true @@ -113,7 +114,7 @@ describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should add the docs in the updates", -> @ProjectHistoryRedisManager.queueAddEntity @@ -140,7 +141,7 @@ beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -149,7 +150,7 @@ beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -157,7 +158,7 @@ describe "with enough ops to flush", -> beforeEach -> @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should flush the history", -> @HistoryManager.flushProjectChangesAsync
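This patch threads a single incoming project version from the HTTP request through updateProjectWithLocks and stamps every queued operation with a dotted version string: the major part is the request's version, the minor part a per-request counter (project_subversion in the code above), so several doc and file updates arriving in one request keep an unambiguous order downstream. A small self-contained illustration of the scheme (the helper name is local to this sketch, not part of the service):

    # Stamp each update, in request order, with "<project_version>.<subversion>".
    tagUpdatesWithVersion = (updates, project_version) ->
      project_subversion = 0
      for update in updates
        update.version = "#{project_version}.#{project_subversion++}"
      updates

    # With version 1234567 and three updates, the stamped values are
    # "1234567.0", "1234567.1" and "1234567.2" -- the same "#{@version}.0"
    # style the unit tests assert against in the next patch.

The dotted form leaves the project version itself untouched while still giving each operation a strictly increasing sort key within its request.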
"../../../../app/js/ProjectManager.js" SandboxedModule = require('sandboxed-module') +_ = require('underscore') describe "ProjectManager", -> beforeEach -> @@ -49,16 +50,19 @@ describe "ProjectManager", -> @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should rename the docs in the updates", -> + firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) + secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) @DocumentManager.renameDocWithLock - .calledWith(@project_id, @firstDocUpdate.id, @user_id, @firstDocUpdate) + .calledWith(@project_id, @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) .should.equal true @DocumentManager.renameDocWithLock - .calledWith(@project_id, @secondDocUpdate.id, @user_id, @secondDocUpdate) + .calledWith(@project_id, @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) .should.equal true it "should rename the files in the updates", -> + firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) @ProjectHistoryRedisManager.queueRenameEntity - .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, @firstFileUpdate) + .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true it "should not flush the history", -> @@ -117,16 +121,19 @@ describe "ProjectManager", -> @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should add the docs in the updates", -> + firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) + secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) @ProjectHistoryRedisManager.queueAddEntity - .calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, @firstDocUpdate) + .calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) .should.equal true @ProjectHistoryRedisManager.queueAddEntity - .calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, @secondDocUpdate) + .calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) .should.equal true it "should add the files in the updates", -> + firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) @ProjectHistoryRedisManager.queueAddEntity - .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, @firstFileUpdate) + .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true it "should not flush the history", -> From 75a5428cbffb97fea0f1c8ea47b0afd71f418468 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 6 Mar 2018 10:36:38 +0000 Subject: [PATCH 400/769] update acceptance tests --- ...lyingUpdatesToProjectStructureTests.coffee | 29 ++++++++++++------- .../coffee/helpers/DocUpdaterClient.coffee | 4 +-- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee index d060cd918c..b617afc7db 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee @@ -13,6 +13,7 @@ DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Applying updates to a 
project's structure", -> before -> @user_id = 'user-id-123' + @version = 1234 describe "renaming a file", -> before (done) -> @@ -24,7 +25,7 @@ describe "Applying updates to a project's structure", -> @fileUpdates = [ @fileUpdate ] DocUpdaterApp.ensureRunning (error) => throw error if error? - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, (error) -> + DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, @version, (error) -> throw error if error? setTimeout done, 200 @@ -38,6 +39,7 @@ describe "Applying updates to a project's structure", -> update.new_pathname.should.equal '/new-file-path' update.meta.user_id.should.equal @user_id update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.0" done() @@ -52,7 +54,7 @@ describe "Applying updates to a project's structure", -> describe "when the document is not loaded", -> before (done) -> @project_id = DocUpdaterClient.randomId() - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], (error) -> + DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> throw error if error? setTimeout done, 200 @@ -66,6 +68,7 @@ describe "Applying updates to a project's structure", -> update.new_pathname.should.equal '/new-doc-path' update.meta.user_id.should.equal @user_id update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.0" done() @@ -76,7 +79,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.preloadDoc @project_id, @docUpdate.id, (error) => throw error if error? sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], (error) -> + DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> throw error if error? setTimeout done, 200 @@ -98,6 +101,7 @@ describe "Applying updates to a project's structure", -> update.new_pathname.should.equal '/new-doc-path' update.meta.user_id.should.equal @user_id update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.0" done() @@ -109,7 +113,7 @@ describe "Applying updates to a project's structure", -> pathname: '/file-path' url: 'filestore.example.com' @fileUpdates = [ @fileUpdate ] - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, (error) -> + DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, @version, (error) -> throw error if error? setTimeout done, 200 @@ -123,6 +127,7 @@ describe "Applying updates to a project's structure", -> update.url.should.equal 'filestore.example.com' update.meta.user_id.should.equal @user_id update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.0" done() @@ -134,7 +139,7 @@ describe "Applying updates to a project's structure", -> pathname: '/file-path' docLines: 'a\nb' @docUpdates = [ @docUpdate ] - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], (error) -> + DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> throw error if error? 
setTimeout done, 200 @@ -148,6 +153,7 @@ update.docLines.should.equal 'a\nb' update.meta.user_id.should.equal @user_id update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.0" done() @@ -155,7 +161,8 @@ before (done) -> @project_id = DocUpdaterClient.randomId() @user_id = DocUpdaterClient.randomId() - + @version0 = 12345 + @version1 = @version0 + 1 updates = [] for v in [0..599] # Should flush after 500 ops updates.push id: DocUpdaterClient.randomId() pathname: '/file-' + v newPathname: '/new-file-' + v # Send updates in chunks to cause multiple flushes projectId = @project_id userId = @user_id - DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(0, 250), [], (error) -> + DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(0, 250), [], @version0, (error) -> throw error if error? - DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(250), [], (error) -> + DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(250), [], @version1, (error) -> throw error if error? setTimeout done, 2000 @@ -184,6 +191,8 @@ before (done) -> @project_id = DocUpdaterClient.randomId() @user_id = DocUpdaterClient.randomId() + @version0 = 12345 + @version1 = @version0 + 1 updates = [] for v in [0..42] # Should flush after 500 ops updates.push id: DocUpdaterClient.randomId() pathname: '/file-' + v url: 'filestore.example.com/file-' + v # Send updates in chunks projectId = @project_id userId = @user_id - DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(0, 10), [], (error) -> + DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(0, 10), [], @version0, (error) -> throw error if error? - DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(10), [], (error) -> + DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(10), [], @version1, (error) -> throw error if error?
setTimeout done, 2000 diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index f70271021b..7f50d64372 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -87,9 +87,9 @@ module.exports = DocUpdaterClient = body = JSON.parse(body) callback error, res, body - sendProjectUpdate: (project_id, userId, docUpdates, fileUpdates, callback = (error) ->) -> + sendProjectUpdate: (project_id, userId, docUpdates, fileUpdates, version, callback = (error) ->) -> request.post { url: "http://localhost:3003/project/#{project_id}" - json: { userId, docUpdates, fileUpdates } + json: { userId, docUpdates, fileUpdates, version } }, (error, res, body) -> callback error, res, body From 3385d2640a5a8a8e140bf8b4887ab8d5bdcce9fb Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Mar 2018 17:06:42 +0000 Subject: [PATCH 401/769] fix structure ordering bug --- .../app/coffee/ProjectManager.coffee | 4 ++-- .../ProjectManager/updateProjectTests.coffee | 17 ++++++++++++----- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 729f1743e8..8ae14cd66e 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -140,9 +140,9 @@ module.exports = ProjectManager = project_ops_length = count cb(error) - async.each docUpdates, handleDocUpdate, (error) -> + async.eachSeries docUpdates, handleDocUpdate, (error) -> return callback(error) if error? - async.each fileUpdates, handleFileUpdate, (error) -> + async.eachSeries fileUpdates, handleFileUpdate, (error) -> return callback(error) if error? 
if HistoryManager.shouldFlushHistoryOps(project_ops_length, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS) HistoryManager.flushProjectChangesAsync project_id diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index a5d3e881c6..96d2ccc07b 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -111,9 +111,12 @@ describe "ProjectManager", -> docLines: "a\nb" @docUpdates = [ @firstDocUpdate, @secondDocUpdate ] @firstFileUpdate = - id: 2 + id: 3 url: 'filestore.example.com/2' - @fileUpdates = [ @firstFileUpdate ] + @secondFileUpdate = + id: 4 + url: 'filestore.example.com/3' + @fileUpdates = [ @firstFileUpdate, @secondFileUpdate ] @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields() describe "successfully", -> @@ -123,18 +126,22 @@ describe "ProjectManager", -> it "should add the docs in the updates", -> firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) - @ProjectHistoryRedisManager.queueAddEntity + @ProjectHistoryRedisManager.queueAddEntity.getCall(0) .calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) .should.equal true - @ProjectHistoryRedisManager.queueAddEntity + @ProjectHistoryRedisManager.queueAddEntity.getCall(1) .calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) .should.equal true it "should add the files in the updates", -> firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) - @ProjectHistoryRedisManager.queueAddEntity + secondFileUpdateWithVersion = _.extend({}, @secondFileUpdate, {version: "#{@version}.3"}) + @ProjectHistoryRedisManager.queueAddEntity.getCall(2) .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true + @ProjectHistoryRedisManager.queueAddEntity.getCall(3) + .calledWith(@project_id, 'file', @secondFileUpdate.id, @user_id, secondFileUpdateWithVersion) + .should.equal true it "should not flush the history", -> @HistoryManager.flushProjectChangesAsync From 779f00f9125a013f1a28a8c7e9d6561bb7abf199 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 8 Mar 2018 09:38:24 +0000 Subject: [PATCH 402/769] add acceptance test for ordering of project structure changes --- ...lyingUpdatesToProjectStructureTests.coffee | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee index b617afc7db..5fdf2a9b4d 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee @@ -105,6 +105,73 @@ describe "Applying updates to a project's structure", -> done() + describe "renaming multiple documents and files", -> + before -> + @docUpdate0 = + id: DocUpdaterClient.randomId() + pathname: '/doc-path0' + newPathname: '/new-doc-path0' + @docUpdate1 = + id: DocUpdaterClient.randomId() + pathname: '/doc-path1' + newPathname: '/new-doc-path1' + @docUpdates 
= [ @docUpdate0, @docUpdate1 ] + @fileUpdate0 = + id: DocUpdaterClient.randomId() + pathname: '/file-path0' + newPathname: '/new-file-path0' + @fileUpdate1 = + id: DocUpdaterClient.randomId() + pathname: '/file-path1' + newPathname: '/new-file-path1' + @fileUpdates = [ @fileUpdate0, @fileUpdate1 ] + + describe "when the documents are not loaded", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, @fileUpdates, @version, (error) -> + throw error if error? + setTimeout done, 200 + + it "should push the applied doc renames to the project history api", (done) -> + rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + throw error if error? + + update = JSON.parse(updates[0]) + update.doc.should.equal @docUpdate0.id + update.pathname.should.equal '/doc-path0' + update.new_pathname.should.equal '/new-doc-path0' + update.meta.user_id.should.equal @user_id + update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.0" + + update = JSON.parse(updates[1]) + update.doc.should.equal @docUpdate1.id + update.pathname.should.equal '/doc-path1' + update.new_pathname.should.equal '/new-doc-path1' + update.meta.user_id.should.equal @user_id + update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.1" + + update = JSON.parse(updates[2]) + update.file.should.equal @fileUpdate0.id + update.pathname.should.equal '/file-path0' + update.new_pathname.should.equal '/new-file-path0' + update.meta.user_id.should.equal @user_id + update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.2" + + update = JSON.parse(updates[3]) + update.file.should.equal @fileUpdate1.id + update.pathname.should.equal '/file-path1' + update.new_pathname.should.equal '/new-file-path1' + update.meta.user_id.should.equal @user_id + update.meta.ts.should.be.a('string') + update.version.should.equal "#{@version}.3" + + done() + + describe "adding a file", -> before (done) -> @project_id = DocUpdaterClient.randomId() From dd0f8b880aeb71a6a82db5fe688e7ab826587825 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 16 Mar 2018 10:54:12 +0000 Subject: [PATCH 403/769] change update to projectUpdate in project related methods --- .../coffee/ProjectHistoryRedisManager.coffee | 42 +++++++++---------- .../app/coffee/ProjectManager.coffee | 24 +++++------ 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index ebf5568317..c92b7277f4 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -7,49 +7,49 @@ module.exports = ProjectHistoryRedisManager = queueOps: (project_id, ops..., callback) -> rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback - queueRenameEntity: (project_id, entity_type, entity_id, user_id, update, callback) -> - update = - pathname: update.pathname - new_pathname: update.newPathname + queueRenameEntity: (project_id, entity_type, entity_id, user_id, projectUpdate, callback) -> + projectUpdate = + pathname: projectUpdate.pathname + new_pathname: projectUpdate.newPathname meta: user_id: user_id ts: new Date() - version: update.version - update[entity_type] = entity_id + version: projectUpdate.version + projectUpdate[entity_type] = entity_id - 
logger.log {project_id, update}, "queue rename operation to project-history" - jsonUpdate = JSON.stringify(update) + logger.log {project_id, projectUpdate}, "queue rename operation to project-history" + jsonUpdate = JSON.stringify(projectUpdate) ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueAddEntity: (project_id, entity_type, entitiy_id, user_id, update, callback = (error) ->) -> - update = - pathname: update.pathname - docLines: update.docLines - url: update.url + queueAddEntity: (project_id, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) -> + projectUpdate = + pathname: projectUpdate.pathname + docLines: projectUpdate.docLines + url: projectUpdate.url meta: user_id: user_id ts: new Date() - version: update.version - update[entity_type] = entitiy_id + version: projectUpdate.version + projectUpdate[entity_type] = entitiy_id - logger.log {project_id, update}, "queue add operation to project-history" - jsonUpdate = JSON.stringify(update) + logger.log {project_id, projectUpdate}, "queue add operation to project-history" + jsonUpdate = JSON.stringify(projectUpdate) ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback queueResyncProjectStructure: (project_id, docs, files, callback) -> logger.log {project_id, docs, files}, "queue project structure resync" - update = + projectUpdate = resyncProjectStructure: { docs, files } meta: ts: new Date() - jsonUpdate = JSON.stringify update + jsonUpdate = JSON.stringify projectUpdate ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) -> logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync" - update = + projectUpdate = resyncDocContent: content: lines.join("\n"), version: version @@ -57,5 +57,5 @@ module.exports = ProjectHistoryRedisManager = doc: doc_id meta: ts: new Date() - jsonUpdate = JSON.stringify update + jsonUpdate = JSON.stringify projectUpdate ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 8ae14cd66e..eb7acaede1 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -116,27 +116,27 @@ module.exports = ProjectManager = project_ops_length = 0 - handleDocUpdate = (update, cb) -> - doc_id = update.id - update.version = "#{project_version}.#{project_subversion++}" - if update.docLines? - ProjectHistoryRedisManager.queueAddEntity project_id, 'doc', doc_id, user_id, update, (error, count) -> + handleDocUpdate = (projectUpdate, cb) -> + doc_id = projectUpdate.id + projectUpdate.version = "#{project_version}.#{project_subversion++}" + if projectUpdate.docLines? + ProjectHistoryRedisManager.queueAddEntity project_id, 'doc', doc_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) else - DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, (error, count) -> + DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) - handleFileUpdate = (update, cb) -> - file_id = update.id - update.version = "#{project_version}.#{project_subversion++}" - if update.url? 
- ProjectHistoryRedisManager.queueAddEntity project_id, 'file', file_id, user_id, update, (error, count) -> + handleFileUpdate = (projectUpdate, cb) -> + file_id = projectUpdate.id + projectUpdate.version = "#{project_version}.#{project_subversion++}" + if projectUpdate.url? + ProjectHistoryRedisManager.queueAddEntity project_id, 'file', file_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) else - ProjectHistoryRedisManager.queueRenameEntity project_id, 'file', file_id, user_id, update, (error, count) -> + ProjectHistoryRedisManager.queueRenameEntity project_id, 'file', file_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) From fb1852a593b71955a91c5d9ae95e09479a966012 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Wed, 11 Apr 2018 11:03:20 +0100 Subject: [PATCH 404/769] cache projectHistoryId with doc in Redis --- .../app/coffee/DocumentManager.coffee | 28 +++++++-------- .../app/coffee/PersistenceManager.coffee | 4 +-- .../app/coffee/RedisManager.coffee | 15 ++++---- .../app/coffee/UpdateManager.coffee | 7 ++-- .../config/settings.defaults.coffee | 1 + .../DocumentManagerTests.coffee | 22 ++++++------ .../PersistenceManagerTests.coffee | 6 +++- .../RedisManager/RedisManagerTests.coffee | 36 +++++++++++++------ .../UpdateManager/UpdateManagerTests.coffee | 12 ++++--- 9 files changed, 80 insertions(+), 51 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 640ebf63cb..8b4f4192ec 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -13,39 +13,39 @@ async = require "async" MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change module.exports = DocumentManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) ->) -> timer = new Metrics.Timer("docManager.getDoc") callback = (args...) -> timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) -> return callback(error) if error? if !lines? or !version? logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? - logger.log {project_id, doc_id, lines, version, pathname}, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, (error) -> + logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API" + RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) -> return callback(error) if error? 
- callback null, lines, version, ranges, pathname, null, false + callback null, lines, version, ranges, pathname, projectHistoryId, null, false else - callback null, lines, version, ranges, pathname, unflushedTime, true + callback null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true - getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname) ->) -> + getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") callback = (args...) -> timer.done() _callback(args...) - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? if fromVersion == -1 - callback null, lines, version, [], ranges, pathname + callback null, lines, version, [], ranges, pathname, projectHistoryId else RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) -> return callback(error) if error? - callback null, lines, version, ops, ranges, pathname + callback null, lines, version, ops, ranges, pathname, projectHistoryId setDoc: (project_id, doc_id, newLines, source, user_id, undoing, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.setDoc") @@ -57,7 +57,7 @@ module.exports = DocumentManager = return callback(new Error("No lines were provided to setDoc")) UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> return callback(error) if error? if oldLines? and oldLines.length > 0 and oldLines[0].text? @@ -170,7 +170,7 @@ module.exports = DocumentManager = RedisManager.renameDoc project_id, doc_id, user_id, update, callback getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> return callback(error) if error? # if doc was already loaded see if it needs to be flushed if alreadyLoaded and unflushedTime? 
and (Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE @@ -195,7 +195,7 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback - getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version, ops, ranges, pathname) ->) -> + getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index bd5ce5239c..8a43d989a8 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -13,7 +13,7 @@ request = (require("requestretry")).defaults({ MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -44,7 +44,7 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no valid doc version")) if !body.pathname? return callback(new Error("web API response had no valid doc pathname")) - return callback null, body.lines, body.version, body.ranges, body.pathname + return callback null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) else diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 382ff2b502..b483839302 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -36,7 +36,7 @@ historyKeys = Settings.redis.history.key_schema module.exports = RedisManager = rclient: rclient - putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, _callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, projectHistoryId, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> timer.done() @@ -47,7 +47,7 @@ module.exports = RedisManager = logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message return callback(error) docHash = RedisManager._computeHash(docLines) - logger.log {project_id, doc_id, version, docHash, pathname}, "putting doc in redis" + logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> if error? logger.error {err: error, doc_id, project_id}, error.message @@ -62,6 +62,7 @@ module.exports = RedisManager = else multi.del keys.ranges(doc_id:doc_id) multi.set keys.pathname(doc_id:doc_id), pathname + multi.set keys.projectHistoryId(doc_id:doc_id), projectHistoryId multi.exec (error, result) -> return callback(error) if error? 
# check the hash computed on the redis server @@ -88,6 +89,7 @@ module.exports = RedisManager = multi.del keys.docHash(doc_id:doc_id) multi.del keys.ranges(doc_id:doc_id) multi.del keys.pathname(doc_id:doc_id) + multi.del keys.projectHistoryId(doc_id:doc_id) multi.del keys.unflushedTime(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? @@ -108,7 +110,7 @@ module.exports = RedisManager = clearProjectState: (project_id, callback = (error) ->) -> rclient.del keys.projectState(project_id:project_id), callback - getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, unflushedTime) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) @@ -117,8 +119,9 @@ module.exports = RedisManager = multi.get keys.projectKey(doc_id:doc_id) multi.get keys.ranges(doc_id:doc_id) multi.get keys.pathname(doc_id:doc_id) + multi.get keys.projectHistoryId(doc_id:doc_id) multi.get keys.unflushedTime(doc_id:doc_id) - multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, unflushedTime])-> + multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime])-> timeSpan = timer.done() return callback(error) if error? # check if request took too long and bail out. only do this for @@ -147,14 +150,14 @@ module.exports = RedisManager = # doc is not in redis, bail out if !docLines? - return callback null, docLines, version, ranges, pathname, unflushedTime + return callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime # doc should be in project set, check if missing (workaround for missing docs from putDoc) rclient.sadd keys.docsInProject(project_id:project_id), doc_id, (error, result) -> return callback(error) if error? if result isnt 0 # doc should already be in set logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" - callback null, docLines, version, ranges, pathname, unflushedTime + callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 02f0a1b8d1..bfcfb806ca 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -71,7 +71,7 @@ module.exports = UpdateManager = profile = new Profiler("applyUpdate", {project_id, doc_id}) UpdateManager._sanitizeUpdate update profile.log("sanitizeUpdate") - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> profile.log("getDoc") return callback(error) if error? if !lines? or !version? @@ -80,7 +80,7 @@ module.exports = UpdateManager = profile.log("sharejs.applyUpdate") return callback(error) if error? 
RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> - UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, lines) + UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines) profile.log("RangesManager.applyUpdate") return callback(error) if error? RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) -> @@ -130,12 +130,13 @@ module.exports = UpdateManager = op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") return update - _addProjectHistoryMetadataToOps: (updates, pathname, lines) -> + _addProjectHistoryMetadataToOps: (updates, pathname, projectHistoryId, lines) -> doc_length = _.reduce lines, (chars, line) -> chars + line.length, 0 doc_length += lines.length - 1 # count newline characters updates.forEach (update) -> + update.projectHistoryId = projectHistoryId update.meta ||= {} update.meta.pathname = pathname update.meta.doc_length = doc_length diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 1c7ebf283e..18f2b1570b 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -46,6 +46,7 @@ module.exports = docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" pathname: ({doc_id}) -> "Pathname:#{doc_id}" + projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" # cluster: [{ diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 16e58a81a7..a2d55d1cb3 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -111,7 +111,7 @@ describe "DocumentManager", -> describe "getDocAndRecentOps", -> describe "with a previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback @@ -126,14 +126,14 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ops, @ranges, @pathname).should.equal true + @callback.calledWith(null, @lines, @version, @ops, @ranges, @pathname, @projectHistoryId).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true describe "with no previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback @@ -146,7 +146,7 @@ describe "DocumentManager", -> 
@RedisManager.getPreviousDocOps.called.should.equal false
it "should call the callback with the doc info", ->
- @callback.calledWith(null, @lines, @version, [], @ranges, @pathname).should.equal true
+ @callback.calledWith(null, @lines, @version, [], @ranges, @pathname, @projectHistoryId).should.equal true
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
@@ -154,7 +154,7 @@
describe "getDoc", ->
describe "when the doc exists in Redis", ->
beforeEach ->
- @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @unflushedTime)
+ @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime)
@DocumentManager.getDoc @project_id, @doc_id, @callback
it "should get the doc from Redis", ->
@@ -163,7 +163,7 @@ describe "DocumentManager", ->
.should.equal true
it "should call the callback with the doc info", ->
- @callback.calledWith(null, @lines, @version, @ranges, @pathname, @unflushedTime, true).should.equal true
+ @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, true).should.equal true
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
@@ -171,7 +171,7 @@ describe "DocumentManager", ->
describe "when the doc does not exist in Redis", ->
beforeEach ->
@RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null, null)
- @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname)
+ @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId)
@RedisManager.putDocInMemory = sinon.stub().yields()
@DocumentManager.getDoc @project_id, @doc_id, @callback
@@ -187,11 +187,11 @@ describe "DocumentManager", ->
it "should set the doc in Redis", ->
@RedisManager.putDocInMemory
- .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname)
+ .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId)
.should.equal true
it "should call the callback with the doc info", ->
- @callback.calledWith(null, @lines, @version, @ranges, @pathname, null, false).should.equal true
+ @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, null, false).should.equal true
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
@@ -202,7 +202,7 @@ describe "DocumentManager", ->
@beforeLines = ["before", "lines"]
@afterLines = ["after", "lines"]
@ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }]
- @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @pathname, @unflushedTime, true)
+ @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, true)
@DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops)
@UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null)
@DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2)
@@ -402,7 +402,7 @@ describe "DocumentManager", ->
describe "when the doc is in Redis", ->
describe "and has changes to be flushed", ->
beforeEach ->
- @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, Date.now() - 1e9, true)
+ @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, Date.now() - 1e9, true)
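# Note: sinon's stub.callsArgWith(index, args...) invokes the argument at the
# given position (here 2, the getDoc callback) with the values that follow, so
# the stubbed values must mirror the production callback signature exactly. A
# sketch of the stub above, annotated:
#
#   @DocumentManager.getDoc = sinon.stub().callsArgWith 2,
#     null,                 # error
#     @lines, @version, @ranges, @pathname, @projectHistoryId,
#     Date.now() - 1e9,     # an unflushedTime far in the past, to force a flush
#     true                  # alreadyLoaded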
@DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback it "should get the doc", -> diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index 937dcf3a77..0f8ad59167 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -17,6 +17,7 @@ describe "PersistenceManager", -> done: sinon.stub() "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] @version = 42 @@ -36,6 +37,7 @@ describe "PersistenceManager", -> version: @version, ranges: @ranges pathname: @pathname, + projectHistoryId: @projectHistoryId } describe "with a successful response from the web api", -> @@ -60,7 +62,9 @@ describe "PersistenceManager", -> .should.equal true it "should call the callback with the doc lines, version and ranges", -> - @callback.calledWith(null, @lines, @version, @ranges, @pathname).should.equal true + @callback + .calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId) + .should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index a3b28d00cb..5166287a34 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -33,6 +33,7 @@ describe "RedisManager", -> docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" pathname: ({doc_id}) -> "Pathname:#{doc_id}" + projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" history: @@ -72,7 +73,7 @@ describe "RedisManager", -> @unflushed_time = 12345 @pathname = '/a/b/c.tex' @multi.get = sinon.stub() - @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @unflushed_time]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @projectHistoryId, @unflushed_time]) @rclient.sadd = sinon.stub().yields(null, 0) describe "successfully", -> @@ -109,6 +110,11 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}") .should.equal true + it "should get the projectHistoryId", -> + @multi.get + .calledWith("ProjectHistoryId:#{@doc_id}") + .should.equal true + it "should check if the document is in the DocsIn set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}") @@ -116,7 +122,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time) .should.equal true it 'should not log any errors', -> @@ -125,7 +131,7 @@ describe "RedisManager", -> describe "when the document is not present", -> beforeEach -> - @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, 
null, null]) + @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null, null]) @rclient.sadd = sinon.stub().yields() @RedisManager.getDoc @project_id, @doc_id, @callback @@ -136,7 +142,7 @@ describe "RedisManager", -> it 'should return an empty result', -> @callback - .calledWithExactly(null, null, 0, {}, null, null) + .calledWithExactly(null, null, 0, {}, null, null, null) .should.equal true it 'should not log any errors', -> @@ -154,7 +160,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time) .should.equal true describe "with a corrupted document", -> @@ -532,7 +538,7 @@ describe "RedisManager", -> describe "with non-empty ranges", -> beforeEach (done) -> - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done it "should set the lines", -> @multi.eval @@ -564,6 +570,11 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}", @pathname) .should.equal true + it "should set the projectHistoryId for the doc", -> + @multi.set + .calledWith("ProjectHistoryId:#{@doc_id}", @projectHistoryId) + .should.equal true + it "should add the doc_id to the project set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}", @doc_id) @@ -575,7 +586,7 @@ describe "RedisManager", -> describe "with empty ranges", -> beforeEach (done) -> - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, @projectHistoryId, done it "should delete the ranges key", -> @multi.del @@ -590,7 +601,7 @@ describe "RedisManager", -> describe "with a corrupted write", -> beforeEach (done) -> @multi.exec = sinon.stub().callsArgWith(0, null, ["INVALID-HASH-VALUE"]) - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done it 'should log a hash error', -> @logger.error.calledWith() @@ -600,7 +611,7 @@ describe "RedisManager", -> beforeEach -> @_stringify = JSON.stringify @JSON.stringify = () -> return '["bad bytes! 
\u0000 <- here"]'
- @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @callback
+ @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, @callback
afterEach ->
@JSON.stringify = @_stringify
@@ -614,7 +625,7 @@ describe "RedisManager", ->
describe "with ranges that are too big", ->
beforeEach ->
@RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large"))
- @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @callback
+ @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, @callback
it 'should log an error', ->
@logger.error.called.should.equal true
@@ -664,6 +675,11 @@ describe "RedisManager", ->
.calledWith("Pathname:#{@doc_id}")
.should.equal true
+ it "should delete the projectHistoryId for the doc", ->
+ @multi.del
+ .calledWith("ProjectHistoryId:#{@doc_id}")
+ .should.equal true
+
describe "clearProjectState", ->
beforeEach (done) ->
@rclient.del = sinon.stub().callsArg(1)
diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
index e91c35f7e6..383bd1848e 100644
--- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
@@ -7,6 +7,7 @@ SandboxedModule = require('sandboxed-module')
describe "UpdateManager", ->
beforeEach ->
@project_id = "project-id-123"
+ @projectHistoryId = "history-id-123"
@doc_id = "document-id-123"
@callback = sinon.stub()
@UpdateManager = SandboxedModule.require modulePath, requires:
@@ -167,7 +168,7 @@ describe "UpdateManager", ->
@doc_ops_length = sinon.stub()
@project_ops_length = sinon.stub()
@pathname = '/a/b/c.tex'
- @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges, @pathname)
+ @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges, @pathname, @projectHistoryId)
@RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges)
@ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps)
@RedisManager.updateDocument = sinon.stub().yields(null, @doc_ops_length, @project_ops_length)
@@ -196,7 +197,7 @@ describe "UpdateManager", ->
it "should add metadata to the ops" , ->
@UpdateManager._addProjectHistoryMetadataToOps
- .calledWith(@appliedOps, @pathname, @lines)
+ .calledWith(@appliedOps, @pathname, @projectHistoryId, @lines)
.should.equal true
it "should push the applied ops into the history queue", ->
@@ -239,7 +240,7 @@ describe "UpdateManager", ->
@callback.calledWith(@error).should.equal true
describe "_addProjectHistoryMetadataToOps", ->
- it "should add pathname and doc_length metadata to the ops", ->
+ it "should add projectHistoryId, pathname and doc_length metadata to the ops", ->
lines = [
'some'
'test'
'data'
]
appliedOps = [
{ v: 42, op: [{i: "foo", p: 4}, { i: "bar", p: 6 }] },
{ v: 45, op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] },
{ v: 49, op: [{i: "penguin", p: 18}] }
]
- @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, lines)
+ @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, @projectHistoryId, lines)
appliedOps.should.deep.equal [{
+ projectHistoryId: @projectHistoryId
v: 42
op: [{i: "foo", p: 4}, { i: "bar", p: 6 }]
meta:
pathname: @pathname
doc_length: 14
}, {
+ projectHistoryId:
@projectHistoryId v: 45 op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] meta: pathname: @pathname doc_length: 20 # 14 + 'foo' + 'bar' }, { + projectHistoryId: @projectHistoryId v: 49 op: [{i: "penguin", p: 18}] meta: From a220794d32d93e269737fccbc924b6ec3a1b6cc8 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 13 Apr 2018 14:13:18 +0100 Subject: [PATCH 405/769] add projectHistoryId to resync updates --- .../document-updater/app/coffee/DocumentManager.coffee | 8 ++++---- .../document-updater/app/coffee/HistoryManager.coffee | 4 ++-- .../document-updater/app/coffee/HttpController.coffee | 4 ++-- .../app/coffee/ProjectHistoryRedisManager.coffee | 6 ++++-- .../coffee/DocumentManager/DocumentManagerTests.coffee | 8 ++++---- .../coffee/HistoryManager/HistoryManagerTests.coffee | 5 +++-- .../coffee/HttpController/HttpControllerTests.coffee | 9 +++++---- 7 files changed, 24 insertions(+), 20 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 8b4f4192ec..43c02bfc26 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -181,15 +181,15 @@ module.exports = DocumentManager = callback(null, lines, version) resyncDocContents: (project_id, doc_id, callback) -> - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? if !lines? or !version? - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? - ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback + ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback else - ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback + ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index c1371615f7..9d39166681 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -65,8 +65,8 @@ module.exports = HistoryManager = newBlock = Math.floor(length / threshold) return newBlock != prevBlock - resyncProjectHistory: (project_id, docs, files, callback) -> - ProjectHistoryRedisManager.queueResyncProjectStructure project_id, docs, files, (error) -> + resyncProjectHistory: (project_id, projectHistoryId, docs, files, callback) -> + ProjectHistoryRedisManager.queueResyncProjectStructure project_id, projectHistoryId, docs, files, (error) -> return callback(error) if error? 
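# Note: an illustrative sketch, not part of the patch. resyncProjectHistory
# queues one structure update and then a content resync per doc; assuming the
# async helper this service already requires, the fan-out that the hunk below
# sets up looks roughly like:
#
#   resyncDoc = (doc, cb) ->
#     DocumentManager.resyncDocContentsWithLock project_id, doc.doc, cb
#   async.eachSeries docs, resyncDoc, callback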
DocumentManager = require "./DocumentManager" resyncDoc = (doc, cb) -> diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index ce4d8bf637..63421db755 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -172,10 +172,10 @@ module.exports = HttpController = resyncProjectHistory: (req, res, next = (error) ->) -> project_id = req.params.project_id - {docs, files} = req.body + {projectHistoryId, docs, files} = req.body logger.log {project_id, docs, files}, "queuing project history resync via http" - HistoryManager.resyncProjectHistory project_id, docs, files, (error) -> + HistoryManager.resyncProjectHistory project_id, projectHistoryId, docs, files, (error) -> return next(error) if error? logger.log {project_id}, "queued project history resync via http" res.send 204 diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index c92b7277f4..625ca7fde9 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -38,21 +38,23 @@ module.exports = ProjectHistoryRedisManager = ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueResyncProjectStructure: (project_id, docs, files, callback) -> + queueResyncProjectStructure: (project_id, projectHistoryId, docs, files, callback) -> logger.log {project_id, docs, files}, "queue project structure resync" projectUpdate = resyncProjectStructure: { docs, files } + projectHistoryId: projectHistoryId meta: ts: new Date() jsonUpdate = JSON.stringify projectUpdate ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) -> + queueResyncDocContent: (project_id, projectHistoryId, doc_id, lines, version, pathname, callback) -> logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync" projectUpdate = resyncDocContent: content: lines.join("\n"), version: version + projectHistoryId: projectHistoryId path: pathname doc: doc_id meta: diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index a2d55d1cb3..d4262278d6 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -472,7 +472,7 @@ describe "DocumentManager", -> describe "resyncDocContents", -> describe "when doc is loaded in redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() @DocumentManager.resyncDocContents @project_id, @doc_id, @callback @@ -483,13 +483,13 @@ describe "DocumentManager", -> it "queues a resync doc content update", -> @ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) + .calledWith(@project_id, @projectHistoryId, @doc_id, @lines, @version, @pathname, @callback) .should.equal true describe "when doc 
is not loaded in redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() @DocumentManager.resyncDocContents @project_id, @doc_id, @callback @@ -505,5 +505,5 @@ describe "DocumentManager", -> it "queues a resync doc content update", -> @ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) + .calledWith(@project_id, @projectHistoryId, @doc_id, @lines, @version, @pathname, @callback) .should.equal true diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index e207cde99d..2233610d28 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -164,6 +164,7 @@ describe "HistoryManager", -> describe "resyncProjectHistory", -> beforeEach -> + @projectHistoryId = 'history-id-1234' @docs = [ doc: @doc_id path: 'main.tex' @@ -175,11 +176,11 @@ describe "HistoryManager", -> ] @ProjectHistoryRedisManager.queueResyncProjectStructure = sinon.stub().yields() @DocumentManager.resyncDocContentsWithLock = sinon.stub().yields() - @HistoryManager.resyncProjectHistory @project_id, @docs, @files, @callback + @HistoryManager.resyncProjectHistory @project_id, @projectHistoryId, @docs, @files, @callback it "should queue a project structure reync", -> @ProjectHistoryRedisManager.queueResyncProjectStructure - .calledWith(@project_id, @docs, @files) + .calledWith(@project_id, @projectHistoryId, @docs, @files) .should.equal true it "should queue doc content reyncs", -> diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index fca1614c2d..e36f54ee3d 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -548,23 +548,24 @@ describe "HttpController", -> describe "resyncProjectHistory", -> beforeEach -> + @projectHistoryId = "history-id-123" @docs = sinon.stub() @files = sinon.stub() @fileUpdates = sinon.stub() @req = body: - {@docs, @files} + {@projectHistoryId, @docs, @files} params: project_id: @project_id describe "successfully", -> beforeEach -> - @HistoryManager.resyncProjectHistory = sinon.stub().callsArg(3) + @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4) @HttpController.resyncProjectHistory(@req, @res, @next) it "should accept the change", -> @HistoryManager.resyncProjectHistory - .calledWith(@project_id, @docs, @files) + .calledWith(@project_id, @projectHistoryId, @docs, @files) .should.equal true it "should return a successful No Content response", -> @@ -574,7 +575,7 @@ describe "HttpController", -> describe "when an errors occurs", -> beforeEach -> - @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(3, new Error("oops")) + @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4, new Error("oops")) @HttpController.resyncProjectHistory(@req, @res, @next) it 
"should call next with the error", -> From 25c0be4eaaf4452aab5a864953c6c6b49e3fb6e2 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Fri, 13 Apr 2018 14:59:36 +0100 Subject: [PATCH 406/769] add projectHistryId to project structure updates --- .../app/coffee/DocumentManager.coffee | 8 ++--- .../app/coffee/HttpController.coffee | 4 +-- .../coffee/ProjectHistoryRedisManager.coffee | 6 ++-- .../app/coffee/ProjectManager.coffee | 10 +++--- .../app/coffee/RedisManager.coffee | 6 ++-- .../DocumentManagerTests.coffee | 5 +-- .../HttpController/HttpControllerTests.coffee | 9 +++--- .../ProjectHistoryRedisManagerTests.coffee | 11 +++++-- .../ProjectManager/updateProjectTests.coffee | 32 +++++++++---------- .../RedisManager/RedisManagerTests.coffee | 9 +++--- 10 files changed, 56 insertions(+), 44 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 43c02bfc26..0c50d9b1f3 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -161,13 +161,13 @@ module.exports = DocumentManager = return callback(error) if error? callback() - renameDoc: (project_id, doc_id, user_id, update, _callback = (error) ->) -> + renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.updateProject") callback = (args...) -> timer.done() _callback(args...) - RedisManager.renameDoc project_id, doc_id, user_id, update, callback + RedisManager.renameDoc project_id, doc_id, user_id, update, projectHistoryId, callback getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> @@ -223,9 +223,9 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.deleteComment, project_id, doc_id, thread_id, callback - renameDocWithLock: (project_id, doc_id, user_id, update, callback = (error) ->) -> + renameDocWithLock: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) -> UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, callback + UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, projectHistoryId, callback resyncDocContentsWithLock: (project_id, doc_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 63421db755..93f915d662 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -161,10 +161,10 @@ module.exports = HttpController = updateProject: (req, res, next = (error) ->) -> timer = new Metrics.Timer("http.updateProject") project_id = req.params.project_id - {userId, docUpdates, fileUpdates, version} = req.body + {projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body logger.log {project_id, docUpdates, fileUpdates, version}, "updating project via http" - ProjectManager.updateProjectWithLocks project_id, userId, docUpdates, fileUpdates, version, (error) -> + ProjectManager.updateProjectWithLocks project_id, projectHistoryId, userId, docUpdates, 
fileUpdates, version, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, "updated project via http" diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 625ca7fde9..42b9f16df2 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -7,7 +7,7 @@ module.exports = ProjectHistoryRedisManager = queueOps: (project_id, ops..., callback) -> rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback - queueRenameEntity: (project_id, entity_type, entity_id, user_id, projectUpdate, callback) -> + queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) -> projectUpdate = pathname: projectUpdate.pathname new_pathname: projectUpdate.newPathname @@ -15,6 +15,7 @@ module.exports = ProjectHistoryRedisManager = user_id: user_id ts: new Date() version: projectUpdate.version + projectHistoryId: projectHistoryId projectUpdate[entity_type] = entity_id logger.log {project_id, projectUpdate}, "queue rename operation to project-history" @@ -22,7 +23,7 @@ module.exports = ProjectHistoryRedisManager = ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueAddEntity: (project_id, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) -> + queueAddEntity: (project_id, projectHistoryId, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) -> projectUpdate = pathname: projectUpdate.pathname docLines: projectUpdate.docLines @@ -31,6 +32,7 @@ module.exports = ProjectHistoryRedisManager = user_id: user_id ts: new Date() version: projectUpdate.version + projectHistoryId: projectHistoryId projectUpdate[entity_type] = entitiy_id logger.log {project_id, projectUpdate}, "queue add operation to project-history" diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index eb7acaede1..cbf7bb661b 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -105,7 +105,7 @@ module.exports = ProjectManager = clearProjectState: (project_id, callback = (error) ->) -> RedisManager.clearProjectState project_id, callback - updateProjectWithLocks: (project_id, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) -> + updateProjectWithLocks: (project_id, projectHistoryId, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) -> timer = new Metrics.Timer("projectManager.updateProject") callback = (args...) -> timer.done() @@ -120,11 +120,11 @@ module.exports = ProjectManager = doc_id = projectUpdate.id projectUpdate.version = "#{project_version}.#{project_subversion++}" if projectUpdate.docLines? 
- ProjectHistoryRedisManager.queueAddEntity project_id, 'doc', doc_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'doc', doc_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) else - DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, (error, count) -> + DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, projectHistoryId, (error, count) -> project_ops_length = count cb(error) @@ -132,11 +132,11 @@ module.exports = ProjectManager = file_id = projectUpdate.id projectUpdate.version = "#{project_version}.#{project_subversion++}" if projectUpdate.url? - ProjectHistoryRedisManager.queueAddEntity project_id, 'file', file_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) else - ProjectHistoryRedisManager.queueRenameEntity project_id, 'file', file_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index b483839302..a940970176 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -275,16 +275,16 @@ module.exports = RedisManager = else callback null, docUpdateCount - renameDoc: (project_id, doc_id, user_id, update, callback = (error) ->) -> + renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) -> RedisManager.getDoc project_id, doc_id, (error, lines, version) -> return callback(error) if error? if lines? and version? rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) -> return callback(error) if error? 
- ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback + ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback else - ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback + ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback clearUnflushedTime: (doc_id, callback = (error) ->) -> rclient.del keys.unflushedTime(doc_id:doc_id), callback diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index d4262278d6..d7cd18630a 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -25,6 +25,7 @@ describe "DocumentManager", -> "./UpdateManager": @UpdateManager = {} "./RangesManager": @RangesManager = {} @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @doc_id = "doc-id-123" @user_id = 1234 @callback = sinon.stub() @@ -459,11 +460,11 @@ describe "DocumentManager", -> describe "successfully", -> beforeEach -> - @DocumentManager.renameDoc @project_id, @doc_id, @user_id, @update, @callback + @DocumentManager.renameDoc @project_id, @doc_id, @user_id, @update, @projectHistoryId, @callback it "should rename the document", -> @RedisManager.renameDoc - .calledWith(@project_id, @doc_id, @user_id, @update) + .calledWith(@project_id, @doc_id, @user_id, @update, @projectHistoryId) .should.equal true it "should call the callback", -> diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index e36f54ee3d..ab6718c12a 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -509,23 +509,24 @@ describe "HttpController", -> describe "updateProject", -> beforeEach -> + @projectHistoryId = "history-id-123" @userId = "user-id-123" @docUpdates = sinon.stub() @fileUpdates = sinon.stub() @version = 1234567 @req = - body: {@userId, @docUpdates, @fileUpdates, @version} + body: {@projectHistoryId, @userId, @docUpdates, @fileUpdates, @version} params: project_id: @project_id describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(5) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6) @HttpController.updateProject(@req, @res, @next) it "should accept the change", -> @ProjectManager.updateProjectWithLocks - .calledWith(@project_id, @userId, @docUpdates, @fileUpdates, @version) + .calledWith(@project_id, @projectHistoryId, @userId, @docUpdates, @fileUpdates, @version) .should.equal true it "should return a successful No Content response", -> @@ -538,7 +539,7 @@ describe "HttpController", -> describe "when an errors occurs", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(5, new Error("oops")) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6, new Error("oops")) @HttpController.updateProject(@req, @res, @next) it "should call next with the error", -> diff --git 
a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index 8ad0f53b5b..349d3623e6 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -8,6 +8,7 @@ tk = require "timekeeper" describe "ProjectHistoryRedisManager", -> beforeEach -> @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @user_id = "user-id-123" @callback = sinon.stub() @rclient = {} @@ -50,9 +51,10 @@ describe "ProjectHistoryRedisManager", -> @rawUpdate = pathname: @pathname = '/old' newPathname: @newPathname = '/new' + version: @version = 2 @ProjectHistoryRedisManager.queueOps = sinon.stub() - @ProjectHistoryRedisManager.queueRenameEntity @project_id, 'file', @file_id, @user_id, @rawUpdate, @callback + @ProjectHistoryRedisManager.queueRenameEntity @project_id, @projectHistoryId, 'file', @file_id, @user_id, @rawUpdate, @callback it "should queue an update", -> update = @@ -61,6 +63,8 @@ describe "ProjectHistoryRedisManager", -> meta: user_id: @user_id ts: new Date() + version: @version + projectHistoryId: @projectHistoryId file: @file_id @ProjectHistoryRedisManager.queueOps @@ -75,10 +79,11 @@ describe "ProjectHistoryRedisManager", -> @rawUpdate = pathname: @pathname = '/old' docLines: @docLines = 'a\nb' + version: @version = 2 url: @url = 'filestore.example.com' @ProjectHistoryRedisManager.queueOps = sinon.stub() - @ProjectHistoryRedisManager.queueAddEntity @project_id, 'doc', @doc_id, @user_id, @rawUpdate, @callback + @ProjectHistoryRedisManager.queueAddEntity @project_id, @projectHistoryId, 'doc', @doc_id, @user_id, @rawUpdate, @callback it "should queue an update", -> update = @@ -88,6 +93,8 @@ describe "ProjectHistoryRedisManager", -> meta: user_id: @user_id ts: new Date() + version: @version + projectHistoryId: @projectHistoryId doc: @doc_id @ProjectHistoryRedisManager.queueOps diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index 96d2ccc07b..3ed0109be7 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -18,6 +18,7 @@ describe "ProjectManager", -> done: sinon.stub() @project_id = "project-id-123" + @projectHistoryId = 'history-id-123' @user_id = "user-id-123" @version = 1234567 @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false) @@ -27,7 +28,6 @@ describe "ProjectManager", -> describe "updateProjectWithLocks", -> describe "rename operations", -> beforeEach -> - @firstDocUpdate = id: 1 pathname: 'foo' @@ -47,22 +47,22 @@ describe "ProjectManager", -> describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should rename the docs in the updates", -> firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) 
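# Note: updateProjectWithLocks stamps each structure update with
# "#{project_version}.#{project_subversion++}" before queueing it, so with
# version 1234567 the fixtures above receive subversions in update order. A
# worked sketch:
#
#   project_version = 1234567
#   project_subversion = 0
#   version = "#{project_version}.#{project_subversion++}"   # "1234567.0"
#   version = "#{project_version}.#{project_subversion++}"   # "1234567.1"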
@DocumentManager.renameDocWithLock - .calledWith(@project_id, @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) + .calledWith(@project_id, @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion, @projectHistoryId) .should.equal true @DocumentManager.renameDocWithLock - .calledWith(@project_id, @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) + .calledWith(@project_id, @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion, @projectHistoryId) .should.equal true it "should rename the files in the updates", -> firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) @ProjectHistoryRedisManager.queueRenameEntity - .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true it "should not flush the history", -> @@ -77,7 +77,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @DocumentManager.renameDocWithLock = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -86,7 +86,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -94,7 +94,7 @@ describe "ProjectManager", -> describe "with enough ops to flush", -> beforeEach -> @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should flush the history", -> @HistoryManager.flushProjectChangesAsync @@ -121,26 +121,26 @@ describe "ProjectManager", -> describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should add the docs in the updates", -> firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) @ProjectHistoryRedisManager.queueAddEntity.getCall(0) - .calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) .should.equal true @ProjectHistoryRedisManager.queueAddEntity.getCall(1) - .calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) .should.equal true it "should add the files in the 
updates", -> firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) secondFileUpdateWithVersion = _.extend({}, @secondFileUpdate, {version: "#{@version}.3"}) @ProjectHistoryRedisManager.queueAddEntity.getCall(2) - .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true @ProjectHistoryRedisManager.queueAddEntity.getCall(3) - .calledWith(@project_id, 'file', @secondFileUpdate.id, @user_id, secondFileUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'file', @secondFileUpdate.id, @user_id, secondFileUpdateWithVersion) .should.equal true it "should not flush the history", -> @@ -155,7 +155,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -164,7 +164,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -172,7 +172,7 @@ describe "ProjectManager", -> describe "with enough ops to flush", -> beforeEach -> @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should flush the history", -> @HistoryManager.flushProjectChangesAsync diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 5166287a34..e2263c21f4 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -60,6 +60,7 @@ describe "RedisManager", -> @doc_id = "doc-id-123" @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @callback = sinon.stub() describe "getDoc", -> @@ -703,7 +704,7 @@ describe "RedisManager", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version') @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() - @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback + @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @projectHistoryId, @callback it "update the cached pathname", -> @rclient.set @@ -712,19 +713,19 @@ describe "RedisManager", -> it "should queue an update", -> @ProjectHistoryRedisManager.queueRenameEntity - .calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback) + .calledWithExactly(@project_id, @projectHistoryId, 'doc', 
@doc_id, @userId, @update, @callback)
.should.equal true
describe "the document is not cached in redis", ->
beforeEach ->
@RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null)
@ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields()
@RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @projectHistoryId, @callback
it "does not update the cached pathname", ->
@rclient.set.called.should.equal false
it "should queue an update", ->
@ProjectHistoryRedisManager.queueRenameEntity
.calledWithExactly(@project_id, @projectHistoryId, 'doc', @doc_id, @userId, @update, @callback)
.should.equal true

From 7d597fe59ec1dd88697909241c71345d5261dcec Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alberto=20Fern=C3=A1ndez=20Capel?=
Date: Thu, 19 Apr 2018 14:03:35 +0100
Subject: [PATCH 407/769] Do not specify node version in .travis.yml

So Travis picks the version from .nvmrc instead

---
services/document-updater/.travis.yml | 3 ---
1 file changed, 3 deletions(-)

diff --git a/services/document-updater/.travis.yml b/services/document-updater/.travis.yml
index 5c181b1569..febdbb55a3 100644
--- a/services/document-updater/.travis.yml
+++ b/services/document-updater/.travis.yml
@@ -1,8 +1,5 @@
language: node_js
-node_js:
- - "0.10"
-
before_install:
- npm install -g grunt-cli

From 3cc20d6311f4f2fe2203bd4852d677bbbabaf89d Mon Sep 17 00:00:00 2001
From: Hayden Faulds
Date: Mon, 23 Apr 2018 11:41:29 +0100
Subject: [PATCH 408/769] Revert "Add projectHistoryId to updates"

---
.../app/coffee/DocumentManager.coffee | 44 +++++++++---------
.../app/coffee/HistoryManager.coffee | 4 +-
.../app/coffee/HttpController.coffee | 8 ++--
.../app/coffee/PersistenceManager.coffee | 4 +-
.../coffee/ProjectHistoryRedisManager.coffee | 12 ++---
.../app/coffee/ProjectManager.coffee | 10 ++---
.../app/coffee/RedisManager.coffee | 21 ++++-----
.../app/coffee/UpdateManager.coffee | 7 ++-
.../config/settings.defaults.coffee | 1 -
.../DocumentManagerTests.coffee | 35 +++++++--------
.../HistoryManager/HistoryManagerTests.coffee | 5 +--
.../HttpController/HttpControllerTests.coffee | 18 ++++----
.../PersistenceManagerTests.coffee | 6 +--
.../ProjectHistoryRedisManagerTests.coffee | 11 +----
.../ProjectManager/updateProjectTests.coffee | 32 ++++++-------
.../RedisManager/RedisManagerTests.coffee | 45 ++++++-------------
.../UpdateManager/UpdateManagerTests.coffee | 12 ++---
17 files changed, 115 insertions(+), 160 deletions(-)

diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee
index 0c50d9b1f3..640ebf63cb 100644
--- a/services/document-updater/app/coffee/DocumentManager.coffee
+++ b/services/document-updater/app/coffee/DocumentManager.coffee
@@ -13,39 +13,39 @@ async = require "async"
MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change
module.exports = DocumentManager =
- getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) ->) ->
+ getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) ->) ->
timer = new Metrics.Timer("docManager.getDoc")
callback = (args...) ->
timer.done()
_callback(args...)
- RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime) -> return callback(error) if error? if !lines? or !version? logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> return callback(error) if error? - logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) -> + logger.log {project_id, doc_id, lines, version, pathname}, "got doc from persistence API" + RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, (error) -> return callback(error) if error? - callback null, lines, version, ranges, pathname, projectHistoryId, null, false + callback null, lines, version, ranges, pathname, null, false else - callback null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true + callback null, lines, version, ranges, pathname, unflushedTime, true - getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) -> + getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") callback = (args...) -> timer.done() _callback(args...) - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> return callback(error) if error? if fromVersion == -1 - callback null, lines, version, [], ranges, pathname, projectHistoryId + callback null, lines, version, [], ranges, pathname else RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) -> return callback(error) if error? - callback null, lines, version, ops, ranges, pathname, projectHistoryId + callback null, lines, version, ops, ranges, pathname setDoc: (project_id, doc_id, newLines, source, user_id, undoing, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.setDoc") @@ -57,7 +57,7 @@ module.exports = DocumentManager = return callback(new Error("No lines were provided to setDoc")) UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> return callback(error) if error? if oldLines? and oldLines.length > 0 and oldLines[0].text? @@ -161,16 +161,16 @@ module.exports = DocumentManager = return callback(error) if error? callback() - renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, _callback = (error) ->) -> + renameDoc: (project_id, doc_id, user_id, update, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.updateProject") callback = (args...) -> timer.done() _callback(args...) 
- RedisManager.renameDoc project_id, doc_id, user_id, update, projectHistoryId, callback + RedisManager.renameDoc project_id, doc_id, user_id, update, callback getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> return callback(error) if error? # if doc was already loaded see if it needs to be flushed if alreadyLoaded and unflushedTime? and (Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE @@ -181,21 +181,21 @@ module.exports = DocumentManager = callback(null, lines, version) resyncDocContents: (project_id, doc_id, callback) -> - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> return callback(error) if error? if !lines? or !version? - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> return callback(error) if error? - ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback + ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback else - ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback + ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback - getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) -> + getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version, ops, ranges, pathname) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback @@ -223,9 +223,9 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.deleteComment, project_id, doc_id, thread_id, callback - renameDocWithLock: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) -> + renameDocWithLock: (project_id, doc_id, user_id, update, callback = (error) ->) -> UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, projectHistoryId, callback + UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, callback resyncDocContentsWithLock: (project_id, doc_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 9d39166681..c1371615f7 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -65,8 +65,8 @@ module.exports = HistoryManager = newBlock = Math.floor(length / 
threshold) return newBlock != prevBlock - resyncProjectHistory: (project_id, projectHistoryId, docs, files, callback) -> - ProjectHistoryRedisManager.queueResyncProjectStructure project_id, projectHistoryId, docs, files, (error) -> + resyncProjectHistory: (project_id, docs, files, callback) -> + ProjectHistoryRedisManager.queueResyncProjectStructure project_id, docs, files, (error) -> return callback(error) if error? DocumentManager = require "./DocumentManager" resyncDoc = (doc, cb) -> diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 93f915d662..ce4d8bf637 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -161,10 +161,10 @@ module.exports = HttpController = updateProject: (req, res, next = (error) ->) -> timer = new Metrics.Timer("http.updateProject") project_id = req.params.project_id - {projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body + {userId, docUpdates, fileUpdates, version} = req.body logger.log {project_id, docUpdates, fileUpdates, version}, "updating project via http" - ProjectManager.updateProjectWithLocks project_id, projectHistoryId, userId, docUpdates, fileUpdates, version, (error) -> + ProjectManager.updateProjectWithLocks project_id, userId, docUpdates, fileUpdates, version, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, "updated project via http" @@ -172,10 +172,10 @@ module.exports = HttpController = resyncProjectHistory: (req, res, next = (error) ->) -> project_id = req.params.project_id - {projectHistoryId, docs, files} = req.body + {docs, files} = req.body logger.log {project_id, docs, files}, "queuing project history resync via http" - HistoryManager.resyncProjectHistory project_id, projectHistoryId, docs, files, (error) -> + HistoryManager.resyncProjectHistory project_id, docs, files, (error) -> return next(error) if error? logger.log {project_id}, "queued project history resync via http" res.send 204 diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 8a43d989a8..bd5ce5239c 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -13,7 +13,7 @@ request = (require("requestretry")).defaults({ MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -44,7 +44,7 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no valid doc version")) if !body.pathname? 
return callback(new Error("web API response had no valid doc pathname")) - return callback null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId + return callback null, body.lines, body.version, body.ranges, body.pathname else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not found: #{url}")) else diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 42b9f16df2..c92b7277f4 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -7,7 +7,7 @@ module.exports = ProjectHistoryRedisManager = queueOps: (project_id, ops..., callback) -> rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback - queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) -> + queueRenameEntity: (project_id, entity_type, entity_id, user_id, projectUpdate, callback) -> projectUpdate = pathname: projectUpdate.pathname new_pathname: projectUpdate.newPathname @@ -15,7 +15,6 @@ module.exports = ProjectHistoryRedisManager = user_id: user_id ts: new Date() version: projectUpdate.version - projectHistoryId: projectHistoryId projectUpdate[entity_type] = entity_id logger.log {project_id, projectUpdate}, "queue rename operation to project-history" @@ -23,7 +22,7 @@ module.exports = ProjectHistoryRedisManager = ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueAddEntity: (project_id, projectHistoryId, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) -> + queueAddEntity: (project_id, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) -> projectUpdate = pathname: projectUpdate.pathname docLines: projectUpdate.docLines @@ -32,7 +31,6 @@ module.exports = ProjectHistoryRedisManager = user_id: user_id ts: new Date() version: projectUpdate.version - projectHistoryId: projectHistoryId projectUpdate[entity_type] = entitiy_id logger.log {project_id, projectUpdate}, "queue add operation to project-history" @@ -40,23 +38,21 @@ module.exports = ProjectHistoryRedisManager = ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueResyncProjectStructure: (project_id, projectHistoryId, docs, files, callback) -> + queueResyncProjectStructure: (project_id, docs, files, callback) -> logger.log {project_id, docs, files}, "queue project structure resync" projectUpdate = resyncProjectStructure: { docs, files } - projectHistoryId: projectHistoryId meta: ts: new Date() jsonUpdate = JSON.stringify projectUpdate ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueResyncDocContent: (project_id, projectHistoryId, doc_id, lines, version, pathname, callback) -> + queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) -> logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync" projectUpdate = resyncDocContent: content: lines.join("\n"), version: version - projectHistoryId: projectHistoryId path: pathname doc: doc_id meta: diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index cbf7bb661b..eb7acaede1 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -105,7 +105,7 @@ module.exports =
ProjectManager = clearProjectState: (project_id, callback = (error) ->) -> RedisManager.clearProjectState project_id, callback - updateProjectWithLocks: (project_id, projectHistoryId, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) -> + updateProjectWithLocks: (project_id, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) -> timer = new Metrics.Timer("projectManager.updateProject") callback = (args...) -> timer.done() @@ -120,11 +120,11 @@ module.exports = ProjectManager = doc_id = projectUpdate.id projectUpdate.version = "#{project_version}.#{project_subversion++}" if projectUpdate.docLines? - ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'doc', doc_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueAddEntity project_id, 'doc', doc_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) else - DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, projectHistoryId, (error, count) -> + DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) @@ -132,11 +132,11 @@ module.exports = ProjectManager = file_id = projectUpdate.id projectUpdate.version = "#{project_version}.#{project_subversion++}" if projectUpdate.url? - ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueAddEntity project_id, 'file', file_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) else - ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueRenameEntity project_id, 'file', file_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index a940970176..382ff2b502 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -36,7 +36,7 @@ historyKeys = Settings.redis.history.key_schema module.exports = RedisManager = rclient: rclient - putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, projectHistoryId, _callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> timer.done() @@ -47,7 +47,7 @@ module.exports = RedisManager = logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message return callback(error) docHash = RedisManager._computeHash(docLines) - logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis" + logger.log {project_id, doc_id, version, docHash, pathname}, "putting doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> if error? logger.error {err: error, doc_id, project_id}, error.message @@ -62,7 +62,6 @@ module.exports = RedisManager = else multi.del keys.ranges(doc_id:doc_id) multi.set keys.pathname(doc_id:doc_id), pathname - multi.set keys.projectHistoryId(doc_id:doc_id), projectHistoryId multi.exec (error, result) -> return callback(error) if error? 
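# `result` collects one reply per command queued on the multi; per the unit
# tests below, its first entry is the hash the redis server computed for the
# stored doc lines, which is compared against the locally computed docHash to
# detect corrupted writes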
# check the hash computed on the redis server @@ -89,7 +88,6 @@ module.exports = RedisManager = multi.del keys.docHash(doc_id:doc_id) multi.del keys.ranges(doc_id:doc_id) multi.del keys.pathname(doc_id:doc_id) - multi.del keys.projectHistoryId(doc_id:doc_id) multi.del keys.unflushedTime(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? @@ -110,7 +108,7 @@ module.exports = RedisManager = clearProjectState: (project_id, callback = (error) ->) -> rclient.del keys.projectState(project_id:project_id), callback - getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, unflushedTime) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) @@ -119,9 +117,8 @@ module.exports = RedisManager = multi.get keys.projectKey(doc_id:doc_id) multi.get keys.ranges(doc_id:doc_id) multi.get keys.pathname(doc_id:doc_id) - multi.get keys.projectHistoryId(doc_id:doc_id) multi.get keys.unflushedTime(doc_id:doc_id) - multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime])-> + multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, unflushedTime])-> timeSpan = timer.done() return callback(error) if error? # check if request took too long and bail out. only do this for @@ -150,14 +147,14 @@ module.exports = RedisManager = # doc is not in redis, bail out if !docLines? - return callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime + return callback null, docLines, version, ranges, pathname, unflushedTime # doc should be in project set, check if missing (workaround for missing docs from putDoc) rclient.sadd keys.docsInProject(project_id:project_id), doc_id, (error, result) -> return callback(error) if error? if result isnt 0 # doc should already be in set logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" - callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime + callback null, docLines, version, ranges, pathname, unflushedTime getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -275,16 +272,16 @@ module.exports = RedisManager = else callback null, docUpdateCount - renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) -> + renameDoc: (project_id, doc_id, user_id, update, callback = (error) ->) -> RedisManager.getDoc project_id, doc_id, (error, lines, version) -> return callback(error) if error? if lines? and version? rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) -> return callback(error) if error? 
- ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback + ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback else - ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback + ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback clearUnflushedTime: (doc_id, callback = (error) ->) -> rclient.del keys.unflushedTime(doc_id:doc_id), callback diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index bfcfb806ca..02f0a1b8d1 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -71,7 +71,7 @@ module.exports = UpdateManager = profile = new Profiler("applyUpdate", {project_id, doc_id}) UpdateManager._sanitizeUpdate update profile.log("sanitizeUpdate") - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> profile.log("getDoc") return callback(error) if error? if !lines? or !version? @@ -80,7 +80,7 @@ module.exports = UpdateManager = profile.log("sharejs.applyUpdate") return callback(error) if error? RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> - UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines) + UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, lines) profile.log("RangesManager.applyUpdate") return callback(error) if error? RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) -> @@ -130,13 +130,12 @@ module.exports = UpdateManager = op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") return update - _addProjectHistoryMetadataToOps: (updates, pathname, projectHistoryId, lines) -> + _addProjectHistoryMetadataToOps: (updates, pathname, lines) -> doc_length = _.reduce lines, (chars, line) -> chars + line.length, 0 doc_length += lines.length - 1 # count newline characters updates.forEach (update) -> - update.projectHistoryId = projectHistoryId update.meta ||= {} update.meta.pathname = pathname update.meta.doc_length = doc_length diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 18f2b1570b..1c7ebf283e 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -46,7 +46,6 @@ module.exports = docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" pathname: ({doc_id}) -> "Pathname:#{doc_id}" - projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" # cluster: [{ diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index d7cd18630a..16e58a81a7 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ 
-25,7 +25,6 @@ describe "DocumentManager", -> "./UpdateManager": @UpdateManager = {} "./RangesManager": @RangesManager = {} @project_id = "project-id-123" - @projectHistoryId = "history-id-123" @doc_id = "doc-id-123" @user_id = 1234 @callback = sinon.stub() @@ -112,7 +111,7 @@ describe "DocumentManager", -> describe "getDocAndRecentOps", -> describe "with a previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback @@ -127,14 +126,14 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ops, @ranges, @pathname, @projectHistoryId).should.equal true + @callback.calledWith(null, @lines, @version, @ops, @ranges, @pathname).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true describe "with no previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback @@ -147,7 +146,7 @@ describe "DocumentManager", -> @RedisManager.getPreviousDocOps.called.should.equal false it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, [], @ranges, @pathname, @projectHistoryId).should.equal true + @callback.calledWith(null, @lines, @version, [], @ranges, @pathname).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -155,7 +154,7 @@ describe "DocumentManager", -> describe "getDoc", -> describe "when the doc exists in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @unflushedTime) @DocumentManager.getDoc @project_id, @doc_id, @callback it "should get the doc from Redis", -> @@ -164,7 +163,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, true).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @pathname, @unflushedTime, true).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -172,7 +171,7 @@ describe "DocumentManager", -> describe "when the doc does not exist in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) @RedisManager.putDocInMemory = sinon.stub().yields() @DocumentManager.getDoc @project_id, @doc_id, @callback @@ -188,11 +187,11 @@ describe 
"DocumentManager", -> it "should set the doc in Redis", -> @RedisManager.putDocInMemory - .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId) + .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname) .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, null, false).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @pathname, null, false).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -203,7 +202,7 @@ describe "DocumentManager", -> @beforeLines = ["before", "lines"] @afterLines = ["after", "lines"] @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @pathname, @unflushedTime, true) @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) @@ -403,7 +402,7 @@ describe "DocumentManager", -> describe "when the doc is in Redis", -> describe "and has changes to be flushed", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @projectHistoryId, @pathname, Date.now() - 1e9, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, Date.now() - 1e9, true) @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback it "should get the doc", -> @@ -460,11 +459,11 @@ describe "DocumentManager", -> describe "successfully", -> beforeEach -> - @DocumentManager.renameDoc @project_id, @doc_id, @user_id, @update, @projectHistoryId, @callback + @DocumentManager.renameDoc @project_id, @doc_id, @user_id, @update, @callback it "should rename the document", -> @RedisManager.renameDoc - .calledWith(@project_id, @doc_id, @user_id, @update, @projectHistoryId) + .calledWith(@project_id, @doc_id, @user_id, @update) .should.equal true it "should call the callback", -> @@ -473,7 +472,7 @@ describe "DocumentManager", -> describe "resyncDocContents", -> describe "when doc is loaded in redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() @DocumentManager.resyncDocContents @project_id, @doc_id, @callback @@ -484,13 +483,13 @@ describe "DocumentManager", -> it "queues a resync doc content update", -> @ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(@project_id, @projectHistoryId, @doc_id, @lines, @version, @pathname, @callback) + .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) .should.equal true describe "when doc is not loaded in redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() 
@DocumentManager.resyncDocContents @project_id, @doc_id, @callback @@ -506,5 +505,5 @@ describe "DocumentManager", -> it "queues a resync doc content update", -> @ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(@project_id, @projectHistoryId, @doc_id, @lines, @version, @pathname, @callback) + .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) .should.equal true diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 2233610d28..e207cde99d 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -164,7 +164,6 @@ describe "HistoryManager", -> describe "resyncProjectHistory", -> beforeEach -> - @projectHistoryId = 'history-id-1234' @docs = [ doc: @doc_id path: 'main.tex' @@ -176,11 +175,11 @@ describe "HistoryManager", -> ] @ProjectHistoryRedisManager.queueResyncProjectStructure = sinon.stub().yields() @DocumentManager.resyncDocContentsWithLock = sinon.stub().yields() - @HistoryManager.resyncProjectHistory @project_id, @projectHistoryId, @docs, @files, @callback + @HistoryManager.resyncProjectHistory @project_id, @docs, @files, @callback it "should queue a project structure resync", -> @ProjectHistoryRedisManager.queueResyncProjectStructure - .calledWith(@project_id, @projectHistoryId, @docs, @files) + .calledWith(@project_id, @docs, @files) .should.equal true it "should queue doc content resyncs", -> diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index ab6718c12a..fca1614c2d 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -509,24 +509,23 @@ describe "HttpController", -> describe "updateProject", -> beforeEach -> - @projectHistoryId = "history-id-123" @userId = "user-id-123" @docUpdates = sinon.stub() @fileUpdates = sinon.stub() @version = 1234567 @req = - body: {@projectHistoryId, @userId, @docUpdates, @fileUpdates, @version} + body: {@userId, @docUpdates, @fileUpdates, @version} params: project_id: @project_id describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(5) @HttpController.updateProject(@req, @res, @next) it "should accept the change", -> @ProjectManager.updateProjectWithLocks - .calledWith(@project_id, @projectHistoryId, @userId, @docUpdates, @fileUpdates, @version) + .calledWith(@project_id, @userId, @docUpdates, @fileUpdates, @version) .should.equal true it "should return a successful No Content response", -> @@ -539,7 +538,7 @@ describe "HttpController", -> describe "when an error occurs", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6, new Error("oops")) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(5, new Error("oops")) @HttpController.updateProject(@req, @res, @next) it "should call next with the error", -> @@ -549,24 +548,23 @@ describe "HttpController", -> describe "resyncProjectHistory", -> beforeEach -> - @projectHistoryId = "history-id-123" @docs = sinon.stub() @files = sinon.stub() @fileUpdates =
sinon.stub() @req = body: - {@projectHistoryId, @docs, @files} + {@docs, @files} params: project_id: @project_id describe "successfully", -> beforeEach -> - @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4) + @HistoryManager.resyncProjectHistory = sinon.stub().callsArg(3) @HttpController.resyncProjectHistory(@req, @res, @next) it "should accept the change", -> @HistoryManager.resyncProjectHistory - .calledWith(@project_id, @projectHistoryId, @docs, @files) + .calledWith(@project_id, @docs, @files) .should.equal true it "should return a successful No Content response", -> @@ -576,7 +574,7 @@ describe "HttpController", -> describe "when an error occurs", -> beforeEach -> - @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4, new Error("oops")) + @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(3, new Error("oops")) @HttpController.resyncProjectHistory(@req, @res, @next) it "should call next with the error", -> diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index 0f8ad59167..937dcf3a77 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -17,7 +17,6 @@ describe "PersistenceManager", -> done: sinon.stub() "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} @project_id = "project-id-123" - @projectHistoryId = "history-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] @version = 42 @@ -37,7 +36,6 @@ describe "PersistenceManager", -> version: @version, ranges: @ranges pathname: @pathname, - projectHistoryId: @projectHistoryId } describe "with a successful response from the web api", -> @@ -62,9 +60,7 @@ describe "PersistenceManager", -> .should.equal true it "should call the callback with the doc lines, version and ranges", -> - @callback - .calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId) - .should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @pathname).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index 349d3623e6..8ad0f53b5b 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -8,7 +8,6 @@ tk = require "timekeeper" describe "ProjectHistoryRedisManager", -> beforeEach -> @project_id = "project-id-123" - @projectHistoryId = "history-id-123" @user_id = "user-id-123" @callback = sinon.stub() @rclient = {} @@ -51,10 +50,9 @@ describe "ProjectHistoryRedisManager", -> @rawUpdate = pathname: @pathname = '/old' newPathname: @newPathname = '/new' - version: @version = 2 @ProjectHistoryRedisManager.queueOps = sinon.stub() - @ProjectHistoryRedisManager.queueRenameEntity @project_id, @projectHistoryId, 'file', @file_id, @user_id, @rawUpdate, @callback + @ProjectHistoryRedisManager.queueRenameEntity @project_id, 'file', @file_id, @user_id, @rawUpdate, @callback it "should queue an update", -> update = @@ -63,8
+61,6 @@ describe "ProjectHistoryRedisManager", -> meta: user_id: @user_id ts: new Date() - version: @version - projectHistoryId: @projectHistoryId file: @file_id @ProjectHistoryRedisManager.queueOps @@ -79,11 +75,10 @@ describe "ProjectHistoryRedisManager", -> @rawUpdate = pathname: @pathname = '/old' docLines: @docLines = 'a\nb' - version: @version = 2 url: @url = 'filestore.example.com' @ProjectHistoryRedisManager.queueOps = sinon.stub() - @ProjectHistoryRedisManager.queueAddEntity @project_id, @projectHistoryId, 'doc', @doc_id, @user_id, @rawUpdate, @callback + @ProjectHistoryRedisManager.queueAddEntity @project_id, 'doc', @doc_id, @user_id, @rawUpdate, @callback it "should queue an update", -> update = @@ -93,8 +88,6 @@ describe "ProjectHistoryRedisManager", -> meta: user_id: @user_id ts: new Date() - version: @version - projectHistoryId: @projectHistoryId doc: @doc_id @ProjectHistoryRedisManager.queueOps diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index 3ed0109be7..96d2ccc07b 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -18,7 +18,6 @@ describe "ProjectManager", -> done: sinon.stub() @project_id = "project-id-123" - @projectHistoryId = 'history-id-123' @user_id = "user-id-123" @version = 1234567 @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false) @@ -28,6 +27,7 @@ describe "ProjectManager", -> describe "updateProjectWithLocks", -> describe "rename operations", -> beforeEach -> + @firstDocUpdate = id: 1 pathname: 'foo' @@ -47,22 +47,22 @@ describe "ProjectManager", -> describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should rename the docs in the updates", -> firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) @DocumentManager.renameDocWithLock - .calledWith(@project_id, @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion, @projectHistoryId) + .calledWith(@project_id, @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) .should.equal true @DocumentManager.renameDocWithLock - .calledWith(@project_id, @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion, @projectHistoryId) + .calledWith(@project_id, @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) .should.equal true it "should rename the files in the updates", -> firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) @ProjectHistoryRedisManager.queueRenameEntity - .calledWith(@project_id, @projectHistoryId, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) + .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true it "should not flush the history", -> @@ -77,7 +77,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @DocumentManager.renameDocWithLock = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + 
@ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -86,7 +86,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -94,7 +94,7 @@ describe "ProjectManager", -> describe "with enough ops to flush", -> beforeEach -> @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should flush the history", -> @HistoryManager.flushProjectChangesAsync @@ -121,26 +121,26 @@ describe "ProjectManager", -> describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should add the docs in the updates", -> firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) @ProjectHistoryRedisManager.queueAddEntity.getCall(0) - .calledWith(@project_id, @projectHistoryId, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) + .calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) .should.equal true @ProjectHistoryRedisManager.queueAddEntity.getCall(1) - .calledWith(@project_id, @projectHistoryId, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) + .calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) .should.equal true it "should add the files in the updates", -> firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) secondFileUpdateWithVersion = _.extend({}, @secondFileUpdate, {version: "#{@version}.3"}) @ProjectHistoryRedisManager.queueAddEntity.getCall(2) - .calledWith(@project_id, @projectHistoryId, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) + .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true @ProjectHistoryRedisManager.queueAddEntity.getCall(3) - .calledWith(@project_id, @projectHistoryId, 'file', @secondFileUpdate.id, @user_id, secondFileUpdateWithVersion) + .calledWith(@project_id, 'file', @secondFileUpdate.id, @user_id, secondFileUpdateWithVersion) .should.equal true it "should not flush the history", -> @@ -155,7 +155,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the 
callback with the error", -> @callback.calledWith(@error).should.equal true @@ -164,7 +164,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -172,7 +172,7 @@ describe "ProjectManager", -> describe "with enough ops to flush", -> beforeEach -> @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should flush the history", -> @HistoryManager.flushProjectChangesAsync diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index e2263c21f4..a3b28d00cb 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -33,7 +33,6 @@ describe "RedisManager", -> docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" pathname: ({doc_id}) -> "Pathname:#{doc_id}" - projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" history: @@ -60,7 +59,6 @@ describe "RedisManager", -> @doc_id = "doc-id-123" @project_id = "project-id-123" - @projectHistoryId = "history-id-123" @callback = sinon.stub() describe "getDoc", -> @@ -74,7 +72,7 @@ describe "RedisManager", -> @unflushed_time = 12345 @pathname = '/a/b/c.tex' @multi.get = sinon.stub() - @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @projectHistoryId, @unflushed_time]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @unflushed_time]) @rclient.sadd = sinon.stub().yields(null, 0) describe "successfully", -> @@ -111,11 +109,6 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}") .should.equal true - it "should get the projectHistoryId", -> - @multi.get - .calledWith("ProjectHistoryId:#{@doc_id}") - .should.equal true - it "should check if the document is in the DocsIn set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}") @@ -123,7 +116,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @unflushed_time) .should.equal true it 'should not log any errors', -> @@ -132,7 +125,7 @@ describe "RedisManager", -> describe "when the document is not present", -> beforeEach -> - @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null, null]) + @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null]) @rclient.sadd = sinon.stub().yields() @RedisManager.getDoc @project_id, 
@doc_id, @callback @@ -143,7 +136,7 @@ describe "RedisManager", -> it 'should return an empty result', -> @callback - .calledWithExactly(null, null, 0, {}, null, null, null) + .calledWithExactly(null, null, 0, {}, null, null) .should.equal true it 'should not log any errors', -> @@ -161,7 +154,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @unflushed_time) .should.equal true describe "with a corrupted document", -> @@ -539,7 +532,7 @@ describe "RedisManager", -> describe "with non-empty ranges", -> beforeEach (done) -> - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done it "should set the lines", -> @multi.eval @@ -571,11 +564,6 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}", @pathname) .should.equal true - it "should set the projectHistoryId for the doc", -> - @multi.set - .calledWith("ProjectHistoryId:#{@doc_id}", @projectHistoryId) - .should.equal true - it "should add the doc_id to the project set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}", @doc_id) @@ -587,7 +575,7 @@ describe "RedisManager", -> describe "with empty ranges", -> beforeEach (done) -> - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, @projectHistoryId, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, done it "should delete the ranges key", -> @multi.del @@ -602,7 +590,7 @@ describe "RedisManager", -> describe "with a corrupted write", -> beforeEach (done) -> @multi.exec = sinon.stub().callsArgWith(0, null, ["INVALID-HASH-VALUE"]) - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done it 'should log a hash error', -> @logger.error.calledWith() @@ -612,7 +600,7 @@ describe "RedisManager", -> beforeEach -> @_stringify = JSON.stringify @JSON.stringify = () -> return '["bad bytes! 
\u0000 <- here"]' - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, @callback + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @callback afterEach -> @JSON.stringify = @_stringify @@ -626,7 +614,7 @@ describe "RedisManager", -> describe "with ranges that are too big", -> beforeEach -> @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")) - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, @callback + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @callback it 'should log an error', -> @logger.error.called.should.equal true @@ -676,11 +664,6 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}") .should.equal true - it "should delete the pathname for the doc", -> - @multi.del - .calledWith("ProjectHistoryId:#{@doc_id}") - .should.equal true - describe "clearProjectState", -> beforeEach (done) -> @rclient.del = sinon.stub().callsArg(1) @@ -704,7 +687,7 @@ describe "RedisManager", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version') @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() - @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @projectHistoryId, @callback + @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback it "update the cached pathname", -> @rclient.set @@ -713,19 +696,19 @@ describe "RedisManager", -> it "should queue an update", -> @ProjectHistoryRedisManager.queueRenameEntity - .calledWithExactly(@project_id, @projectHistoryId, 'doc', @doc_id, @userId, @update, @callback) + .calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback) .should.equal true describe "the document is not cached in redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() - @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @projectHistoryId, @callback + @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback it "does not update the cached pathname", -> @rclient.set.called.should.equal false it "should queue an update", -> @ProjectHistoryRedisManager.queueRenameEntity - .calledWithExactly(@project_id, @projectHistoryId, 'doc', @doc_id, @userId, @update, @callback) + .calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback) .should.equal true diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 383bd1848e..e91c35f7e6 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -7,7 +7,6 @@ SandboxedModule = require('sandboxed-module') describe "UpdateManager", -> beforeEach -> @project_id = "project-id-123" - @projectHistoryId = "history-id-123" @doc_id = "document-id-123" @callback = sinon.stub() @UpdateManager = SandboxedModule.require modulePath, requires: @@ -168,7 +167,7 @@ describe "UpdateManager", -> @doc_ops_length = sinon.stub() @project_ops_length = sinon.stub() @pathname = '/a/b/c.tex' - @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges, @pathname, @projectHistoryId) + @DocumentManager.getDoc = 
sinon.stub().yields(null, @lines, @version, @ranges, @pathname) @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields(null, @doc_ops_length, @project_ops_length) @@ -197,7 +196,7 @@ describe "UpdateManager", -> it "should add metadata to the ops" , -> @UpdateManager._addProjectHistoryMetadataToOps - .calledWith(@appliedOps, @pathname, @projectHistoryId, @lines) + .calledWith(@appliedOps, @pathname, @lines) .should.equal true it "should push the applied ops into the history queue", -> @@ -240,7 +239,7 @@ describe "UpdateManager", -> @callback.calledWith(@error).should.equal true describe "_addProjectHistoryMetadataToOps", -> - it "should add projectHistoryId, pathname and doc_length metadata to the ops", -> + it "should add pathname and doc_length metadata to the ops", -> lines = [ 'some' 'test' @@ -251,23 +250,20 @@ describe "UpdateManager", -> { v: 45, op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] }, { v: 49, op: [{i: "penguin", p: 18}] } ] - @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, @projectHistoryId, lines) + @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, lines) appliedOps.should.deep.equal [{ - projectHistoryId: @projectHistoryId v: 42 op: [{i: "foo", p: 4}, { i: "bar", p: 6 }] meta: pathname: @pathname doc_length: 14 }, { - projectHistoryId: @projectHistoryId v: 45 op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] meta: pathname: @pathname doc_length: 20 # 14 + 'foo' + 'bar' }, { - projectHistoryId: @projectHistoryId v: 49 op: [{i: "penguin", p: 18}] meta: From 1b63141e4915ae971085d5b1323d01e1b29b71d2 Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Mon, 23 Apr 2018 12:08:04 +0100 Subject: [PATCH 409/769] Revert "Revert "Add projectHistoryId to updates"" --- .../app/coffee/DocumentManager.coffee | 44 +++++++++--------- .../app/coffee/HistoryManager.coffee | 4 +- .../app/coffee/HttpController.coffee | 8 ++-- .../app/coffee/PersistenceManager.coffee | 4 +- .../coffee/ProjectHistoryRedisManager.coffee | 12 +++-- .../app/coffee/ProjectManager.coffee | 10 ++--- .../app/coffee/RedisManager.coffee | 21 +++++---- .../app/coffee/UpdateManager.coffee | 7 +-- .../config/settings.defaults.coffee | 1 + .../DocumentManagerTests.coffee | 35 ++++++++------- .../HistoryManager/HistoryManagerTests.coffee | 5 ++- .../HttpController/HttpControllerTests.coffee | 18 ++++---- .../PersistenceManagerTests.coffee | 6 ++- .../ProjectHistoryRedisManagerTests.coffee | 11 ++++- .../ProjectManager/updateProjectTests.coffee | 32 ++++++------- .../RedisManager/RedisManagerTests.coffee | 45 +++++++++++++------ .../UpdateManager/UpdateManagerTests.coffee | 12 +++-- 17 files changed, 160 insertions(+), 115 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 640ebf63cb..0c50d9b1f3 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -13,39 +13,39 @@ async = require "async" MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change module.exports = DocumentManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, 
projectHistoryId, unflushedTime, alreadyLoaded) ->) -> timer = new Metrics.Timer("docManager.getDoc") callback = (args...) -> timer.done() _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) -> return callback(error) if error? if !lines? or !version? logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? - logger.log {project_id, doc_id, lines, version, pathname}, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, (error) -> + logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API" + RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) -> return callback(error) if error? - callback null, lines, version, ranges, pathname, null, false + callback null, lines, version, ranges, pathname, projectHistoryId, null, false else - callback null, lines, version, ranges, pathname, unflushedTime, true + callback null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true - getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname) ->) -> + getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) -> timer = new Metrics.Timer("docManager.getDocAndRecentOps") callback = (args...) -> timer.done() _callback(args...) - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? if fromVersion == -1 - callback null, lines, version, [], ranges, pathname + callback null, lines, version, [], ranges, pathname, projectHistoryId else RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) -> return callback(error) if error? - callback null, lines, version, ops, ranges, pathname + callback null, lines, version, ops, ranges, pathname, projectHistoryId setDoc: (project_id, doc_id, newLines, source, user_id, undoing, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.setDoc") @@ -57,7 +57,7 @@ module.exports = DocumentManager = return callback(new Error("No lines were provided to setDoc")) UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> return callback(error) if error? if oldLines? and oldLines.length > 0 and oldLines[0].text? @@ -161,16 +161,16 @@ module.exports = DocumentManager = return callback(error) if error? callback() - renameDoc: (project_id, doc_id, user_id, update, _callback = (error) ->) -> + renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, _callback = (error) ->) -> timer = new Metrics.Timer("docManager.updateProject") callback = (args...) -> timer.done() _callback(args...) 
- RedisManager.renameDoc project_id, doc_id, user_id, update, callback + RedisManager.renameDoc project_id, doc_id, user_id, update, projectHistoryId, callback getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, unflushedTime, alreadyLoaded) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> return callback(error) if error? # if doc was already loaded see if it needs to be flushed if alreadyLoaded and unflushedTime? and (Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE @@ -181,21 +181,21 @@ module.exports = DocumentManager = callback(null, lines, version) resyncDocContents: (project_id, doc_id, callback) -> - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? if !lines? or !version? - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? - ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback + ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback else - ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback + ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback - getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version, ops, ranges, pathname) ->) -> + getDocAndRecentOpsWithLock: (project_id, doc_id, fromVersion, callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback @@ -223,9 +223,9 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.deleteComment, project_id, doc_id, thread_id, callback - renameDocWithLock: (project_id, doc_id, user_id, update, callback = (error) ->) -> + renameDocWithLock: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) -> UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, callback + UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, projectHistoryId, callback resyncDocContentsWithLock: (project_id, doc_id, callback = (error) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index c1371615f7..9d39166681 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -65,8 +65,8 @@ module.exports = HistoryManager = newBlock = Math.floor(length / 
threshold) return newBlock != prevBlock - resyncProjectHistory: (project_id, docs, files, callback) -> - ProjectHistoryRedisManager.queueResyncProjectStructure project_id, docs, files, (error) -> + resyncProjectHistory: (project_id, projectHistoryId, docs, files, callback) -> + ProjectHistoryRedisManager.queueResyncProjectStructure project_id, projectHistoryId, docs, files, (error) -> return callback(error) if error? DocumentManager = require "./DocumentManager" resyncDoc = (doc, cb) -> diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index ce4d8bf637..93f915d662 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -161,10 +161,10 @@ module.exports = HttpController = updateProject: (req, res, next = (error) ->) -> timer = new Metrics.Timer("http.updateProject") project_id = req.params.project_id - {userId, docUpdates, fileUpdates, version} = req.body + {projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body logger.log {project_id, docUpdates, fileUpdates, version}, "updating project via http" - ProjectManager.updateProjectWithLocks project_id, userId, docUpdates, fileUpdates, version, (error) -> + ProjectManager.updateProjectWithLocks project_id, projectHistoryId, userId, docUpdates, fileUpdates, version, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, "updated project via http" @@ -172,10 +172,10 @@ module.exports = HttpController = resyncProjectHistory: (req, res, next = (error) ->) -> project_id = req.params.project_id - {docs, files} = req.body + {projectHistoryId, docs, files} = req.body logger.log {project_id, docs, files}, "queuing project history resync via http" - HistoryManager.resyncProjectHistory project_id, docs, files, (error) -> + HistoryManager.resyncProjectHistory project_id, projectHistoryId, docs, files, (error) -> return next(error) if error? logger.log {project_id}, "queued project history resync via http" res.send 204 diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index bd5ce5239c..8a43d989a8 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -13,7 +13,7 @@ request = (require("requestretry")).defaults({ MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -44,7 +44,7 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no valid doc version")) if !body.pathname? 
return callback(new Error("web API response had no valid doc pathname")) - return callback null, body.lines, body.version, body.ranges, body.pathname + return callback null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) else diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index c92b7277f4..42b9f16df2 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -7,7 +7,7 @@ module.exports = ProjectHistoryRedisManager = queueOps: (project_id, ops..., callback) -> rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback - queueRenameEntity: (project_id, entity_type, entity_id, user_id, projectUpdate, callback) -> + queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) -> projectUpdate = pathname: projectUpdate.pathname new_pathname: projectUpdate.newPathname @@ -15,6 +15,7 @@ module.exports = ProjectHistoryRedisManager = user_id: user_id ts: new Date() version: projectUpdate.version + projectHistoryId: projectHistoryId projectUpdate[entity_type] = entity_id logger.log {project_id, projectUpdate}, "queue rename operation to project-history" @@ -22,7 +23,7 @@ module.exports = ProjectHistoryRedisManager = ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueAddEntity: (project_id, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) -> + queueAddEntity: (project_id, projectHistoryId, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) -> projectUpdate = pathname: projectUpdate.pathname docLines: projectUpdate.docLines @@ -31,6 +32,7 @@ module.exports = ProjectHistoryRedisManager = user_id: user_id ts: new Date() version: projectUpdate.version + projectHistoryId: projectHistoryId projectUpdate[entity_type] = entitiy_id logger.log {project_id, projectUpdate}, "queue add operation to project-history" @@ -38,21 +40,23 @@ module.exports = ProjectHistoryRedisManager = ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueResyncProjectStructure: (project_id, docs, files, callback) -> + queueResyncProjectStructure: (project_id, projectHistoryId, docs, files, callback) -> logger.log {project_id, docs, files}, "queue project structure resync" projectUpdate = resyncProjectStructure: { docs, files } + projectHistoryId: projectHistoryId meta: ts: new Date() jsonUpdate = JSON.stringify projectUpdate ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback - queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) -> + queueResyncDocContent: (project_id, projectHistoryId, doc_id, lines, version, pathname, callback) -> logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync" projectUpdate = resyncDocContent: content: lines.join("\n"), version: version + projectHistoryId: projectHistoryId path: pathname doc: doc_id meta: diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index eb7acaede1..cbf7bb661b 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -105,7 +105,7 @@ module.exports = 
ProjectManager = clearProjectState: (project_id, callback = (error) ->) -> RedisManager.clearProjectState project_id, callback - updateProjectWithLocks: (project_id, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) -> + updateProjectWithLocks: (project_id, projectHistoryId, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) -> timer = new Metrics.Timer("projectManager.updateProject") callback = (args...) -> timer.done() @@ -120,11 +120,11 @@ module.exports = ProjectManager = doc_id = projectUpdate.id projectUpdate.version = "#{project_version}.#{project_subversion++}" if projectUpdate.docLines? - ProjectHistoryRedisManager.queueAddEntity project_id, 'doc', doc_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'doc', doc_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) else - DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, (error, count) -> + DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, projectHistoryId, (error, count) -> project_ops_length = count cb(error) @@ -132,11 +132,11 @@ module.exports = ProjectManager = file_id = projectUpdate.id projectUpdate.version = "#{project_version}.#{project_subversion++}" if projectUpdate.url? - ProjectHistoryRedisManager.queueAddEntity project_id, 'file', file_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) else - ProjectHistoryRedisManager.queueRenameEntity project_id, 'file', file_id, user_id, projectUpdate, (error, count) -> + ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) -> project_ops_length = count cb(error) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 382ff2b502..a940970176 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -36,7 +36,7 @@ historyKeys = Settings.redis.history.key_schema module.exports = RedisManager = rclient: rclient - putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, _callback)-> + putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, projectHistoryId, _callback)-> timer = new metrics.Timer("redis.put-doc") callback = (error) -> timer.done() @@ -47,7 +47,7 @@ module.exports = RedisManager = logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message return callback(error) docHash = RedisManager._computeHash(docLines) - logger.log {project_id, doc_id, version, docHash, pathname}, "putting doc in redis" + logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> if error? logger.error {err: error, doc_id, project_id}, error.message @@ -62,6 +62,7 @@ module.exports = RedisManager = else multi.del keys.ranges(doc_id:doc_id) multi.set keys.pathname(doc_id:doc_id), pathname + multi.set keys.projectHistoryId(doc_id:doc_id), projectHistoryId multi.exec (error, result) -> return callback(error) if error? 
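
Note on the MULTI/EXEC pattern used here: the redis client returns one reply per queued command, in queue order, so the multi.exec callbacks destructure the reply array positionally. That is why inserting the new projectHistoryId GET into getDoc below shifts every later slot, and why the "document is not present" test further on gains an extra null. A minimal sketch of the convention (command set trimmed for brevity, so the slot numbers apply to this sketch only):

    # Sketch only: replies arrive in the order the commands were queued,
    # so the destructuring list needs one slot per queued command.
    multi = rclient.multi()
    multi.get keys.pathname(doc_id:doc_id)          # reply slot 0
    multi.get keys.projectHistoryId(doc_id:doc_id)  # reply slot 1
    multi.get keys.unflushedTime(doc_id:doc_id)     # reply slot 2
    multi.exec (error, [pathname, projectHistoryId, unflushedTime]) ->
        return callback(error) if error?
        callback null, pathname, projectHistoryId, unflushedTime
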
# check the hash computed on the redis server @@ -88,6 +89,7 @@ module.exports = RedisManager = multi.del keys.docHash(doc_id:doc_id) multi.del keys.ranges(doc_id:doc_id) multi.del keys.pathname(doc_id:doc_id) + multi.del keys.projectHistoryId(doc_id:doc_id) multi.del keys.unflushedTime(doc_id:doc_id) multi.exec (error) -> return callback(error) if error? @@ -108,7 +110,7 @@ module.exports = RedisManager = clearProjectState: (project_id, callback = (error) ->) -> rclient.del keys.projectState(project_id:project_id), callback - getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, unflushedTime) ->)-> + getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) ->)-> timer = new metrics.Timer("redis.get-doc") multi = rclient.multi() multi.get keys.docLines(doc_id:doc_id) @@ -117,8 +119,9 @@ module.exports = RedisManager = multi.get keys.projectKey(doc_id:doc_id) multi.get keys.ranges(doc_id:doc_id) multi.get keys.pathname(doc_id:doc_id) + multi.get keys.projectHistoryId(doc_id:doc_id) multi.get keys.unflushedTime(doc_id:doc_id) - multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, unflushedTime])-> + multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime])-> timeSpan = timer.done() return callback(error) if error? # check if request took too long and bail out. only do this for @@ -147,14 +150,14 @@ module.exports = RedisManager = # doc is not in redis, bail out if !docLines? - return callback null, docLines, version, ranges, pathname, unflushedTime + return callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime # doc should be in project set, check if missing (workaround for missing docs from putDoc) rclient.sadd keys.docsInProject(project_id:project_id), doc_id, (error, result) -> return callback(error) if error? if result isnt 0 # doc should already be in set logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" - callback null, docLines, version, ranges, pathname, unflushedTime + callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -272,16 +275,16 @@ module.exports = RedisManager = else callback null, docUpdateCount - renameDoc: (project_id, doc_id, user_id, update, callback = (error) ->) -> + renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) -> RedisManager.getDoc project_id, doc_id, (error, lines, version) -> return callback(error) if error? if lines? and version? rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) -> return callback(error) if error? 
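
The renameDoc change in this hunk keeps two code paths that differ only in whether the cached Pathname: key is rewritten; the rename is queued to project-history either way. Condensed into a sketch (same names as the hunk, with the duplicated queue call factored out for readability):

    renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) ->
        RedisManager.getDoc project_id, doc_id, (error, lines, version) ->
            return callback(error) if error?
            queueRename = ->
                ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback
            if lines? and version?
                # doc is cached in redis: keep the Pathname: key in sync first
                rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) ->
                    return callback(error) if error?
                    queueRename()
            else
                queueRename()
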
- ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback + ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback else - ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback + ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback clearUnflushedTime: (doc_id, callback = (error) ->) -> rclient.del keys.unflushedTime(doc_id:doc_id), callback diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 02f0a1b8d1..bfcfb806ca 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -71,7 +71,7 @@ module.exports = UpdateManager = profile = new Profiler("applyUpdate", {project_id, doc_id}) UpdateManager._sanitizeUpdate update profile.log("sanitizeUpdate") - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) -> + DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> profile.log("getDoc") return callback(error) if error? if !lines? or !version? @@ -80,7 +80,7 @@ module.exports = UpdateManager = profile.log("sharejs.applyUpdate") return callback(error) if error? RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> - UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, lines) + UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines) profile.log("RangesManager.applyUpdate") return callback(error) if error? RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) -> @@ -130,12 +130,13 @@ module.exports = UpdateManager = op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") return update - _addProjectHistoryMetadataToOps: (updates, pathname, lines) -> + _addProjectHistoryMetadataToOps: (updates, pathname, projectHistoryId, lines) -> doc_length = _.reduce lines, (chars, line) -> chars + line.length, 0 doc_length += lines.length - 1 # count newline characters updates.forEach (update) -> + update.projectHistoryId = projectHistoryId update.meta ||= {} update.meta.pathname = pathname update.meta.doc_length = doc_length diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 1c7ebf283e..18f2b1570b 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -46,6 +46,7 @@ module.exports = docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" pathname: ({doc_id}) -> "Pathname:#{doc_id}" + projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" # cluster: [{ diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 16e58a81a7..d7cd18630a 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ 
-25,6 +25,7 @@ describe "DocumentManager", -> "./UpdateManager": @UpdateManager = {} "./RangesManager": @RangesManager = {} @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @doc_id = "doc-id-123" @user_id = 1234 @callback = sinon.stub() @@ -111,7 +112,7 @@ describe "DocumentManager", -> describe "getDocAndRecentOps", -> describe "with a previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback @@ -126,14 +127,14 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ops, @ranges, @pathname).should.equal true + @callback.calledWith(null, @lines, @version, @ops, @ranges, @pathname, @projectHistoryId).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true describe "with no previous version specified", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback @@ -146,7 +147,7 @@ describe "DocumentManager", -> @RedisManager.getPreviousDocOps.called.should.equal false it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, [], @ranges, @pathname).should.equal true + @callback.calledWith(null, @lines, @version, [], @ranges, @pathname, @projectHistoryId).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -154,7 +155,7 @@ describe "DocumentManager", -> describe "getDoc", -> describe "when the doc exists in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @unflushedTime) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime) @DocumentManager.getDoc @project_id, @doc_id, @callback it "should get the doc from Redis", -> @@ -163,7 +164,7 @@ describe "DocumentManager", -> .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, @pathname, @unflushedTime, true).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, true).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -171,7 +172,7 @@ describe "DocumentManager", -> describe "when the doc does not exist in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @RedisManager.putDocInMemory = sinon.stub().yields() @DocumentManager.getDoc @project_id, @doc_id, @callback @@ -187,11 +188,11 @@ describe 
"DocumentManager", -> it "should set the doc in Redis", -> @RedisManager.putDocInMemory - .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname) + .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId) .should.equal true it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, @pathname, null, false).should.equal true + @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, null, false).should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true @@ -202,7 +203,7 @@ describe "DocumentManager", -> @beforeLines = ["before", "lines"] @afterLines = ["after", "lines"] @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @pathname, @unflushedTime, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, true) @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) @@ -402,7 +403,7 @@ describe "DocumentManager", -> describe "when the doc is in Redis", -> describe "and has changes to be flushed", -> beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, Date.now() - 1e9, true) + @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @projectHistoryId, @pathname, Date.now() - 1e9, true) @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback it "should get the doc", -> @@ -459,11 +460,11 @@ describe "DocumentManager", -> describe "successfully", -> beforeEach -> - @DocumentManager.renameDoc @project_id, @doc_id, @user_id, @update, @callback + @DocumentManager.renameDoc @project_id, @doc_id, @user_id, @update, @projectHistoryId, @callback it "should rename the document", -> @RedisManager.renameDoc - .calledWith(@project_id, @doc_id, @user_id, @update) + .calledWith(@project_id, @doc_id, @user_id, @update, @projectHistoryId) .should.equal true it "should call the callback", -> @@ -472,7 +473,7 @@ describe "DocumentManager", -> describe "resyncDocContents", -> describe "when doc is loaded in redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() @DocumentManager.resyncDocContents @project_id, @doc_id, @callback @@ -483,13 +484,13 @@ describe "DocumentManager", -> it "queues a resync doc content update", -> @ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) + .calledWith(@project_id, @projectHistoryId, @doc_id, @lines, @version, @pathname, @callback) .should.equal true describe "when doc is not loaded in redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() 
@DocumentManager.resyncDocContents @project_id, @doc_id, @callback @@ -505,5 +506,5 @@ describe "DocumentManager", -> it "queues a resync doc content update", -> @ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback) + .calledWith(@project_id, @projectHistoryId, @doc_id, @lines, @version, @pathname, @callback) .should.equal true diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index e207cde99d..2233610d28 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -164,6 +164,7 @@ describe "HistoryManager", -> describe "resyncProjectHistory", -> beforeEach -> + @projectHistoryId = 'history-id-1234' @docs = [ doc: @doc_id path: 'main.tex' @@ -175,11 +176,11 @@ describe "HistoryManager", -> ] @ProjectHistoryRedisManager.queueResyncProjectStructure = sinon.stub().yields() @DocumentManager.resyncDocContentsWithLock = sinon.stub().yields() - @HistoryManager.resyncProjectHistory @project_id, @docs, @files, @callback + @HistoryManager.resyncProjectHistory @project_id, @projectHistoryId, @docs, @files, @callback it "should queue a project structure reync", -> @ProjectHistoryRedisManager.queueResyncProjectStructure - .calledWith(@project_id, @docs, @files) + .calledWith(@project_id, @projectHistoryId, @docs, @files) .should.equal true it "should queue doc content reyncs", -> diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index fca1614c2d..ab6718c12a 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -509,23 +509,24 @@ describe "HttpController", -> describe "updateProject", -> beforeEach -> + @projectHistoryId = "history-id-123" @userId = "user-id-123" @docUpdates = sinon.stub() @fileUpdates = sinon.stub() @version = 1234567 @req = - body: {@userId, @docUpdates, @fileUpdates, @version} + body: {@projectHistoryId, @userId, @docUpdates, @fileUpdates, @version} params: project_id: @project_id describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(5) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6) @HttpController.updateProject(@req, @res, @next) it "should accept the change", -> @ProjectManager.updateProjectWithLocks - .calledWith(@project_id, @userId, @docUpdates, @fileUpdates, @version) + .calledWith(@project_id, @projectHistoryId, @userId, @docUpdates, @fileUpdates, @version) .should.equal true it "should return a successful No Content response", -> @@ -538,7 +539,7 @@ describe "HttpController", -> describe "when an errors occurs", -> beforeEach -> - @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(5, new Error("oops")) + @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6, new Error("oops")) @HttpController.updateProject(@req, @res, @next) it "should call next with the error", -> @@ -548,23 +549,24 @@ describe "HttpController", -> describe "resyncProjectHistory", -> beforeEach -> + @projectHistoryId = "history-id-123" @docs = sinon.stub() @files = sinon.stub() @fileUpdates = 
sinon.stub() @req = body: - {@docs, @files} + {@projectHistoryId, @docs, @files} params: project_id: @project_id describe "successfully", -> beforeEach -> - @HistoryManager.resyncProjectHistory = sinon.stub().callsArg(3) + @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4) @HttpController.resyncProjectHistory(@req, @res, @next) it "should accept the change", -> @HistoryManager.resyncProjectHistory - .calledWith(@project_id, @docs, @files) + .calledWith(@project_id, @projectHistoryId, @docs, @files) .should.equal true it "should return a successful No Content response", -> @@ -574,7 +576,7 @@ describe "HttpController", -> describe "when an errors occurs", -> beforeEach -> - @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(3, new Error("oops")) + @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4, new Error("oops")) @HttpController.resyncProjectHistory(@req, @res, @next) it "should call next with the error", -> diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index 937dcf3a77..0f8ad59167 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -17,6 +17,7 @@ describe "PersistenceManager", -> done: sinon.stub() "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @doc_id = "doc-id-123" @lines = ["one", "two", "three"] @version = 42 @@ -36,6 +37,7 @@ describe "PersistenceManager", -> version: @version, ranges: @ranges pathname: @pathname, + projectHistoryId: @projectHistoryId } describe "with a successful response from the web api", -> @@ -60,7 +62,9 @@ describe "PersistenceManager", -> .should.equal true it "should call the callback with the doc lines, version and ranges", -> - @callback.calledWith(null, @lines, @version, @ranges, @pathname).should.equal true + @callback + .calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId) + .should.equal true it "should time the execution", -> @Metrics.Timer::done.called.should.equal true diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index 8ad0f53b5b..349d3623e6 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -8,6 +8,7 @@ tk = require "timekeeper" describe "ProjectHistoryRedisManager", -> beforeEach -> @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @user_id = "user-id-123" @callback = sinon.stub() @rclient = {} @@ -50,9 +51,10 @@ describe "ProjectHistoryRedisManager", -> @rawUpdate = pathname: @pathname = '/old' newPathname: @newPathname = '/new' + version: @version = 2 @ProjectHistoryRedisManager.queueOps = sinon.stub() - @ProjectHistoryRedisManager.queueRenameEntity @project_id, 'file', @file_id, @user_id, @rawUpdate, @callback + @ProjectHistoryRedisManager.queueRenameEntity @project_id, @projectHistoryId, 'file', @file_id, @user_id, @rawUpdate, @callback it "should queue an update", -> update = @@ -61,6 
+63,8 @@ describe "ProjectHistoryRedisManager", -> meta: user_id: @user_id ts: new Date() + version: @version + projectHistoryId: @projectHistoryId file: @file_id @ProjectHistoryRedisManager.queueOps @@ -75,10 +79,11 @@ describe "ProjectHistoryRedisManager", -> @rawUpdate = pathname: @pathname = '/old' docLines: @docLines = 'a\nb' + version: @version = 2 url: @url = 'filestore.example.com' @ProjectHistoryRedisManager.queueOps = sinon.stub() - @ProjectHistoryRedisManager.queueAddEntity @project_id, 'doc', @doc_id, @user_id, @rawUpdate, @callback + @ProjectHistoryRedisManager.queueAddEntity @project_id, @projectHistoryId, 'doc', @doc_id, @user_id, @rawUpdate, @callback it "should queue an update", -> update = @@ -88,6 +93,8 @@ describe "ProjectHistoryRedisManager", -> meta: user_id: @user_id ts: new Date() + version: @version + projectHistoryId: @projectHistoryId doc: @doc_id @ProjectHistoryRedisManager.queueOps diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index 96d2ccc07b..3ed0109be7 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -18,6 +18,7 @@ describe "ProjectManager", -> done: sinon.stub() @project_id = "project-id-123" + @projectHistoryId = 'history-id-123' @user_id = "user-id-123" @version = 1234567 @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false) @@ -27,7 +28,6 @@ describe "ProjectManager", -> describe "updateProjectWithLocks", -> describe "rename operations", -> beforeEach -> - @firstDocUpdate = id: 1 pathname: 'foo' @@ -47,22 +47,22 @@ describe "ProjectManager", -> describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should rename the docs in the updates", -> firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) @DocumentManager.renameDocWithLock - .calledWith(@project_id, @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) + .calledWith(@project_id, @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion, @projectHistoryId) .should.equal true @DocumentManager.renameDocWithLock - .calledWith(@project_id, @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) + .calledWith(@project_id, @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion, @projectHistoryId) .should.equal true it "should rename the files in the updates", -> firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) @ProjectHistoryRedisManager.queueRenameEntity - .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true it "should not flush the history", -> @@ -77,7 +77,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @DocumentManager.renameDocWithLock = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, 
@projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -86,7 +86,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -94,7 +94,7 @@ describe "ProjectManager", -> describe "with enough ops to flush", -> beforeEach -> @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should flush the history", -> @HistoryManager.flushProjectChangesAsync @@ -121,26 +121,26 @@ describe "ProjectManager", -> describe "successfully", -> beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should add the docs in the updates", -> firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) @ProjectHistoryRedisManager.queueAddEntity.getCall(0) - .calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) .should.equal true @ProjectHistoryRedisManager.queueAddEntity.getCall(1) - .calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) .should.equal true it "should add the files in the updates", -> firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) secondFileUpdateWithVersion = _.extend({}, @secondFileUpdate, {version: "#{@version}.3"}) @ProjectHistoryRedisManager.queueAddEntity.getCall(2) - .calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) .should.equal true @ProjectHistoryRedisManager.queueAddEntity.getCall(3) - .calledWith(@project_id, 'file', @secondFileUpdate.id, @user_id, secondFileUpdateWithVersion) + .calledWith(@project_id, @projectHistoryId, 'file', @secondFileUpdate.id, @user_id, secondFileUpdateWithVersion) .should.equal true it "should not flush the history", -> @@ -155,7 +155,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> 
@callback.calledWith(@error).should.equal true @@ -164,7 +164,7 @@ describe "ProjectManager", -> beforeEach -> @error = new Error('error') @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true @@ -172,7 +172,7 @@ describe "ProjectManager", -> describe "with enough ops to flush", -> beforeEach -> @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @version, @callback + @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback it "should flush the history", -> @HistoryManager.flushProjectChangesAsync diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index a3b28d00cb..e2263c21f4 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -33,6 +33,7 @@ describe "RedisManager", -> docsInProject: ({project_id}) -> "DocsIn:#{project_id}" ranges: ({doc_id}) -> "Ranges:#{doc_id}" pathname: ({doc_id}) -> "Pathname:#{doc_id}" + projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" history: @@ -59,6 +60,7 @@ describe "RedisManager", -> @doc_id = "doc-id-123" @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @callback = sinon.stub() describe "getDoc", -> @@ -72,7 +74,7 @@ describe "RedisManager", -> @unflushed_time = 12345 @pathname = '/a/b/c.tex' @multi.get = sinon.stub() - @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @unflushed_time]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @projectHistoryId, @unflushed_time]) @rclient.sadd = sinon.stub().yields(null, 0) describe "successfully", -> @@ -109,6 +111,11 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}") .should.equal true + it "should get the projectHistoryId", -> + @multi.get + .calledWith("ProjectHistoryId:#{@doc_id}") + .should.equal true + it "should check if the document is in the DocsIn set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}") @@ -116,7 +123,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time) .should.equal true it 'should not log any errors', -> @@ -125,7 +132,7 @@ describe "RedisManager", -> describe "when the document is not present", -> beforeEach -> - @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null]) + @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null, null]) @rclient.sadd = sinon.stub().yields() @RedisManager.getDoc @project_id, @doc_id, @callback @@ -136,7 +143,7 
@@ describe "RedisManager", -> it 'should return an empty result', -> @callback - .calledWithExactly(null, null, 0, {}, null, null) + .calledWithExactly(null, null, 0, {}, null, null, null) .should.equal true it 'should not log any errors', -> @@ -154,7 +161,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time) .should.equal true describe "with a corrupted document", -> @@ -532,7 +539,7 @@ describe "RedisManager", -> describe "with non-empty ranges", -> beforeEach (done) -> - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done it "should set the lines", -> @multi.eval @@ -564,6 +571,11 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}", @pathname) .should.equal true + it "should set the projectHistoryId for the doc", -> + @multi.set + .calledWith("ProjectHistoryId:#{@doc_id}", @projectHistoryId) + .should.equal true + it "should add the doc_id to the project set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}", @doc_id) @@ -575,7 +587,7 @@ describe "RedisManager", -> describe "with empty ranges", -> beforeEach (done) -> - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, @projectHistoryId, done it "should delete the ranges key", -> @multi.del @@ -590,7 +602,7 @@ describe "RedisManager", -> describe "with a corrupted write", -> beforeEach (done) -> @multi.exec = sinon.stub().callsArgWith(0, null, ["INVALID-HASH-VALUE"]) - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, done + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done it 'should log a hash error', -> @logger.error.calledWith() @@ -600,7 +612,7 @@ describe "RedisManager", -> beforeEach -> @_stringify = JSON.stringify @JSON.stringify = () -> return '["bad bytes! 
\u0000 <- here"]' - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @callback + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, @callback afterEach -> @JSON.stringify = @_stringify @@ -614,7 +626,7 @@ describe "RedisManager", -> describe "with ranges that are too big", -> beforeEach -> @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")) - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @callback + @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, @callback it 'should log an error', -> @logger.error.called.should.equal true @@ -664,6 +676,11 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}") .should.equal true + it "should delete the pathname for the doc", -> + @multi.del + .calledWith("ProjectHistoryId:#{@doc_id}") + .should.equal true + describe "clearProjectState", -> beforeEach (done) -> @rclient.del = sinon.stub().callsArg(1) @@ -687,7 +704,7 @@ describe "RedisManager", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version') @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() - @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback + @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @projectHistoryId, @callback it "update the cached pathname", -> @rclient.set @@ -696,19 +713,19 @@ describe "RedisManager", -> it "should queue an update", -> @ProjectHistoryRedisManager.queueRenameEntity - .calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback) + .calledWithExactly(@project_id, @projectHistoryId, 'doc', @doc_id, @userId, @update, @callback) .should.equal true describe "the document is not cached in redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null) @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() - @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback + @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @projectHistoryId, @callback it "does not update the cached pathname", -> @rclient.set.called.should.equal false it "should queue an update", -> @ProjectHistoryRedisManager.queueRenameEntity - .calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback) + .calledWithExactly(@project_id, @projectHistoryId, 'doc', @doc_id, @userId, @update, @callback) .should.equal true diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index e91c35f7e6..383bd1848e 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -7,6 +7,7 @@ SandboxedModule = require('sandboxed-module') describe "UpdateManager", -> beforeEach -> @project_id = "project-id-123" + @projectHistoryId = "history-id-123" @doc_id = "document-id-123" @callback = sinon.stub() @UpdateManager = SandboxedModule.require modulePath, requires: @@ -167,7 +168,7 @@ describe "UpdateManager", -> @doc_ops_length = sinon.stub() @project_ops_length = sinon.stub() @pathname = '/a/b/c.tex' - @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges, @pathname) + @DocumentManager.getDoc = sinon.stub().yields(null, 
@lines, @version, @ranges, @pathname, @projectHistoryId) @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields(null, @doc_ops_length, @project_ops_length) @@ -196,7 +197,7 @@ describe "UpdateManager", -> it "should add metadata to the ops" , -> @UpdateManager._addProjectHistoryMetadataToOps - .calledWith(@appliedOps, @pathname, @lines) + .calledWith(@appliedOps, @pathname, @projectHistoryId, @lines) .should.equal true it "should push the applied ops into the history queue", -> @@ -239,7 +240,7 @@ describe "UpdateManager", -> @callback.calledWith(@error).should.equal true describe "_addProjectHistoryMetadataToOps", -> - it "should add pathname and doc_length metadata to the ops", -> + it "should add projectHistoryId, pathname and doc_length metadata to the ops", -> lines = [ 'some' 'test' @@ -250,20 +251,23 @@ describe "UpdateManager", -> { v: 45, op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] }, { v: 49, op: [{i: "penguin", p: 18}] } ] - @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, lines) + @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, @projectHistoryId, lines) appliedOps.should.deep.equal [{ + projectHistoryId: @projectHistoryId v: 42 op: [{i: "foo", p: 4}, { i: "bar", p: 6 }] meta: pathname: @pathname doc_length: 14 }, { + projectHistoryId: @projectHistoryId v: 45 op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] meta: pathname: @pathname doc_length: 20 # 14 + 'foo' + 'bar' }, { + projectHistoryId: @projectHistoryId v: 49 op: [{i: "penguin", p: 18}] meta: From af92ca70a17de24e3ff4c82720ae93ca8e698d0e Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Mon, 23 Apr 2018 15:19:06 +0100 Subject: [PATCH 410/769] coerce projectHistoryId to integer after reading from Redis --- services/document-updater/app/coffee/RedisManager.coffee | 3 +++ .../test/unit/coffee/RedisManager/RedisManagerTests.coffee | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index a940970176..25dbafc6e7 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -148,6 +148,9 @@ module.exports = RedisManager = logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc not in project" return callback(new Errors.NotFoundError("document not found")) + if projectHistoryId? + projectHistoryId = parseInt(projectHistoryId) + # doc is not in redis, bail out if !docLines? 
return callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index e2263c21f4..42d06d743a 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -60,7 +60,7 @@ describe "RedisManager", -> @doc_id = "doc-id-123" @project_id = "project-id-123" - @projectHistoryId = "history-id-123" + @projectHistoryId = 123 @callback = sinon.stub() describe "getDoc", -> @@ -74,7 +74,7 @@ describe "RedisManager", -> @unflushed_time = 12345 @pathname = '/a/b/c.tex' @multi.get = sinon.stub() - @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @projectHistoryId, @unflushed_time]) + @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @projectHistoryId.toString(), @unflushed_time]) @rclient.sadd = sinon.stub().yields(null, 0) describe "successfully", -> @@ -111,7 +111,7 @@ describe "RedisManager", -> .calledWith("Pathname:#{@doc_id}") .should.equal true - it "should get the projectHistoryId", -> + it "should get the projectHistoryId as an integer", -> @multi.get .calledWith("ProjectHistoryId:#{@doc_id}") .should.equal true From 64670f272a2a5df423135c391ddc8789c959b615 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 27 Apr 2018 15:45:28 +0100 Subject: [PATCH 411/769] stop OpRangeNotAvailable errors going to sentry --- services/document-updater/app/coffee/DispatchManager.coffee | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index e60d226660..58220be8ee 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -2,6 +2,7 @@ Settings = require('settings-sharelatex') logger = require('logger-sharelatex') Keys = require('./UpdateKeys') redis = require("redis-sharelatex") +Errors = require("./Errors") UpdateManager = require('./UpdateManager') Metrics = require('./Metrics') @@ -23,7 +24,9 @@ module.exports = DispatchManager = # Dispatch this in the background backgroundTask = (cb) -> UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> - logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" if error? + # log everything except OpRangeNotAvailable errors, these are normal + if error? 
and not (error instanceof Errors.OpRangeNotAvailableError) + logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" cb() RateLimiter.run backgroundTask, callback From 37f431f14867858f5e60de6889c3dd86d7977ebf Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 27 Apr 2018 16:03:46 +0100 Subject: [PATCH 412/769] also block "delete component" messages from sentry --- .../document-updater/app/coffee/DispatchManager.coffee | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 58220be8ee..0274901bf1 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -25,8 +25,13 @@ module.exports = DispatchManager = backgroundTask = (cb) -> UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> # log everything except OpRangeNotAvailable errors, these are normal - if error? and not (error instanceof Errors.OpRangeNotAvailableError) - logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" + if error? + # downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry + logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || error.message?.match(/^Delete component/) + if logAsWarning + logger.warn err: error, project_id: project_id, doc_id: doc_id, "error processing update" + else + logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" cb() RateLimiter.run backgroundTask, callback From 7e526395fba569cfb6c776bc1235c2a8685a6cbf Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 30 Apr 2018 09:56:01 +0100 Subject: [PATCH 413/769] fix check for "delete component" message --- services/document-updater/app/coffee/DispatchManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 0274901bf1..93a22bfc07 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -27,7 +27,7 @@ module.exports = DispatchManager = # log everything except OpRangeNotAvailable errors, these are normal if error? # downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry - logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || error.message?.match(/^Delete component/) + logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || ((typeof error is 'string') && error.match(/^Delete component/)) if logAsWarning logger.warn err: error, project_id: project_id, doc_id: doc_id, "error processing update" else logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" From 581963a4af43a543f9939cb95555f595f0a8b1cc Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 23 May 2018 13:52:20 +0100 Subject: [PATCH 414/769] Dockerised env. 
1.1.3 build scripts --- services/document-updater/.dockerignore | 60 +++---------------- services/document-updater/Dockerfile | 22 +++++++ services/document-updater/Jenkinsfile | 36 +---------- services/document-updater/Makefile | 25 ++++++-- .../document-updater/docker-compose.ci.yml | 23 ++++--- services/document-updater/docker-compose.yml | 22 ++++--- services/document-updater/nodemon.json | 5 +- services/document-updater/package.json | 12 ++-- 8 files changed, 87 insertions(+), 118 deletions(-) create mode 100644 services/document-updater/Dockerfile diff --git a/services/document-updater/.dockerignore b/services/document-updater/.dockerignore index ad21f261b4..386f26df30 100644 --- a/services/document-updater/.dockerignore +++ b/services/document-updater/.dockerignore @@ -1,53 +1,9 @@ -compileFolder - -Compiled source # -################### -*.com -*.class -*.dll -*.exe -*.o -*.so - -# Packages # -############ -# it's better to unpack these files and commit the raw source -# git has its own built in compression methods -*.7z -*.dmg -*.gz -*.iso -*.jar -*.rar -*.tar -*.zip - -# Logs and databases # -###################### -*.log -*.sql -*.sqlite - -# OS generated files # -###################### -.DS_Store? -ehthumbs.db -Icon? -Thumbs.db - -/node_modules/* - +node_modules/* +gitrev +.git +.gitignore +.npm +.nvmrc +nodemon.json app.js -app/js/* - -test/unit/js/* -test/acceptance/js/* - -forever/ - -**.swp - -# Redis cluster -**/appendonly.aof -**/dump.rdb -**/nodes.conf +**/js/* diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile new file mode 100644 index 0000000000..aabf01ad91 --- /dev/null +++ b/services/document-updater/Dockerfile @@ -0,0 +1,22 @@ +FROM node:6.9.5 as app + +WORKDIR /app + +#wildcard as some files may not be in all repos +COPY package*.json npm-shrink*.json /app/ + +RUN npm install --quiet + +COPY . /app + + +RUN npm run compile:all + +FROM node:6.9.5 + +COPY --from=app /app /app + +WORKDIR /app +USER node + +CMD ["node","app.js"] diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index ab90aaae29..bc9ba0142f 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -9,34 +9,9 @@ pipeline { } stages { - stage('Install') { - agent { - docker { - image 'node:6.9.5' - args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp" - reuseNode true - } - } + stage('Build') { steps { - // we need to disable logallrefupdates, else git clones - // during the npm install will require git to lookup the - // user id which does not exist in the container's - // /etc/passwd file, causing the clone to fail. - sh 'git config --global core.logallrefupdates false' - sh 'rm -rf node_modules' - sh 'npm install && npm rebuild' - } - } - - stage('Compile') { - agent { - docker { - image 'node:6.9.5' - reuseNode true - } - } - steps { - sh 'npm run compile:all' + sh 'make build' } } @@ -54,12 +29,7 @@ pipeline { stage('Package and publish build') { steps { - sh 'echo ${BUILD_NUMBER} > build_number.txt' - sh 'touch build.tar.gz' // Avoid tar warning about files changing during read - sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .' 
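
The Dockerfile introduced above is a two-stage build: the first stage installs the npm dependencies and compiles the CoffeeScript, then the second stage starts again from a clean node:6.9.5 base and copies the finished /app across, so the published image carries no build-only state. With that in place, the Makefile's build and publish targets below reduce to plain docker commands; roughly (a sketch, with an illustrative branch name and build number substituted for the Make variables):

    # What "make build" / "make publish" expand to for this service:
    docker build --pull --tag gcr.io/csh-gcdm-test/document-updater:master-42 .
    docker push gcr.io/csh-gcdm-test/document-updater:master-42
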
- withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { - s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") - } + sh 'make publish' } } diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 10adbe3866..215b56a9e9 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,13 +1,18 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.1 +# Version: 1.1.3 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) PROJECT_NAME = document-updater DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml -DOCKER_COMPOSE := docker-compose ${DOCKER_COMPOSE_FLAGS} +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker-compose ${DOCKER_COMPOSE_FLAGS} + clean: rm -f app.js @@ -18,12 +23,20 @@ clean: test: test_unit test_acceptance test_unit: - @[ -d test/unit ] && $(DOCKER_COMPOSE) run --rm test_unit -- ${MOCHA_ARGS} || echo "document-updater has no unit tests" + @[ ! -d test/unit ] && echo "document-updater has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit -test_acceptance: test_clean # clear the database before each acceptance test run - @[ -d test/acceptance ] && $(DOCKER_COMPOSE) run --rm test_acceptance -- ${MOCHA_ARGS} || echo "document-updater has no acceptance tests" +test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run + @[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: - $(DOCKER_COMPOSE) down -t 0 + $(DOCKER_COMPOSE) down -v -t 0 + +test_acceptance_pre_run: + @[ ! -f test/acceptance/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run +build: + docker build --pull --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . + +publish: + docker push gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) .PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 9f40ba879d..21c006641e 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,33 +1,32 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.1 +# Version: 1.1.3 version: "2" services: test_unit: - image: node:6.9.5 - volumes: - - .:/app - working_dir: /app - entrypoint: npm run test:unit:_run + image: gcr.io/csh-gcdm-test/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run test_acceptance: - image: node:6.9.5 - volumes: - - .:/app - working_dir: /app + build: . 
+ image: gcr.io/csh-gcdm-test/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER environment: REDIS_HOST: redis MONGO_HOST: mongo + POSTGRES_HOST: postgres depends_on: - - redis - mongo - entrypoint: npm run test:acceptance:_run + - redis + user: node + command: npm run test:acceptance:_run redis: image: redis mongo: image: mongo:3.4 + diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index d55ca56580..f24caa8883 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,33 +1,39 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.1 +# Version: 1.1.3 version: "2" services: test_unit: - image: node:6.9.5 + build: . volumes: - .:/app working_dir: /app - entrypoint: npm run test:unit + environment: + MOCHA_GREP: ${MOCHA_GREP} + command: npm run test:unit + user: node test_acceptance: - image: node:6.9.5 + build: . volumes: - .:/app + working_dir: /app environment: REDIS_HOST: redis MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + user: node depends_on: - - redis - mongo - working_dir: /app - entrypoint: npm run test:acceptance - + - redis + command: npm run test:acceptance redis: image: redis mongo: image: mongo:3.4 + diff --git a/services/document-updater/nodemon.json b/services/document-updater/nodemon.json index 9a3be8d966..98db38d71b 100644 --- a/services/document-updater/nodemon.json +++ b/services/document-updater/nodemon.json @@ -8,9 +8,12 @@ "execMap": { "js": "npm run start" }, + "watch": [ "app/coffee/", - "app.coffee" + "app.coffee", + "config/" ], "ext": "coffee" + } diff --git a/services/document-updater/package.json b/services/document-updater/package.json index ce5aa9d18c..a23ae73427 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -7,14 +7,14 @@ "url": "https://github.com/sharelatex/document-updater-sharelatex.git" }, "scripts": { - "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee", - "start": "npm run compile:app && node app.js", + "compile:app": "([ -e app/coffee ] && coffee $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", + "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- $@", + "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", - "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- $@", - "compile:unit_tests": "[ -e test/unit ] && coffee -o test/unit/js -c test/unit/coffee || echo 'No unit tests to compile'", - "compile:acceptance_tests": "[ -e test/acceptance ] && coffee -o test/acceptance/js -c test/acceptance/coffee || echo 'No acceptance tests to compile'", + "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP", + "compile:unit_tests": "[ ! 
-e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", + "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", "nodemon": "nodemon --config nodemon.json" }, From 62a10f25f27802343bfac135275a8720226eac2e Mon Sep 17 00:00:00 2001 From: Hayden Faulds Date: Tue, 5 Jun 2018 13:54:05 +0100 Subject: [PATCH 415/769] move tk call to before sandboxed module call --- .../coffee/DocumentManager/DocumentManagerTests.coffee | 8 ++++---- .../ProjectHistoryRedisManagerTests.coffee | 4 ++-- .../unit/coffee/RedisManager/RedisManagerTests.coffee | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index d7cd18630a..c52bb4b30d 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -8,6 +8,7 @@ tk = require "timekeeper" describe "DocumentManager", -> beforeEach -> + tk.freeze(new Date()) @DocumentManager = SandboxedModule.require modulePath, requires: "./RedisManager": @RedisManager = {} "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} @@ -35,6 +36,9 @@ describe "DocumentManager", -> @pathname = '/a/b/c.tex' @unflushedTime = Date.now() + afterEach -> + tk.reset() + describe "flushAndDeleteDoc", -> describe "successfully", -> beforeEach -> @@ -394,12 +398,8 @@ describe "DocumentManager", -> describe "getDocAndFlushIfOld", -> beforeEach -> - tk.freeze(new Date()) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) - afterEach -> - tk.reset() - describe "when the doc is in Redis", -> describe "and has changes to be flushed", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index 349d3623e6..aa777b40d6 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -26,8 +26,8 @@ describe "ProjectHistoryRedisManager", -> globals: JSON: @JSON = JSON - afterEach -> - tk.reset() + afterEach -> + tk.reset() describe "queueOps", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 42d06d743a..4f6c24720e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -55,14 +55,14 @@ describe "RedisManager", -> globals: JSON: @JSON = JSON - afterEach -> - tk.reset() - @doc_id = "doc-id-123" @project_id = "project-id-123" @projectHistoryId = 123 @callback = sinon.stub() + afterEach -> + tk.reset() + describe "getDoc", -> beforeEach -> @lines = ["one", "two", "three", "これは"] # include some utf8 From 6237577f875fc3b00b041b06b1b0a1bfce014f42 Mon Sep 17 00:00:00 2001 From: Brian 
Gough Date: Fri, 20 Jul 2018 10:43:31 +0100 Subject: [PATCH 416/769] set a timestamp for the first entry in the projectHistory:Ops queue --- .../coffee/ProjectHistoryRedisManager.coffee | 14 +++++++++-- .../config/settings.defaults.coffee | 1 + .../coffee/ApplyingUpdatesToADocTests.coffee | 23 ++++++++++++++++++- .../ProjectHistoryRedisManagerTests.coffee | 17 +++++++++++--- 4 files changed, 49 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 42b9f16df2..1cc80ea722 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -4,8 +4,18 @@ rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdate logger = require('logger-sharelatex') module.exports = ProjectHistoryRedisManager = - queueOps: (project_id, ops..., callback) -> - rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback + queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) -> + multi = rclient.multi() + # Push the ops onto the project history queue + multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops... + # To record the age of the oldest op on the queue set a timestamp if not + # already present (SETNX). + multi.setnx projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now() + multi.exec (error, result) -> + return callback(error) if error? + # return the number of entries pushed onto the project history queue + callback null, result[0] + queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) -> projectUpdate = diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 18f2b1570b..dfce36f6e3 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -74,6 +74,7 @@ module.exports = project_history: key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" + projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}" # cluster: [{ # port: "7000" # host: "localhost" diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index cb560a26bb..c6aa6fe856 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -33,7 +33,7 @@ describe "Applying updates to a doc", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] sinon.spy MockWebApi, "getDocument" - + @startTime = Date.now() MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> throw error if error? @@ -67,6 +67,27 @@ describe "Applying updates to a doc", -> JSON.parse(updates[0]).op.should.deep.equal @update.op done() + it "should set the first op timestamp", (done) -> + rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => + throw error if error? 
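A note on the MULTI block above: RPUSH and SETNX are queued and executed atomically, and EXEC returns one reply per queued command, so result[0] is the RPUSH reply (the new queue length, which queueOps passes to its callback) and result[1] is SETNX's 0-or-1. Because SETNX only writes when the key is absent, the stored value records when the oldest unflushed op was queued. A minimal sketch, not part of the patch, of how a consumer could turn that key into a queue age; getQueueAge is a hypothetical helper and rclient is assumed to be a connected redis client configured like the one in the manager:

    # Minimal sketch only; getQueueAge is a hypothetical helper.
    getQueueAge = (rclient, project_id, callback = (error, ageMs) ->) ->
      rclient.get "ProjectHistory:FirstOpTimestamp:#{project_id}", (error, timestamp) ->
        return callback(error) if error?
        return callback(null, 0) if !timestamp?   # queue empty or already flushed
        # SETNX stored Date.now(); redis returns it back as a string
        callback null, Date.now() - parseInt(timestamp, 10)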
+ result.should.be.within(@startTime, Date.now()) + @firstOpTimestamp = result + done() + + describe "when sending another update", -> + before (done) -> + @second_update = Object.create(@update) + @second_update.v = @version + 1 + DocUpdaterClient.sendUpdate @project_id, @doc_id, @second_update, (error) -> + throw error if error? + setTimeout done, 200 + + it "should not change the first op timestamp", (done) -> + rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => + throw error if error? + result.should.equal @firstOpTimestamp + done() + describe "when the document is loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index aa777b40d6..8c6ff5c7d7 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -20,6 +20,7 @@ describe "ProjectHistoryRedisManager", -> project_history: key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" + projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}" } "redis-sharelatex": createClient: () => @rclient @@ -32,16 +33,26 @@ describe "ProjectHistoryRedisManager", -> describe "queueOps", -> beforeEach -> @ops = ["mock-op-1", "mock-op-2"] - @rclient.rpush = sinon.stub() + @multi = exec: sinon.stub() + @multi.rpush = sinon.stub() + @multi.setnx = sinon.stub() + @rclient.multi = () => @multi + # @rclient = multi: () => @multi @ProjectHistoryRedisManager.queueOps @project_id, @ops..., @callback it "should queue an update", -> - @rclient.rpush + @multi.rpush .calledWithExactly( "ProjectHistory:Ops:#{@project_id}" @ops[0] @ops[1] - @callback + ).should.equal true + + it "should set the queue timestamp if not present", -> + @multi.setnx + .calledWithExactly( + "ProjectHistory:FirstOpTimestamp:#{@project_id}" + Date.now() ).should.equal true describe "queueRenameEntity", -> From 33e898f67184425e9c4f14b11d41576fda1acb50 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 23 Jul 2018 15:32:33 +0100 Subject: [PATCH 417/769] update to 1.1.9 build scripts --- .../.github/ISSUE_TEMPLATE.md | 38 ++++++++++++++++ .../.github/PULL_REQUEST_TEMPLATE.md | 45 +++++++++++++++++++ services/document-updater/Jenkinsfile | 45 +++++++++++++++++-- services/document-updater/Makefile | 20 ++++++--- .../document-updater/docker-compose.ci.yml | 13 ++++-- services/document-updater/docker-compose.yml | 18 +++++--- services/document-updater/nodemon.json | 5 ++- services/document-updater/package.json | 21 ++++----- 8 files changed, 175 insertions(+), 30 deletions(-) create mode 100644 services/document-updater/.github/ISSUE_TEMPLATE.md create mode 100644 services/document-updater/.github/PULL_REQUEST_TEMPLATE.md diff --git a/services/document-updater/.github/ISSUE_TEMPLATE.md b/services/document-updater/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000000..e0093aa90c --- /dev/null +++ b/services/document-updater/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,38 @@ + + +## Steps to Reproduce + + + +1. +2. +3. 
+ +## Expected Behaviour + + +## Observed Behaviour + + + +## Context + + +## Technical Info + + +* URL: +* Browser Name and version: +* Operating System and version (desktop or mobile): +* Signed in as: +* Project and/or file: + +## Analysis + + +## Who Needs to Know? + + + +- +- diff --git a/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md b/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000000..ed25ee83c1 --- /dev/null +++ b/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,45 @@ + + +### Description + + + +#### Screenshots + + + +#### Related Issues / PRs + + + +### Review + + + +#### Potential Impact + + + +#### Manual Testing Performed + +- [ ] +- [ ] + +#### Accessibility + + + +### Deployment + + + +#### Deployment Checklist + +- [ ] Update documentation not included in the PR (if any) +- [ ] + +#### Metrics and Monitoring + + + +#### Who Needs to Know? diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index ab90aaae29..fa0f5beedc 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -3,6 +3,13 @@ String cron_string = BRANCH_NAME == "master" ? "@daily" : "" pipeline { agent any + environment { + GIT_PROJECT = "document-updater-sharelatex" + JENKINS_WORKFLOW = "document-updater-sharelatex" + TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" + GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT" + } + triggers { pollSCM('* * * * *') cron(cron_string) @@ -18,16 +25,24 @@ pipeline { } } steps { - // we need to disable logallrefupdates, else git clones - // during the npm install will require git to lookup the - // user id which does not exist in the container's + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"pending\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build is underway\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } + // we need to disable logallrefupdates, else git clones + // during the npm install will require git to lookup the + // user id which does not exist in the container's // /etc/passwd file, causing the clone to fail. 
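The curl commands added to the Jenkinsfile post a commit status to GitHub's statuses API, once as "pending" before the build and (further down in this patch) as "success" or "failure" afterwards. For illustration only, the same call sketched in CoffeeScript with the request library the service already uses; the environment variable names mirror the Jenkinsfile and the description string is a placeholder:

    # Illustrative translation of the Jenkinsfile curl call; not part of the patch.
    request = require "request"

    notifyGitHub = (state, description, callback = (error) ->) ->
      request.post
        url: "https://api.github.com/repos/sharelatex/document-updater-sharelatex/statuses/#{process.env.GIT_COMMIT}"
        auth:
          user: process.env.GH_AUTH_USERNAME
          pass: process.env.GH_AUTH_PASSWORD
        headers:
          "User-Agent": "jenkins-ci"   # the GitHub API rejects requests without a User-Agent
        json:
          state: state                 # "pending", "success" or "failure"
          target_url: process.env.TARGET_URL
          description: description
          context: "ci/jenkins"
      , (error, res, body) ->
        callback(error)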
sh 'git config --global core.logallrefupdates false' sh 'rm -rf node_modules' sh 'npm install && npm rebuild' } } - stage('Compile') { agent { docker { @@ -77,6 +92,19 @@ pipeline { post { always { sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean' + sh 'make clean' + } + + success { + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"success\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build succeeded!\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } } failure { @@ -84,6 +112,15 @@ pipeline { to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", body: "Build: ${BUILD_URL}") + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"failure\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build failed\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } } } diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 10adbe3866..8b3646251d 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,13 +1,18 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.1 +# Version: 1.1.9 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) PROJECT_NAME = document-updater DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml -DOCKER_COMPOSE := docker-compose ${DOCKER_COMPOSE_FLAGS} +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker-compose ${DOCKER_COMPOSE_FLAGS} + clean: rm -f app.js @@ -18,12 +23,15 @@ clean: test: test_unit test_acceptance test_unit: - @[ -d test/unit ] && $(DOCKER_COMPOSE) run --rm test_unit -- ${MOCHA_ARGS} || echo "document-updater has no unit tests" + @[ ! -d test/unit ] && echo "document-updater has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit -test_acceptance: test_clean # clear the database before each acceptance test run - @[ -d test/acceptance ] && $(DOCKER_COMPOSE) run --rm test_acceptance -- ${MOCHA_ARGS} || echo "document-updater has no acceptance tests" +test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run + @[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: - $(DOCKER_COMPOSE) down -t 0 + $(DOCKER_COMPOSE) down -v -t 0 + +test_acceptance_pre_run: + @[ ! -f test/acceptance/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run .PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 9f40ba879d..4f00796ee6 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.1 +# Version: 1.1.9 version: "2" @@ -11,7 +11,8 @@ services: volumes: - .:/app working_dir: /app - entrypoint: npm run test:unit:_run + user: node + command: npm run test:unit:_run test_acceptance: image: node:6.9.5 @@ -19,12 +20,16 @@ services: - .:/app working_dir: /app environment: + ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} depends_on: - - redis - mongo - entrypoint: npm run test:acceptance:_run + - redis + user: node + command: npm run test:acceptance:_run redis: image: redis diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index d55ca56580..899e2d6a62 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.0.1 +# Version: 1.1.9 version: "2" @@ -11,23 +11,31 @@ services: volumes: - .:/app working_dir: /app - entrypoint: npm run test:unit + environment: + MOCHA_GREP: ${MOCHA_GREP} + command: npm run test:unit + user: node test_acceptance: image: node:6.9.5 volumes: - .:/app + working_dir: /app environment: + ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + user: node depends_on: - - redis - mongo - working_dir: /app - entrypoint: npm run test:acceptance + - redis + command: npm run test:acceptance redis: image: redis mongo: image: mongo:3.4 + diff --git a/services/document-updater/nodemon.json b/services/document-updater/nodemon.json index 9a3be8d966..98db38d71b 100644 --- a/services/document-updater/nodemon.json +++ b/services/document-updater/nodemon.json @@ -8,9 +8,12 @@ "execMap": { "js": "npm run start" }, + "watch": [ "app/coffee/", - "app.coffee" + "app.coffee", + "config/" ], "ext": "coffee" + } diff --git a/services/document-updater/package.json b/services/document-updater/package.json index ce5aa9d18c..7d13caa763 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -7,16 +7,17 @@ "url": "https://github.com/sharelatex/document-updater-sharelatex.git" }, "scripts": { - "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee", - "start": "npm run compile:app && node app.js", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- $@", - "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", - "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- $@", - "compile:unit_tests": "[ -e test/unit ] && coffee -o test/unit/js -c test/unit/coffee || echo 'No unit tests to compile'", - "compile:acceptance_tests": "[ -e test/acceptance ] && coffee -o test/acceptance/js -c test/acceptance/coffee || echo 'No acceptance tests to compile'", - "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", - "nodemon": "nodemon --config nodemon.json" + "compile:app": "([ -e app/coffee ] && coffee $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee 
$COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", + "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js", + "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP", + "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", + "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", + "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests", + "nodemon": "nodemon --config nodemon.json", + "compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" }, "dependencies": { "async": "^2.5.0", From e471730efbb82922ba91b471e8aa4f61d601d6a8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 23 Jul 2018 16:05:04 +0100 Subject: [PATCH 418/769] Revert "Merge pull request #33 from sharelatex/bg-add-timestamp-marker-to-project-history-queue" This reverts commit 8ec398b133aa75c45384dd7ceb630cd743f4a15c, reversing changes made to e3a6c934cf53fd245d7d0df737463cc90f954885. --- .../coffee/ProjectHistoryRedisManager.coffee | 14 ++--------- .../config/settings.defaults.coffee | 1 - .../coffee/ApplyingUpdatesToADocTests.coffee | 23 +------------------ .../ProjectHistoryRedisManagerTests.coffee | 17 +++----------- 4 files changed, 6 insertions(+), 49 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 1cc80ea722..42b9f16df2 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -4,18 +4,8 @@ rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdate logger = require('logger-sharelatex') module.exports = ProjectHistoryRedisManager = - queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) -> - multi = rclient.multi() - # Push the ops onto the project history queue - multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops... - # To record the age of the oldest op on the queue set a timestamp if not - # already present (SETNX). - multi.setnx projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now() - multi.exec (error, result) -> - return callback(error) if error? 
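Both the removed and the restored versions of queueOps rely on a CoffeeScript splat in the middle of the parameter list: the first argument binds to project_id, the last to callback, and everything in between is collected into the ops array. That is also why the restored one-liner (the hunk continues just below) can forward the array straight into rpush, which itself accepts a variable number of values followed by a callback. A standalone sketch:

    # Standalone sketch of the splat signature, not part of the patch:
    queueOps = (project_id, ops..., callback) ->
      console.log project_id    # "project-1"
      console.log ops           # [ 'op-a', 'op-b' ]
      callback()

    queueOps "project-1", "op-a", "op-b", -> console.log "queued"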
- # return the number of entries pushed onto the project history queue - callback null, result[0] - + queueOps: (project_id, ops..., callback) -> + rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) -> projectUpdate = diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index dfce36f6e3..18f2b1570b 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -74,7 +74,6 @@ module.exports = project_history: key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" - projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}" # cluster: [{ # port: "7000" # host: "localhost" diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index c6aa6fe856..cb560a26bb 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -33,7 +33,7 @@ describe "Applying updates to a doc", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] sinon.spy MockWebApi, "getDocument" - @startTime = Date.now() + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> throw error if error? @@ -67,27 +67,6 @@ describe "Applying updates to a doc", -> JSON.parse(updates[0]).op.should.deep.equal @update.op done() - it "should set the first op timestamp", (done) -> - rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => - throw error if error? - result.should.be.within(@startTime, Date.now()) - @firstOpTimestamp = result - done() - - describe "when sending another update", -> - before (done) -> - @second_update = Object.create(@update) - @second_update.v = @version + 1 - DocUpdaterClient.sendUpdate @project_id, @doc_id, @second_update, (error) -> - throw error if error? - setTimeout done, 200 - - it "should not change the first op timestamp", (done) -> - rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => - throw error if error? 
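The settings hunk in this revert also shows the key_schema convention used throughout these config files: key names live behind small functions rather than being interpolated at call sites, and the commented-out cluster blocks carry the same keys with the id wrapped in {...}, Redis Cluster's hash-tag syntax for pinning related keys to a single slot. A sketch of the pattern, using the projectHistoryOps key from the diff:

    # Sketch of the key_schema convention; the commented line is the
    # hash-tagged cluster variant from the commented-out config.
    key_schema =
      projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}"
      # projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}"

    console.log key_schema.projectHistoryOps(project_id: "1234")
    # => "ProjectHistory:Ops:1234"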
- result.should.equal @firstOpTimestamp - done() - describe "when the document is loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index 8c6ff5c7d7..aa777b40d6 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -20,7 +20,6 @@ describe "ProjectHistoryRedisManager", -> project_history: key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" - projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}" } "redis-sharelatex": createClient: () => @rclient @@ -33,26 +32,16 @@ describe "ProjectHistoryRedisManager", -> describe "queueOps", -> beforeEach -> @ops = ["mock-op-1", "mock-op-2"] - @multi = exec: sinon.stub() - @multi.rpush = sinon.stub() - @multi.setnx = sinon.stub() - @rclient.multi = () => @multi - # @rclient = multi: () => @multi + @rclient.rpush = sinon.stub() @ProjectHistoryRedisManager.queueOps @project_id, @ops..., @callback it "should queue an update", -> - @multi.rpush + @rclient.rpush .calledWithExactly( "ProjectHistory:Ops:#{@project_id}" @ops[0] @ops[1] - ).should.equal true - - it "should set the queue timestamp if not present", -> - @multi.setnx - .calledWithExactly( - "ProjectHistory:FirstOpTimestamp:#{@project_id}" - Date.now() + @callback ).should.equal true describe "queueRenameEntity", -> From fdcb806518761c9e015559e072fe87c25bbfae71 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 20 Jul 2018 10:43:31 +0100 Subject: [PATCH 419/769] set a timestamp for the first entry in the projectHistory:Ops queue --- .../coffee/ProjectHistoryRedisManager.coffee | 14 +++++++++-- .../config/settings.defaults.coffee | 1 + .../coffee/ApplyingUpdatesToADocTests.coffee | 23 ++++++++++++++++++- .../ProjectHistoryRedisManagerTests.coffee | 17 +++++++++++--- 4 files changed, 49 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 42b9f16df2..1cc80ea722 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -4,8 +4,18 @@ rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdate logger = require('logger-sharelatex') module.exports = ProjectHistoryRedisManager = - queueOps: (project_id, ops..., callback) -> - rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback + queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) -> + multi = rclient.multi() + # Push the ops onto the project history queue + multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops... + # To record the age of the oldest op on the queue set a timestamp if not + # already present (SETNX). + multi.setnx projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now() + multi.exec (error, result) -> + return callback(error) if error? 
+ # return the number of entries pushed onto the project history queue + callback null, result[0] + queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) -> projectUpdate = diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 18f2b1570b..dfce36f6e3 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -74,6 +74,7 @@ module.exports = project_history: key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" + projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}" # cluster: [{ # port: "7000" # host: "localhost" diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index cb560a26bb..c6aa6fe856 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -33,7 +33,7 @@ describe "Applying updates to a doc", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] sinon.spy MockWebApi, "getDocument" - + @startTime = Date.now() MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> throw error if error? @@ -67,6 +67,27 @@ describe "Applying updates to a doc", -> JSON.parse(updates[0]).op.should.deep.equal @update.op done() + it "should set the first op timestamp", (done) -> + rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => + throw error if error? + result.should.be.within(@startTime, Date.now()) + @firstOpTimestamp = result + done() + + describe "when sending another update", -> + before (done) -> + @second_update = Object.create(@update) + @second_update.v = @version + 1 + DocUpdaterClient.sendUpdate @project_id, @doc_id, @second_update, (error) -> + throw error if error? + setTimeout done, 200 + + it "should not change the first op timestamp", (done) -> + rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => + throw error if error? 
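The @second_update built above with Object.create(@update) is a prototype copy: @update becomes the prototype of the new object, so assigning v on the copy shadows the original's version while the other fields are read through the prototype chain and @update itself stays untouched for later assertions. A standalone sketch with made-up values:

    # Standalone sketch of the Object.create copy used for @second_update;
    # the field values here are invented for illustration.
    update = { doc: "doc-id", op: [{ i: "foo", p: 4 }], v: 0 }
    second_update = Object.create(update)
    second_update.v = update.v + 1

    console.log second_update.v    # 1 -- own property, shadows the prototype
    console.log second_update.op   # inherited from update via the prototype chain
    console.log update.v           # 0 -- the original is unchanged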
+ result.should.equal @firstOpTimestamp + done() + describe "when the document is loaded", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index aa777b40d6..8c6ff5c7d7 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -20,6 +20,7 @@ describe "ProjectHistoryRedisManager", -> project_history: key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" + projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}" } "redis-sharelatex": createClient: () => @rclient @@ -32,16 +33,26 @@ describe "ProjectHistoryRedisManager", -> describe "queueOps", -> beforeEach -> @ops = ["mock-op-1", "mock-op-2"] - @rclient.rpush = sinon.stub() + @multi = exec: sinon.stub() + @multi.rpush = sinon.stub() + @multi.setnx = sinon.stub() + @rclient.multi = () => @multi + # @rclient = multi: () => @multi @ProjectHistoryRedisManager.queueOps @project_id, @ops..., @callback it "should queue an update", -> - @rclient.rpush + @multi.rpush .calledWithExactly( "ProjectHistory:Ops:#{@project_id}" @ops[0] @ops[1] - @callback + ).should.equal true + + it "should set the queue timestamp if not present", -> + @multi.setnx + .calledWithExactly( + "ProjectHistory:FirstOpTimestamp:#{@project_id}" + Date.now() ).should.equal true describe "queueRenameEntity", -> From aa013f0bee14441462fc9a8fe372c9abe5b23b7d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 16 Aug 2018 11:13:11 +0100 Subject: [PATCH 420/769] limit parallel resync doc requests to web --- services/document-updater/app/coffee/HistoryManager.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 9d39166681..802d944ced 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -65,10 +65,12 @@ module.exports = HistoryManager = newBlock = Math.floor(length / threshold) return newBlock != prevBlock + MAX_PARALLEL_REQUESTS: 4 + resyncProjectHistory: (project_id, projectHistoryId, docs, files, callback) -> ProjectHistoryRedisManager.queueResyncProjectStructure project_id, projectHistoryId, docs, files, (error) -> return callback(error) if error? 
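The resyncProjectHistory hunk continues just below: async.each becomes async.eachLimit with the new MAX_PARALLEL_REQUESTS constant, so at most four resyncDoc calls, and therefore at most four document fetches from web, are in flight at once. A minimal sketch of the difference, with made-up doc ids:

    # Minimal sketch: eachLimit behaves like each, but caps concurrency.
    async = require "async"

    docs = ({doc: "doc-#{i}"} for i in [1..10])
    resyncDoc = (doc, cb) ->
      console.log "resyncing", doc.doc
      setTimeout cb, 100          # stands in for the request to web

    # at most 4 in flight at a time; async.each would start all 10 at once
    async.eachLimit docs, 4, resyncDoc, (error) ->
      console.log "all docs resynced" unless error?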
DocumentManager = require "./DocumentManager" resyncDoc = (doc, cb) -> DocumentManager.resyncDocContentsWithLock project_id, doc.doc, cb - async.each docs, resyncDoc, callback + async.eachLimit docs, HistoryManager.MAX_PARALLEL_REQUESTS, resyncDoc, callback From 910b27357da6995c890c3b42a44837aae6d1f32f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 16 Aug 2018 11:14:11 +0100 Subject: [PATCH 421/769] add more logging to resync operations --- .../document-updater/app/coffee/DocumentManager.coffee | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 0c50d9b1f3..39713a1981 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -181,14 +181,19 @@ module.exports = DocumentManager = callback(null, lines, version) resyncDocContents: (project_id, doc_id, callback) -> + logger.log {project_id: project_id, doc_id: doc_id}, "start resyncing doc contents" RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? if !lines? or !version? + logger.log {project_id: project_id, doc_id: doc_id}, "resyncing doc contents - not found in redis - retrieving from web" PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> - return callback(error) if error? + if error? + logger.error {project_id: project_id, doc_id: doc_id, getDocError: error}, "resyncing doc contents - error retrieving from web" + return callback(error) ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback else + logger.log {project_id: project_id, doc_id: doc_id}, "resyncing doc contents - doc in redis - will queue in redis" ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> From 3db7377d7409eecd86d11c0872aa8cc264f19ad8 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 28 Sep 2018 15:11:17 +0100 Subject: [PATCH 422/769] add process.env["REDIS_PASSWORD"] --- services/document-updater/config/settings.defaults.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 18f2b1570b..840f482a16 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -23,7 +23,7 @@ module.exports = realtime: port: "6379" host: process.env["REDIS_HOST"] or "localhost" - password:"" + password: process.env["REDIS_PASSWORD"] key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" # cluster: [{ @@ -35,7 +35,7 @@ module.exports = documentupdater: port: "6379" host: process.env["REDIS_HOST"] or "localhost" - password: "" + password: process.env["REDIS_PASSWORD"] key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" docLines: ({doc_id}) -> "doclines:#{doc_id}" @@ -66,7 +66,7 @@ module.exports = history: port: "6379" host: process.env["REDIS_HOST"] or "localhost" - password:"" + password: process.env["REDIS_PASSWORD"] key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" docsWithHistoryOps: ({project_id}) -> 
"DocsWithHistoryOps:#{project_id}" @@ -84,7 +84,7 @@ module.exports = lock: port: "6379" host: process.env["REDIS_HOST"] or "localhost" - password:"" + password: process.env["REDIS_PASSWORD"] key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" # cluster: [{ From 93cf5742514eaf7decf9836c18916b73a80163f7 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 28 Sep 2018 15:13:47 +0100 Subject: [PATCH 423/769] upgrade build scripts 1.1.9 --- .../.github/ISSUE_TEMPLATE.md | 38 ++++++++++++++ .../.github/PULL_REQUEST_TEMPLATE.md | 45 ++++++++++++++++ services/document-updater/Jenkinsfile | 51 ++++++++++++++++++- services/document-updater/Makefile | 11 ++-- services/document-updater/buildscript.txt | 9 ++++ .../document-updater/docker-compose.ci.yml | 9 ++-- services/document-updater/docker-compose.yml | 4 +- services/document-updater/package.json | 9 ++-- 8 files changed, 163 insertions(+), 13 deletions(-) create mode 100644 services/document-updater/.github/ISSUE_TEMPLATE.md create mode 100644 services/document-updater/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 services/document-updater/buildscript.txt diff --git a/services/document-updater/.github/ISSUE_TEMPLATE.md b/services/document-updater/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000000..e0093aa90c --- /dev/null +++ b/services/document-updater/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,38 @@ + + +## Steps to Reproduce + + + +1. +2. +3. + +## Expected Behaviour + + +## Observed Behaviour + + + +## Context + + +## Technical Info + + +* URL: +* Browser Name and version: +* Operating System and version (desktop or mobile): +* Signed in as: +* Project and/or file: + +## Analysis + + +## Who Needs to Know? + + + +- +- diff --git a/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md b/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000000..ed25ee83c1 --- /dev/null +++ b/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,45 @@ + + +### Description + + + +#### Screenshots + + + +#### Related Issues / PRs + + + +### Review + + + +#### Potential Impact + + + +#### Manual Testing Performed + +- [ ] +- [ ] + +#### Accessibility + + + +### Deployment + + + +#### Deployment Checklist + +- [ ] Update documentation not included in the PR (if any) +- [ ] + +#### Metrics and Monitoring + + + +#### Who Needs to Know? diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index bc9ba0142f..3af2b785b8 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -3,12 +3,33 @@ String cron_string = BRANCH_NAME == "master" ? 
"@daily" : "" pipeline { agent any + environment { + GIT_PROJECT = "document-updater-sharelatex" + JENKINS_WORKFLOW = "document-updater-sharelatex" + TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" + GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT" + } + triggers { pollSCM('* * * * *') cron(cron_string) } stages { + stage('Install') { + steps { + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"pending\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build is underway\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } + } + } + stage('Build') { steps { sh 'make build' @@ -29,7 +50,13 @@ pipeline { stage('Package and publish build') { steps { - sh 'make publish' + + withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) { + sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}' + } + sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish' + sh 'docker logout https://gcr.io/overleaf-ops' + } } @@ -47,6 +74,19 @@ pipeline { post { always { sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean' + sh 'make clean' + } + + success { + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"success\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build succeeded!\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } } failure { @@ -54,6 +94,15 @@ pipeline { to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", body: "Build: ${BUILD_URL}") + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"failure\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build failed\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } } } diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 215b56a9e9..09b26a6055 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.3 +# Version: 1.1.9 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -15,6 +15,8 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ clean: + docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) rm -f app.js rm -rf app/js rm -rf test/unit/js @@ -34,9 +36,12 @@ test_clean: test_acceptance_pre_run: @[ ! -f test/acceptance/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: - docker build --pull --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . 
+ docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + . publish: - docker push gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) .PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt new file mode 100644 index 0000000000..c9ccb323b4 --- /dev/null +++ b/services/document-updater/buildscript.txt @@ -0,0 +1,9 @@ +--script-version=1.1.9 +document-updater +--node-version=6.9.5 +--acceptance-creds=None +--language=coffeescript +--dependencies=mongo,redis +--docker-repos=gcr.io/overleaf-ops +--kube=false +--build-target=docker diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 21c006641e..17c4ddd2bf 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,23 +1,25 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.3 +# Version: 1.1.9 version: "2" services: test_unit: - image: gcr.io/csh-gcdm-test/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER user: node command: npm run test:unit:_run test_acceptance: build: . - image: gcr.io/csh-gcdm-test/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER environment: + ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} depends_on: - mongo - redis @@ -29,4 +31,3 @@ services: mongo: image: mongo:3.4 - diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index f24caa8883..dcbc14e683 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.3 +# Version: 1.1.9 version: "2" @@ -22,6 +22,7 @@ services: - .:/app working_dir: /app environment: + ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres @@ -31,6 +32,7 @@ services: - mongo - redis command: npm run test:acceptance + redis: image: redis diff --git a/services/document-updater/package.json b/services/document-updater/package.json index a23ae73427..7d13caa763 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -9,14 +9,15 @@ "scripts": { "compile:app": "([ -e app/coffee ] && coffee $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", - "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js", "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP", "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", - "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", - "nodemon": "nodemon --config nodemon.json" + "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests", + "nodemon": "nodemon --config nodemon.json", + "compile:smoke_tests": "[ ! 
-e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" }, "dependencies": { "async": "^2.5.0", From ff673c71d599824e68a3960bc0abb03f21e3ae03 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 28 Sep 2018 17:04:25 +0100 Subject: [PATCH 424/769] set redis port via env var --- services/document-updater/config/settings.defaults.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 840f482a16..825c1772cc 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -21,7 +21,7 @@ module.exports = redis: realtime: - port: "6379" + port: process.env["REDIS_PORT"] or "6379" host: process.env["REDIS_HOST"] or "localhost" password: process.env["REDIS_PASSWORD"] key_schema: @@ -33,7 +33,7 @@ module.exports = # key_schema: # pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" documentupdater: - port: "6379" + port: process.env["REDIS_PORT"] or "6379" host: process.env["REDIS_HOST"] or "localhost" password: process.env["REDIS_PASSWORD"] key_schema: @@ -64,7 +64,7 @@ module.exports = # ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" # projectState: ({project_id}) -> "ProjectState:{#{project_id}}" history: - port: "6379" + port: process.env["REDIS_PORT"] or "6379" host: process.env["REDIS_HOST"] or "localhost" password: process.env["REDIS_PASSWORD"] key_schema: @@ -82,7 +82,7 @@ module.exports = # uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" # docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" lock: - port: "6379" + port: process.env["REDIS_PORT"] or "6379" host: process.env["REDIS_HOST"] or "localhost" password: process.env["REDIS_PASSWORD"] key_schema: From c128e0ab1c4859e562c5600049a8d8327780c826 Mon Sep 17 00:00:00 2001 From: Alasdair Smith Date: Tue, 9 Oct 2018 11:46:27 +0100 Subject: [PATCH 425/769] Use setting instead of hard-coding port --- services/document-updater/config/settings.defaults.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index dfce36f6e3..02de30553a 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -10,7 +10,7 @@ module.exports = apis: web: - url: "http://#{process.env["WEB_HOST"] or "localhost"}:3000" + url: "http://#{process.env["WEB_HOST"] or "localhost"}:#{process.env['WEB_PORT'] or 3000}" user: "sharelatex" pass: "password" trackchanges: From aae1352519717b0aeb69c83d46b1a86369820a72 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 30 Oct 2018 11:56:27 +0000 Subject: [PATCH 426/769] ensure that project history is flushed when the project is deleted --- .../app/coffee/HistoryManager.coffee | 9 +++++++++ .../app/coffee/ProjectManager.coffee | 19 +++++++++++-------- .../flushAndDeleteProjectTests.coffee | 10 +++++----- 3 files changed, 25 insertions(+), 13 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 802d944ced..8dcbf426f5 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -20,16 +20,25 @@ module.exports = HistoryManager 
= else if res.statusCode < 200 and res.statusCode >= 300 logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}" + # flush changes in the background flushProjectChangesAsync: (project_id) -> return if !Settings.apis?.project_history?.enabled + HistoryManager.flushProjectChanges project_id, -> + # flush changes and callback (for when we need to know the queue is flushed) + flushProjectChanges: (project_id, callback = (error) ->) -> + return callback() if !Settings.apis?.project_history?.enabled url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush" logger.log { project_id, url }, "flushing doc in project history api" request.post url, (error, res, body)-> if error? logger.error { error, project_id}, "project history doc to track changes api" + return callback(error) else if res.statusCode < 200 and res.statusCode >= 300 logger.error { project_id }, "project history api returned a failure status code: #{res.statusCode}" + return callback(error) + else + return callback() FLUSH_DOC_EVERY_N_OPS: 100 FLUSH_PROJECT_EVERY_N_OPS: 500 diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index cbf7bb661b..c714f7442a 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -60,14 +60,17 @@ module.exports = ProjectManager = logger.log project_id: project_id, doc_ids: doc_ids, "deleting docs" async.series jobs, () -> - # There is no harm in flushing project history if the previous call - # failed and sometimes it is required - HistoryManager.flushProjectChangesAsync project_id - - if errors.length > 0 - callback new Error("Errors deleting docs. See log for details") - else - callback(null) + # When deleting the project here we want to ensure that project + # history is completely flushed because the project may be + # deleted in web after this call completes, and so further + # attempts to flush would fail after that. + HistoryManager.flushProjectChanges project_id, (error) -> + if errors.length > 0 + callback new Error("Errors deleting docs. See log for details") + else if error? 
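The HistoryManager change above keeps a single HTTP implementation and derives the background form from it: flushProjectChangesAsync is now flushProjectChanges called with a no-op callback, while the project-deletion path below passes a real callback because web may delete the project as soon as this call returns. A condensed sketch of the wrapper pattern, not the full implementation; note the sketch tests "res.statusCode < 200 or res.statusCode >= 300", whereas the committed guard uses "and", a condition that can never be true, so non-2xx responses only get logged:

    # Condensed sketch of the wrapper pattern introduced above.
    request = require "request"
    Settings = require "settings-sharelatex"

    flushProjectChanges = (project_id, callback = (error) ->) ->
      url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush"
      request.post url, (error, res, body) ->
        return callback(error) if error?
        if res.statusCode < 200 or res.statusCode >= 300
          return callback(new Error("flush returned status #{res.statusCode}"))
        callback()

    # fire-and-forget variant for the normal editing path
    flushProjectChangesAsync = (project_id) ->
      flushProjectChanges project_id, ->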
+ callback(error) + else + callback(null) getProjectDocsAndFlushIfOld: (project_id, projectStateHash, excludeVersions = {}, _callback = (error, docs) ->) -> timer = new Metrics.Timer("projectManager.getProjectDocsAndFlushIfOld") diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee index 51e736aa45..c060be7485 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee @@ -12,7 +12,7 @@ describe "ProjectManager - flushAndDeleteProject", -> "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./HistoryManager": @HistoryManager = - flushProjectChangesAsync: sinon.stub() + flushProjectChanges: sinon.stub().callsArg(1) "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() @@ -40,8 +40,8 @@ describe "ProjectManager - flushAndDeleteProject", -> .should.equal true it "should flush project history", -> - @HistoryManager.flushProjectChangesAsync - .calledWithExactly(@project_id) + @HistoryManager.flushProjectChanges + .calledWith(@project_id) .should.equal true it "should call the callback without error", -> @@ -70,8 +70,8 @@ describe "ProjectManager - flushAndDeleteProject", -> .should.equal true it "should still flush project history", -> - @HistoryManager.flushProjectChangesAsync - .calledWithExactly(@project_id) + @HistoryManager.flushProjectChanges + .calledWith(@project_id) .should.equal true it "should record the error", -> From 5f046ed32960cb00507f06700622af68998bf25f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 27 Nov 2018 13:41:10 +0000 Subject: [PATCH 427/769] install metrics route and bump metrics version --- services/document-updater/app.coffee | 6 ++++-- services/document-updater/package.json | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index a1a18d5889..516af85fea 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -1,3 +1,6 @@ +Metrics = require "metrics-sharelatex" +Metrics.initialize("doc-updater") + express = require('express') http = require("http") Settings = require('settings-sharelatex') @@ -14,8 +17,7 @@ Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" Path = require "path" -Metrics = require "metrics-sharelatex" -Metrics.initialize("doc-updater") + Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger) Metrics.event_loop.monitor(logger, 100) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 7d13caa763..ec17378d85 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -25,7 +25,7 @@ "express": "3.3.4", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.7", "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4", "request": "2.25.0", "requestretry": "^1.12.0", From 9437cf8b027e6ef7ae60f6004df14744c5f501d1 Mon Sep 17 00:00:00 2001 From: 
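
# The app.coffee reordering in the patch above matters because metrics and
# tracing agents generally have to initialise before the modules they
# instrument are required. A sketch of the safe ordering; Metrics.initialize
# is taken from the diff, the later requires are illustrative:
Metrics = require "metrics-sharelatex"
Metrics.initialize "doc-updater"   # must run before requiring instrumented modules

# only now pull in modules whose internals may get monkey-patched
express = require "express"
logger = require "logger-sharelatex"
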
Henry Oswald Date: Wed, 5 Dec 2018 15:11:24 +0000 Subject: [PATCH 428/769] explicitly split redis config values --- .../config/settings.defaults.coffee | 24 +++++++++---------- services/document-updater/package.json | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index bad2d1429d..e23510afda 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -21,9 +21,9 @@ module.exports = redis: realtime: - port: process.env["REDIS_PORT"] or "6379" - host: process.env["REDIS_HOST"] or "localhost" - password: process.env["REDIS_PASSWORD"] + port: process.env["REAL_TIME_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" + host: process.env["REAL_TIME_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" + password: process.env["REAL_TIME_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" # cluster: [{ # port: "7000" # host: "localhost" # }] # key_schema: # pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" documentupdater: - port: process.env["REDIS_PORT"] or "6379" - host: process.env["REDIS_HOST"] or "localhost" - password: process.env["REDIS_PASSWORD"] + port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" + host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" + password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" docLines: ({doc_id}) -> "doclines:#{doc_id}" @@ -64,9 +64,9 @@ module.exports = # ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" # projectState: ({project_id}) -> "ProjectState:{#{project_id}}" history: - port: process.env["REDIS_PORT"] or "6379" - host: process.env["REDIS_HOST"] or "localhost" - password: process.env["REDIS_PASSWORD"] + port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" + host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" + password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" @@ -83,9 +83,9 @@ module.exports = # uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" # docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" lock: - port: process.env["REDIS_PORT"] or "6379" - host: process.env["REDIS_HOST"] or "localhost" - password: process.env["REDIS_PASSWORD"] + port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" + host: process.env["LOCK_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" + password: process.env["LOCK_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" key_schema: blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" # cluster: [{ diff --git a/services/document-updater/package.json b/services/document-updater/package.json index ec17378d85..006509bec7 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -25,7 +25,7 @@ "express": "3.3.4", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.7", +
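
# Every Redis section in the patch above now follows the same three-step
# fallback: a service-specific variable, then the shared REDIS_* variable,
# then a hard-coded default. A hypothetical helper capturing that chain (the
# real settings file deliberately spells each section out by hand):
redisSection = (prefix) ->
  port: process.env["#{prefix}_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"
  host: process.env["#{prefix}_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
  password: process.env["#{prefix}_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""

# e.g. redisSection("HISTORY") resolves HISTORY_REDIS_HOST before REDIS_HOST
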
"metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.8", "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4", "request": "2.25.0", "requestretry": "^1.12.0", From 25e1ad74912cd15b8395963d9f645bba0ede0e53 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 5 Dec 2018 16:17:41 +0000 Subject: [PATCH 429/769] remove redis.realtime real time pushes data into the document updater redis instance --- services/document-updater/app.coffee | 9 --------- .../app/coffee/DispatchManager.coffee | 2 +- .../app/coffee/RealTimeRedisManager.coffee | 4 ++-- .../config/settings.defaults.coffee | 13 +------------ .../coffee/helpers/DocUpdaterClient.coffee | 6 +++--- .../DispatchManager/DispatchManagerTests.coffee | 8 ++++++-- .../ProjectHistoryRedisManagerTests.coffee | 2 ++ .../RealTimeRedisManagerTests.coffee | 2 +- 8 files changed, 16 insertions(+), 30 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 516af85fea..b8a6a63a3e 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -70,15 +70,6 @@ app.get '/status', (req, res)-> else res.send('document updater is alive') -webRedisClient = require("redis-sharelatex").createClient(Settings.redis.realtime) -app.get "/health_check/redis", (req, res, next) -> - webRedisClient.healthCheck (error) -> - if error? - logger.err {err: error}, "failed redis health check" - res.send 500 - else - res.send 200 - docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) app.get "/health_check/redis_cluster", (req, res, next) -> docUpdaterRedisClient.healthCheck (error) -> diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 93a22bfc07..e751534068 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -10,7 +10,7 @@ RateLimitManager = require('./RateLimitManager') module.exports = DispatchManager = createDispatcher: (RateLimiter) -> - client = redis.createClient(Settings.redis.realtime) + client = redis.createClient(Settings.redis.documentupdater) worker = { client: client _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index 7da7ca1f64..64bbe572b8 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -1,6 +1,6 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) -Keys = Settings.redis.realtime.key_schema +rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +Keys = Settings.redis.documentupdater.key_schema logger = require('logger-sharelatex') MAX_OPS_PER_ITERATION = 8 # process a limited number of ops for safety diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index e23510afda..7f36e47207 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -20,18 +20,6 @@ module.exports = url: "http://#{process.env["PROJECT_HISTORY_HOST"] or "localhost"}:3054" redis: - realtime: - port: 
process.env["REAL_TIME_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["REAL_TIME_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["REAL_TIME_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - key_schema: - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - # cluster: [{ - # port: "7000" - # host: "localhost" - # }] - # key_schema: - # pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" documentupdater: port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" @@ -49,6 +37,7 @@ module.exports = projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" # cluster: [{ # port: "7000" # host: "localhost" diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 7f50d64372..587e6f3af9 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -1,10 +1,10 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) -keys = Settings.redis.realtime.key_schema +rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +keys = Settings.redis.documentupdater.key_schema request = require("request").defaults(jar: false) async = require "async" -rclient_sub = require("redis-sharelatex").createClient(Settings.redis.realtime) +rclient_sub = require("redis-sharelatex").createClient(Settings.redis.documentupdater) rclient_sub.subscribe "applied-ops" rclient_sub.setMaxListeners(0) diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index b749e83a5d..85cdcb5a7e 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -4,16 +4,21 @@ should = chai.should() modulePath = "../../../../app/js/DispatchManager.js" SandboxedModule = require('sandboxed-module') + describe "DispatchManager", -> beforeEach -> + @timeout(3000) @DispatchManager = SandboxedModule.require modulePath, requires: "./UpdateManager" : @UpdateManager = {} "logger-sharelatex": @logger = { log: sinon.stub() } "settings-sharelatex": @settings = redis: - realtime: {} + documentupdater: {} "redis-sharelatex": @redis = {} "./RateLimitManager": {} + "./Metrics": + Timer: -> + done: -> @callback = sinon.stub() @RateLimiter = { run: (task,cb) -> task(cb) } # run task without rate limit @@ -22,7 +27,6 @@ describe "DispatchManager", -> @client = auth: sinon.stub() @redis.createClient = sinon.stub().returns @client - @worker = @DispatchManager.createDispatcher(@RateLimiter) it "should create a new redis client", -> diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index 8c6ff5c7d7..a93545b250 100644 --- 
a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -24,6 +24,8 @@ describe "ProjectHistoryRedisManager", -> } "redis-sharelatex": createClient: () => @rclient + "logger-sharelatex": + log:-> globals: JSON: @JSON = JSON diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index a04da996dc..375b85df78 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -15,7 +15,7 @@ describe "RealTimeRedisManager", -> "redis-sharelatex": createClient: () => @rclient "settings-sharelatex": redis: - realtime: @settings = + documentupdater: @settings = key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" "logger-sharelatex": { log: () -> } From 2505be283a574c4cc4fb242e52471896fc9f9b1e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 13 Dec 2018 11:03:08 +0000 Subject: [PATCH 430/769] turn down logging, use logger.info for less important data --- .../app/coffee/DocumentManager.coffee | 2 +- .../app/coffee/HistoryRedisManager.coffee | 2 +- .../app/coffee/HttpController.coffee | 8 ++++---- .../app/coffee/RangesManager.coffee | 6 +++--- .../app/coffee/RedisManager.coffee | 2 +- .../app/coffee/ShareJsUpdateManager.coffee | 4 ++-- .../app/coffee/UpdateManager.coffee | 2 +- .../DocumentManager/DocumentManagerTests.coffee | 2 +- .../HistoryRedisManagerTests.coffee | 4 +++- .../HttpController/HttpControllerTests.coffee | 8 ++++---- .../RangesManager/RangesManagerTests.coffee | 15 ++++++++++----- .../coffee/RedisManager/RedisManagerTests.coffee | 2 +- .../ShareJsUpdateManagerTests.coffee | 2 +- .../UpdateManager/UpdateManagerTests.coffee | 2 +- 14 files changed, 34 insertions(+), 27 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 39713a1981..7b8caa3e20 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -25,7 +25,7 @@ module.exports = DocumentManager = logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? - logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API" + logger.info {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API" RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) -> return callback(error) if error? 
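
# DocumentManager.getDoc above is a read-through cache: try Redis, fall back
# to the persistence API on a miss, then write the result back into Redis
# before returning. A skeleton of that shape with simplified signatures (the
# real functions also pass ranges, pathname and projectHistoryId):
RedisManager = require "./RedisManager"
PersistenceManager = require "./PersistenceManager"

getDocReadThrough = (project_id, doc_id, callback = (error, lines, version) ->) ->
  RedisManager.getDoc project_id, doc_id, (error, lines, version) ->
    return callback(error) if error?
    if lines? and version?
      return callback null, lines, version          # cache hit
    PersistenceManager.getDoc project_id, doc_id, (error, lines, version) ->
      return callback(error) if error?
      RedisManager.putDocInMemory project_id, doc_id, lines, version, (error) ->
        return callback(error) if error?
        callback null, lines, version               # cache miss, now cached
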
callback null, lines, version, ranges, pathname, projectHistoryId, null, false diff --git a/services/document-updater/app/coffee/HistoryRedisManager.coffee b/services/document-updater/app/coffee/HistoryRedisManager.coffee index d9a99a09aa..8c37132ada 100644 --- a/services/document-updater/app/coffee/HistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/HistoryRedisManager.coffee @@ -7,7 +7,7 @@ module.exports = HistoryRedisManager = recordDocHasHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> if ops.length == 0 return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush - logger.log project_id: project_id, doc_id: doc_id, "marking doc in project for history ops" + logger.info project_id: project_id, doc_id: doc_id, "marking doc in project for history ops" rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) -> return callback(error) if error? callback() diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 93f915d662..6cf03f2cd6 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -11,7 +11,7 @@ module.exports = HttpController = getDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id - logger.log project_id: project_id, doc_id: doc_id, "getting doc via http" + logger.info project_id: project_id, doc_id: doc_id, "getting doc via http" timer = new Metrics.Timer("http.getDoc") if req.query?.fromVersion? @@ -22,7 +22,7 @@ module.exports = HttpController = DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges, pathname) -> timer.done() return next(error) if error? - logger.log project_id: project_id, doc_id: doc_id, "got doc via http" + logger.info project_id: project_id, doc_id: doc_id, "got doc via http" if !lines? or !version? return next(new Errors.NotFoundError("document not found")) res.send JSON.stringify @@ -44,13 +44,13 @@ module.exports = HttpController = projectStateHash = req.query?.state # exclude is string of existing docs "id:version,id:version,..." 
excludeItems = req.query?.exclude?.split(',') or [] - logger.log project_id: project_id, exclude: excludeItems, "getting docs via http" + logger.info project_id: project_id, exclude: excludeItems, "getting docs via http" timer = new Metrics.Timer("http.getAllDocs") excludeVersions = {} for item in excludeItems [id,version] = item?.split(':') excludeVersions[id] = version - logger.log {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions" + logger.info {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions" ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, excludeVersions, (error, result) -> timer.done() if error instanceof Errors.ProjectStateChangedError diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index d0653bb6a2..ce3fa5dfca 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -30,12 +30,12 @@ module.exports = RangesManager = return callback(error) response = RangesManager._getRanges rangesTracker - logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges" + logger.info {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges" callback null, response acceptChanges: (change_ids, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges - logger.log "accepting #{ change_ids.length } changes in ranges" + logger.info "accepting #{ change_ids.length } changes in ranges" rangesTracker = new RangesTracker(changes, comments) rangesTracker.removeChangeIds(change_ids) response = RangesManager._getRanges(rangesTracker) @@ -43,7 +43,7 @@ module.exports = RangesManager = deleteComment: (comment_id, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges - logger.log {comment_id}, "deleting comment in ranges" + logger.info {comment_id}, "deleting comment in ranges" rangesTracker = new RangesTracker(changes, comments) rangesTracker.removeCommentId(comment_id) response = RangesManager._getRanges(rangesTracker) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 25dbafc6e7..064c6144af 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -233,7 +233,7 @@ module.exports = RedisManager = newHash = RedisManager._computeHash(newDocLines) opVersions = appliedOps.map (op) -> op?.v - logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis" + logger.info doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> if error? 
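
# The version/hash logging above supports an integrity check: hash the
# doclines on write and recompute on read so truncated or corrupted lines are
# detected (cf. the logHashErrors settings in the tests below). One plausible
# shape for such a helper; the real _computeHash implementation may differ,
# SHA-1 over the serialized lines is an assumption for illustration:
crypto = require "crypto"

computeDocHash = (docLines) ->
  crypto.createHash("sha1")
    .update(JSON.stringify(docLines), "utf8")
    .digest("hex")

# a writer stores computeDocHash(lines) alongside doclines; a reader
# recomputes it and treats any mismatch as a hash error
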
diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index a5cc6070cb..7a79b82724 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -20,7 +20,7 @@ module.exports = ShareJsUpdateManager = return model applyUpdate: (project_id, doc_id, update, lines, version, callback = (error, updatedDocLines) ->) -> - logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates" + logger.info project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates" jobs = [] # We could use a global model for all docs, but we're hitting issues with the @@ -39,7 +39,7 @@ module.exports = ShareJsUpdateManager = ShareJsUpdateManager._sendOp(project_id, doc_id, update) else return callback(error) - logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update" + logger.info project_id: project_id, doc_id: doc_id, error: error, "applied update" model.getSnapshot doc_key, (error, data) => return callback(error) if error? docLines = data.snapshot.split(/\r\n|\n|\r/) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index bfcfb806ca..43aea49512 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -47,7 +47,7 @@ module.exports = UpdateManager = profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id}) RealTimeRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => return callback(error) if error? - logger.log {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates" + logger.info {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates" if updates.length == 0 return callback() profile.log("getPendingUpdatesForDoc") diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index c52bb4b30d..736ceeee2d 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -16,7 +16,7 @@ describe "DocumentManager", -> "./HistoryManager": @HistoryManager = flushDocChangesAsync: sinon.stub() flushProjectChangesAsync: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub()} + "logger-sharelatex": @logger = {log: sinon.stub(), info: sinon.stub()} "./DocOpsManager": @DocOpsManager = {} "./Metrics": @Metrics = Timer: class Timer diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee index ca3937d4c5..a1d0e11b81 100644 --- a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee @@ -19,7 +19,9 @@ describe "HistoryRedisManager", -> key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" - "logger-sharelatex": { log: () -> } + "logger-sharelatex": + log: -> + info: -> @doc_id = "doc-id-123" 
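
# After applying ops, the updated ShareJS snapshot is a single string and has
# to be split back into the doclines array; the regex used above accepts
# Windows (\r\n), Unix (\n) and old-Mac (\r) line endings. A tiny
# self-contained check of that step:
snapshotToLines = (snapshot) ->
  snapshot.split /\r\n|\n|\r/

console.log snapshotToLines("one\r\ntwo\nthree\rfour")
# -> [ 'one', 'two', 'three', 'four' ]
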
@project_id = "project-id-123" @callback = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index ab6718c12a..46daa9a63b 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -12,7 +12,7 @@ describe "HttpController", -> "./HistoryManager": @HistoryManager = flushProjectChangesAsync: sinon.stub() "./ProjectManager": @ProjectManager = {} - "logger-sharelatex" : @logger = { log: sinon.stub() } + "logger-sharelatex" : @logger = { log: sinon.stub(), info: sinon.stub() } "./Metrics": @Metrics = {} "./Errors" : Errors @Metrics.Timer = class Timer @@ -59,7 +59,7 @@ describe "HttpController", -> .should.equal true it "should log the request", -> - @logger.log + @logger.info .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") .should.equal true @@ -88,7 +88,7 @@ describe "HttpController", -> .should.equal true it "should log the request", -> - @logger.log + @logger.info .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") .should.equal true @@ -475,7 +475,7 @@ describe "HttpController", -> .should.equal true it "should log the request", -> - @logger.log + @logger.info .calledWith({project_id: @project_id, exclude: []}, "getting docs via http") .should.equal true diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee index b11c73489e..e7322f0e63 100644 --- a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee @@ -7,10 +7,15 @@ SandboxedModule = require('sandboxed-module') describe "RangesManager", -> beforeEach -> + @logger = + error: sinon.stub() + log: sinon.stub() + warn: sinon.stub() + info: sinon.stub() + @RangesManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - + "logger-sharelatex": @logger @doc_id = "doc-id-123" @project_id = "project-id-123" @user_id = "user-id-123" @@ -184,7 +189,7 @@ describe "RangesManager", -> beforeEach -> @RangesManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + "logger-sharelatex": @logger "./RangesTracker":@RangesTracker = SandboxedModule.require "../../../../app/js/RangesTracker.js" @ranges = { @@ -226,7 +231,7 @@ describe "RangesManager", -> done() it "should log the call with the correct number of changes", -> - @logger.log + @logger.info .calledWith("accepting 1 changes in ranges") .should.equal true @@ -258,7 +263,7 @@ describe "RangesManager", -> done() it "should log the call with the correct number of changes", -> - @logger.log + @logger.info .calledWith("accepting #{ @change_ids.length } changes in ranges") .should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 4f6c24720e..9505339ddf 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ 
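
# The test updates above all follow one pattern: stub every logger method the
# module now calls (log plus info), load the module with SandboxedModule so
# the stub replaces logger-sharelatex, then assert on the stub. A condensed
# sketch of that pattern; loadWithStubbedLogger is a hypothetical helper and
# modulePath is whichever unit is under test:
sinon = require "sinon"
SandboxedModule = require "sandboxed-module"

loadWithStubbedLogger = (modulePath) ->
  logger = { log: sinon.stub(), info: sinon.stub(), warn: sinon.stub(), error: sinon.stub() }
  subject = SandboxedModule.require modulePath, requires:
    "logger-sharelatex": logger
  { subject, logger }

# later: logger.info.calledWith(...).should.equal true
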
b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -14,7 +14,7 @@ describe "RedisManager", -> tk.freeze(new Date()) @RedisManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub(), info:-> } "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} "settings-sharelatex": @settings = { documentupdater: {logHashErrors: {write:true, read:true}} diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index b7364b00a4..d95e7497fb 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -16,7 +16,7 @@ describe "ShareJsUpdateManager", -> constructor: (@db) -> "./ShareJsDB" : @ShareJsDB = { mockDB: true } "redis-sharelatex" : createClient: () => @rclient = auth:-> - "logger-sharelatex": @logger = { log: sinon.stub() } + "logger-sharelatex": @logger = { log: sinon.stub(), info: -> } "./RealTimeRedisManager": @RealTimeRedisManager = {} globals: clearTimeout: @clearTimeout = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 383bd1848e..623e2eec0c 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -16,7 +16,7 @@ describe "UpdateManager", -> "./RealTimeRedisManager" : @RealTimeRedisManager = {} "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} "./HistoryManager" : @HistoryManager = {} - "logger-sharelatex": @logger = { log: sinon.stub() } + "logger-sharelatex": @logger = { log: sinon.stub(), info:-> } "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() From d9ec90f0e26a915dfdbb7f92be87798212440f98 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 4 Jan 2019 09:22:09 +0000 Subject: [PATCH 431/769] Move to metrics v2 and shrinkwrap --- services/document-updater/app.coffee | 1 + services/document-updater/npm-shrinkwrap.json | 2087 +++++++++++++++++ services/document-updater/package.json | 2 +- 3 files changed, 2089 insertions(+), 1 deletion(-) create mode 100644 services/document-updater/npm-shrinkwrap.json diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index b8a6a63a3e..cbcaf23897 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -26,6 +26,7 @@ app.configure -> app.use(Metrics.http.monitor(logger)); app.use express.bodyParser() app.use app.router +Metrics.injectMetricsRoute(app) DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json new file mode 100644 index 0000000000..a9c29cbc70 --- /dev/null +++ b/services/document-updater/npm-shrinkwrap.json @@ -0,0 +1,2087 @@ +{ + "name": "document-updater-sharelatex", + "version": "0.1.4", + "dependencies": { + "@google-cloud/common": { + "version": "0.27.0", + "from": "@google-cloud/common@>=0.27.0 <0.28.0", + 
"resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.27.0.tgz" + }, + "@google-cloud/debug-agent": { + "version": "3.0.1", + "from": "@google-cloud/debug-agent@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.0.1.tgz", + "dependencies": { + "coffeescript": { + "version": "2.3.2", + "from": "coffeescript@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.3.2.tgz" + } + } + }, + "@google-cloud/profiler": { + "version": "0.2.3", + "from": "@google-cloud/profiler@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", + "dependencies": { + "@google-cloud/common": { + "version": "0.26.2", + "from": "@google-cloud/common@>=0.26.0 <0.27.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz" + }, + "through2": { + "version": "3.0.0", + "from": "through2@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.0.tgz" + } + } + }, + "@google-cloud/projectify": { + "version": "0.3.2", + "from": "@google-cloud/projectify@>=0.3.2 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.2.tgz" + }, + "@google-cloud/promisify": { + "version": "0.3.1", + "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + }, + "@google-cloud/trace-agent": { + "version": "3.5.0", + "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.5.0.tgz", + "dependencies": { + "@google-cloud/common": { + "version": "0.28.0", + "from": "@google-cloud/common@>=0.28.0 <0.29.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.28.0.tgz" + }, + "methods": { + "version": "1.1.2", + "from": "methods@>=1.1.1 <2.0.0", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" + }, + "uuid": { + "version": "3.3.2", + "from": "uuid@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + } + } + }, + "@protobufjs/aspromise": { + "version": "1.1.2", + "from": "@protobufjs/aspromise@>=1.1.2 <2.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz" + }, + "@protobufjs/base64": { + "version": "1.1.2", + "from": "@protobufjs/base64@>=1.1.2 <2.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz" + }, + "@protobufjs/codegen": { + "version": "2.0.4", + "from": "@protobufjs/codegen@>=2.0.4 <3.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz" + }, + "@protobufjs/eventemitter": { + "version": "1.1.0", + "from": "@protobufjs/eventemitter@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz" + }, + "@protobufjs/fetch": { + "version": "1.1.0", + "from": "@protobufjs/fetch@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz" + }, + "@protobufjs/float": { + "version": "1.0.2", + "from": "@protobufjs/float@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz" + }, + "@protobufjs/inquire": { + "version": "1.1.0", + "from": "@protobufjs/inquire@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz" + }, + "@protobufjs/path": { + "version": "1.1.2", + "from": "@protobufjs/path@>=1.1.2 <2.0.0", 
+ "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz" + }, + "@protobufjs/pool": { + "version": "1.1.0", + "from": "@protobufjs/pool@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz" + }, + "@protobufjs/utf8": { + "version": "1.1.0", + "from": "@protobufjs/utf8@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" + }, + "@sindresorhus/is": { + "version": "0.13.0", + "from": "@sindresorhus/is@>=0.13.0 <0.14.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.13.0.tgz" + }, + "@sinonjs/commons": { + "version": "1.3.0", + "from": "@sinonjs/commons@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.3.0.tgz" + }, + "@sinonjs/formatio": { + "version": "3.1.0", + "from": "@sinonjs/formatio@>=3.1.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.1.0.tgz" + }, + "@sinonjs/samsam": { + "version": "3.0.2", + "from": "@sinonjs/samsam@>=3.0.2 <4.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.0.2.tgz" + }, + "@types/caseless": { + "version": "0.12.1", + "from": "@types/caseless@*", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.1.tgz" + }, + "@types/console-log-level": { + "version": "1.4.0", + "from": "@types/console-log-level@>=1.4.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz" + }, + "@types/duplexify": { + "version": "3.6.0", + "from": "@types/duplexify@>=3.5.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz" + }, + "@types/form-data": { + "version": "2.2.1", + "from": "@types/form-data@*", + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz" + }, + "@types/long": { + "version": "4.0.0", + "from": "@types/long@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz" + }, + "@types/node": { + "version": "10.12.18", + "from": "@types/node@*", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz" + }, + "@types/request": { + "version": "2.48.1", + "from": "@types/request@>=2.47.0 <3.0.0", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.1.tgz" + }, + "@types/semver": { + "version": "5.5.0", + "from": "@types/semver@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz" + }, + "@types/tough-cookie": { + "version": "2.3.4", + "from": "@types/tough-cookie@*", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.4.tgz" + }, + "abbrev": { + "version": "1.1.1", + "from": "abbrev@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" + }, + "acorn": { + "version": "5.7.3", + "from": "acorn@>=5.0.3 <6.0.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz" + }, + "agent-base": { + "version": "4.2.1", + "from": "agent-base@>=4.1.0 <5.0.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz" + }, + "ajv": { + "version": "6.6.2", + "from": "ajv@>=6.5.5 <7.0.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.6.2.tgz" + }, + "ansi-regex": { + "version": "0.2.1", + "from": "ansi-regex@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz" + }, + "ansi-styles": { + "version": "1.1.0", + "from": "ansi-styles@>=1.1.0 <2.0.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz" + }, + "argparse": { + "version": "0.1.16", + "from": "argparse@>=0.1.11 <0.2.0", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", + "dependencies": { + "underscore": { + "version": "1.7.0", + "from": "underscore@>=1.7.0 <1.8.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" + }, + "underscore.string": { + "version": "2.4.0", + "from": "underscore.string@>=2.4.0 <2.5.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz" + } + } + }, + "array-from": { + "version": "2.1.1", + "from": "array-from@>=2.1.1 <3.0.0", + "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz" + }, + "arrify": { + "version": "1.0.1", + "from": "arrify@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + }, + "asn1": { + "version": "0.1.11", + "from": "asn1@0.1.11", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" + }, + "assert-plus": { + "version": "0.1.5", + "from": "assert-plus@>=0.1.5 <0.2.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz" + }, + "assertion-error": { + "version": "1.1.0", + "from": "assertion-error@^1.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz" + }, + "async": { + "version": "2.6.1", + "from": "async@>=2.5.0 <3.0.0", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.1.tgz" + }, + "async-listener": { + "version": "0.6.10", + "from": "async-listener@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz" + }, + "asynckit": { + "version": "0.4.0", + "from": "asynckit@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + }, + "aws-sign": { + "version": "0.3.0", + "from": "aws-sign@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.3.0.tgz" + }, + "aws-sign2": { + "version": "0.7.0", + "from": "aws-sign2@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz" + }, + "aws4": { + "version": "1.8.0", + "from": "aws4@>=1.8.0 <2.0.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz" + }, + "axios": { + "version": "0.18.0", + "from": "axios@>=0.18.0 <0.19.0", + "resolved": "http://registry.npmjs.org/axios/-/axios-0.18.0.tgz" + }, + "balanced-match": { + "version": "1.0.0", + "from": "balanced-match@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz" + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "from": "bcrypt-pbkdf@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz" + }, + "bignumber.js": { + "version": "7.2.1", + "from": "bignumber.js@>=7.0.0 <8.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz" + }, + "bindings": { + "version": "1.3.1", + "from": "bindings@>=1.2.1 <2.0.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.3.1.tgz" + }, + "bintrees": { + "version": "1.0.1", + "from": "bintrees@1.0.1", + "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz" + }, + "bluebird": { + "version": "3.5.3", + "from": "bluebird@>=3.3.4 <4.0.0", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.3.tgz" + }, + "boom": { + "version": "0.4.2", + "from": "boom@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz" + }, + 
"brace-expansion": { + "version": "1.1.11", + "from": "brace-expansion@>=1.1.7 <2.0.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" + }, + "browser-stdout": { + "version": "1.3.1", + "from": "browser-stdout@1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "dev": true + }, + "buffer-crc32": { + "version": "0.2.1", + "from": "buffer-crc32@0.2.1", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz" + }, + "buffer-equal-constant-time": { + "version": "1.0.1", + "from": "buffer-equal-constant-time@1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" + }, + "builtin-modules": { + "version": "3.0.0", + "from": "builtin-modules@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.0.0.tgz" + }, + "bunyan": { + "version": "0.22.3", + "from": "bunyan@>=0.22.1 <0.23.0", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-0.22.3.tgz", + "dev": true + }, + "buster-core": { + "version": "0.6.4", + "from": "buster-core@0.6.4", + "resolved": "https://registry.npmjs.org/buster-core/-/buster-core-0.6.4.tgz" + }, + "buster-format": { + "version": "0.5.6", + "from": "buster-format@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/buster-format/-/buster-format-0.5.6.tgz" + }, + "bytes": { + "version": "0.2.0", + "from": "bytes@0.2.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.0.tgz" + }, + "caseless": { + "version": "0.12.0", + "from": "caseless@>=0.12.0 <0.13.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz" + }, + "chai": { + "version": "3.5.0", + "from": "chai@>=3.5.0 <4.0.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", + "dev": true, + "dependencies": { + "type-detect": { + "version": "1.0.0", + "from": "type-detect@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-1.0.0.tgz", + "dev": true + } + } + }, + "chai-spies": { + "version": "0.7.1", + "from": "chai-spies@>=0.7.1 <0.8.0", + "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", + "dev": true + }, + "chalk": { + "version": "0.5.1", + "from": "chalk@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz" + }, + "check-error": { + "version": "1.0.2", + "from": "check-error@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" + }, + "cluster-key-slot": { + "version": "1.0.12", + "from": "cluster-key-slot@>=1.0.6 <2.0.0", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.12.tgz" + }, + "coffee-script": { + "version": "1.7.1", + "from": "coffee-script@>=1.7.0 <1.8.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" + }, + "colors": { + "version": "0.6.2", + "from": "colors@>=0.6.2 <0.7.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz" + }, + "combined-stream": { + "version": "0.0.7", + "from": "combined-stream@>=0.0.4 <0.1.0", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz" + }, + "commander": { + "version": "1.2.0", + "from": "commander@1.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-1.2.0.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "concat-map@0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + }, + "connect": { + "version": 
"2.8.4", + "from": "connect@2.8.4", + "resolved": "https://registry.npmjs.org/connect/-/connect-2.8.4.tgz" + }, + "console-log-level": { + "version": "1.4.0", + "from": "console-log-level@>=1.4.0 <2.0.0", + "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.0.tgz" + }, + "continuation-local-storage": { + "version": "3.2.1", + "from": "continuation-local-storage@>=3.2.1 <4.0.0", + "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz" + }, + "cookie": { + "version": "0.1.0", + "from": "cookie@0.1.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz" + }, + "cookie-jar": { + "version": "0.3.0", + "from": "cookie-jar@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.3.0.tgz" + }, + "cookie-signature": { + "version": "1.0.1", + "from": "cookie-signature@1.0.1", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz" + }, + "core-util-is": { + "version": "1.0.2", + "from": "core-util-is@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "cryptiles": { + "version": "0.2.2", + "from": "cryptiles@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz" + }, + "ctype": { + "version": "0.5.3", + "from": "ctype@0.5.3", + "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz" + }, + "dashdash": { + "version": "1.14.1", + "from": "dashdash@>=1.12.0 <2.0.0", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "dependencies": { + "assert-plus": { + "version": "1.0.0", + "from": "assert-plus@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + } + } + }, + "dateformat": { + "version": "1.0.2-1.2.3", + "from": "dateformat@1.0.2-1.2.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz" + }, + "debug": { + "version": "4.1.1", + "from": "debug@*", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz" + }, + "deep-eql": { + "version": "0.1.3", + "from": "deep-eql@>=0.1.3 <0.2.0", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", + "dev": true, + "dependencies": { + "type-detect": { + "version": "0.1.1", + "from": "type-detect@0.1.1", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", + "dev": true + } + } + }, + "delay": { + "version": "4.1.0", + "from": "delay@>=4.0.1 <5.0.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-4.1.0.tgz" + }, + "delayed-stream": { + "version": "0.0.5", + "from": "delayed-stream@0.0.5", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" + }, + "denque": { + "version": "1.4.0", + "from": "denque@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.0.tgz" + }, + "diff": { + "version": "3.5.0", + "from": "diff@3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz" + }, + "dtrace-provider": { + "version": "0.2.8", + "from": "dtrace-provider@0.2.8", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.2.8.tgz", + "dev": true, + "optional": true + }, + "duplexify": { + "version": "3.6.1", + "from": "duplexify@>=3.6.0 <4.0.0", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz" + }, + "ecc-jsbn": { + "version": "0.1.2", + "from": "ecc-jsbn@>=0.1.1 <0.2.0", + "resolved": 
"https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz" + }, + "ecdsa-sig-formatter": { + "version": "1.0.10", + "from": "ecdsa-sig-formatter@1.0.10", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.10.tgz" + }, + "emitter-listener": { + "version": "1.1.2", + "from": "emitter-listener@>=1.1.1 <2.0.0", + "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz" + }, + "end-of-stream": { + "version": "1.4.1", + "from": "end-of-stream@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz" + }, + "ent": { + "version": "2.2.0", + "from": "ent@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz" + }, + "es6-promise": { + "version": "4.2.5", + "from": "es6-promise@>=4.0.3 <5.0.0", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.5.tgz" + }, + "es6-promisify": { + "version": "5.0.0", + "from": "es6-promisify@>=5.0.0 <6.0.0", + "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz" + }, + "escape-string-regexp": { + "version": "1.0.5", + "from": "escape-string-regexp@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + }, + "esprima": { + "version": "1.0.4", + "from": "esprima@>=1.0.2 <1.1.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" + }, + "eventemitter2": { + "version": "0.4.14", + "from": "eventemitter2@>=0.4.13 <0.5.0", + "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz" + }, + "exit": { + "version": "0.1.2", + "from": "exit@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" + }, + "express": { + "version": "3.3.4", + "from": "express@3.3.4", + "resolved": "https://registry.npmjs.org/express/-/express-3.3.4.tgz" + }, + "extend": { + "version": "3.0.2", + "from": "extend@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + }, + "extsprintf": { + "version": "1.3.0", + "from": "extsprintf@1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" + }, + "fast-deep-equal": { + "version": "2.0.1", + "from": "fast-deep-equal@>=2.0.1 <3.0.0", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz" + }, + "fast-json-stable-stringify": { + "version": "2.0.0", + "from": "fast-json-stable-stringify@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz" + }, + "findit2": { + "version": "2.2.3", + "from": "findit2@>=2.2.3 <3.0.0", + "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz" + }, + "findup-sync": { + "version": "0.1.3", + "from": "findup-sync@>=0.1.2 <0.2.0", + "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", + "dependencies": { + "glob": { + "version": "3.2.11", + "from": "glob@>=3.2.9 <3.3.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz" + }, + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "minimatch": { + "version": "0.3.0", + "from": "minimatch@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz" + } + } + }, + "flexbuffer": { + "version": "0.0.6", + "from": "flexbuffer@0.0.6", + "resolved": "https://registry.npmjs.org/flexbuffer/-/flexbuffer-0.0.6.tgz" + }, + "follow-redirects": { + 
"version": "1.6.1", + "from": "follow-redirects@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.1.tgz", + "dependencies": { + "debug": { + "version": "3.1.0", + "from": "debug@3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz" + }, + "ms": { + "version": "2.0.0", + "from": "ms@2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + } + } + }, + "forever-agent": { + "version": "0.5.2", + "from": "forever-agent@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.5.2.tgz" + }, + "form-data": { + "version": "0.1.4", + "from": "form-data@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz", + "dependencies": { + "async": { + "version": "0.9.2", + "from": "async@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz" + } + } + }, + "formidable": { + "version": "1.0.14", + "from": "formidable@1.0.14", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.0.14.tgz" + }, + "fresh": { + "version": "0.1.0", + "from": "fresh@0.1.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.1.0.tgz" + }, + "fs-extra": { + "version": "0.9.1", + "from": "fs-extra@>=0.9.1 <0.10.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.9.1.tgz", + "dependencies": { + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" + }, + "ncp": { + "version": "0.5.1", + "from": "ncp@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-0.5.1.tgz" + } + } + }, + "fs.realpath": { + "version": "1.0.0", + "from": "fs.realpath@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "dev": true + }, + "gaxios": { + "version": "1.0.6", + "from": "gaxios@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.0.6.tgz" + }, + "gcp-metadata": { + "version": "0.9.3", + "from": "gcp-metadata@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + }, + "get-func-name": { + "version": "2.0.0", + "from": "get-func-name@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz" + }, + "getobject": { + "version": "0.1.0", + "from": "getobject@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/getobject/-/getobject-0.1.0.tgz" + }, + "getpass": { + "version": "0.1.7", + "from": "getpass@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "dependencies": { + "assert-plus": { + "version": "1.0.0", + "from": "assert-plus@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + } + } + }, + "glob": { + "version": "6.0.4", + "from": "glob@>=6.0.1 <7.0.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz" + }, + "google-auth-library": { + "version": "2.0.2", + "from": "google-auth-library@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", + "dependencies": { + "gcp-metadata": { + "version": "0.7.0", + "from": "gcp-metadata@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + }, + "lru-cache": { + "version": "5.1.1", + "from": "lru-cache@>=5.0.0 <6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" + } + } + }, + "google-p12-pem": { + 
"version": "1.0.3", + "from": "google-p12-pem@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.3.tgz" + }, + "graceful-fs": { + "version": "1.2.3", + "from": "graceful-fs@>=1.2.0 <1.3.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz" + }, + "growl": { + "version": "1.10.5", + "from": "growl@1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "dev": true + }, + "grunt": { + "version": "0.4.5", + "from": "grunt@>=0.4.5 <0.5.0", + "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.5.tgz", + "dependencies": { + "async": { + "version": "0.1.22", + "from": "async@>=0.1.22 <0.2.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz" + }, + "coffee-script": { + "version": "1.3.3", + "from": "coffee-script@>=1.3.3 <1.4.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz" + }, + "glob": { + "version": "3.1.21", + "from": "glob@>=3.1.21 <3.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz" + }, + "inherits": { + "version": "1.0.2", + "from": "inherits@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz" + }, + "lodash": { + "version": "0.9.2", + "from": "lodash@>=0.9.2 <0.10.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz" + }, + "minimatch": { + "version": "0.2.14", + "from": "minimatch@>=0.2.12 <0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" + }, + "rimraf": { + "version": "2.2.8", + "from": "rimraf@>=2.2.8 <2.3.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" + } + } + }, + "grunt-bunyan": { + "version": "0.5.0", + "from": "grunt-bunyan@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", + "dependencies": { + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + } + } + }, + "grunt-contrib-clean": { + "version": "0.6.0", + "from": "grunt-contrib-clean@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.6.0.tgz", + "dependencies": { + "rimraf": { + "version": "2.2.8", + "from": "rimraf@>=2.2.1 <2.3.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" + } + } + }, + "grunt-contrib-coffee": { + "version": "0.11.1", + "from": "grunt-contrib-coffee@>=0.11.0 <0.12.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.11.1.tgz", + "dependencies": { + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + } + } + }, + "grunt-execute": { + "version": "0.2.2", + "from": "grunt-execute@>=0.2.2 <0.3.0", + "resolved": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz" + }, + "grunt-legacy-log": { + "version": "0.1.3", + "from": "grunt-legacy-log@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz", + "dependencies": { + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "underscore.string": { + "version": "2.3.3", + "from": "underscore.string@>=2.3.3 <2.4.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" + } + } + }, + "grunt-legacy-log-utils": { + "version": "0.1.1", + "from": 
"grunt-legacy-log-utils@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz", + "dependencies": { + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "underscore.string": { + "version": "2.3.3", + "from": "underscore.string@>=2.3.3 <2.4.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" + } + } + }, + "grunt-legacy-util": { + "version": "0.2.0", + "from": "grunt-legacy-util@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz", + "dependencies": { + "async": { + "version": "0.1.22", + "from": "async@>=0.1.22 <0.2.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz" + }, + "lodash": { + "version": "0.9.2", + "from": "lodash@>=0.9.2 <0.10.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz" + } + } + }, + "grunt-mocha-test": { + "version": "0.11.0", + "from": "grunt-mocha-test@>=0.11.0 <0.12.0", + "resolved": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.11.0.tgz", + "dependencies": { + "commander": { + "version": "2.0.0", + "from": "commander@2.0.0", + "resolved": "http://registry.npmjs.org/commander/-/commander-2.0.0.tgz" + }, + "diff": { + "version": "1.0.7", + "from": "diff@1.0.7", + "resolved": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz" + }, + "glob": { + "version": "3.2.3", + "from": "glob@3.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz" + }, + "graceful-fs": { + "version": "2.0.3", + "from": "graceful-fs@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz" + }, + "growl": { + "version": "1.7.0", + "from": "growl@>=1.7.0 <1.8.0", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz" + }, + "minimatch": { + "version": "0.2.14", + "from": "minimatch@~0.2.11", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" + }, + "mocha": { + "version": "1.20.1", + "from": "mocha@>=1.20.0 <1.21.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-1.20.1.tgz" + } + } + }, + "gtoken": { + "version": "2.3.0", + "from": "gtoken@>=2.3.0 <3.0.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.0.tgz", + "dependencies": { + "mime": { + "version": "2.4.0", + "from": "mime@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz" + }, + "pify": { + "version": "3.0.0", + "from": "pify@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz" + } + } + }, + "har-schema": { + "version": "2.0.0", + "from": "har-schema@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz" + }, + "har-validator": { + "version": "5.1.3", + "from": "har-validator@>=5.1.0 <5.2.0", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz" + }, + "has-ansi": { + "version": "0.1.0", + "from": "has-ansi@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz" + }, + "has-flag": { + "version": "3.0.0", + "from": "has-flag@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + }, + "hawk": { + "version": "1.0.0", + "from": "hawk@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.0.0.tgz" + }, + "he": { + "version": "1.1.1", + "from": "he@1.1.1", + "resolved": 
"https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "dev": true + }, + "hex2dec": { + "version": "1.1.1", + "from": "hex2dec@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.1.tgz" + }, + "hoek": { + "version": "0.9.1", + "from": "hoek@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz" + }, + "hooker": { + "version": "0.2.3", + "from": "hooker@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz" + }, + "http-signature": { + "version": "0.10.1", + "from": "http-signature@>=0.10.0 <0.11.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.1.tgz" + }, + "https-proxy-agent": { + "version": "2.2.1", + "from": "https-proxy-agent@>=2.2.1 <3.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", + "dependencies": { + "debug": { + "version": "3.2.6", + "from": "debug@>=3.1.0 <4.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz" + } + } + }, + "iconv-lite": { + "version": "0.2.11", + "from": "iconv-lite@>=0.2.11 <0.3.0", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz" + }, + "inflight": { + "version": "1.0.6", + "from": "inflight@>=1.0.4 <2.0.0", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "inherits@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "ioredis": { + "version": "3.2.2", + "from": "ioredis@>=3.2.1 <4.0.0", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-3.2.2.tgz", + "dependencies": { + "debug": { + "version": "2.6.9", + "from": "debug@>=2.6.9 <3.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + }, + "ms": { + "version": "2.0.0", + "from": "ms@2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + } + } + }, + "is": { + "version": "3.3.0", + "from": "is@>=3.2.0 <4.0.0", + "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz" + }, + "is-buffer": { + "version": "1.1.6", + "from": "is-buffer@>=1.1.5 <2.0.0", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" + }, + "is-typedarray": { + "version": "1.0.0", + "from": "is-typedarray@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "isstream@>=0.1.2 <0.2.0", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "jade": { + "version": "0.26.3", + "from": "jade@0.26.3", + "resolved": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", + "dependencies": { + "commander": { + "version": "0.6.1", + "from": "commander@0.6.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz" + }, + "mkdirp": { + "version": "0.3.0", + "from": "mkdirp@0.3.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz" + } + } + }, + "js-yaml": { + "version": "2.0.5", + "from": "js-yaml@>=2.0.5 <2.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz" + }, + "jsbn": { + "version": "0.1.1", + "from": "jsbn@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz" + }, + "json-bigint": { + "version": "0.3.0", + "from": "json-bigint@>=0.3.0 <0.4.0", + "resolved": 
"https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz" + }, + "json-schema": { + "version": "0.2.3", + "from": "json-schema@0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + }, + "json-schema-traverse": { + "version": "0.4.1", + "from": "json-schema-traverse@>=0.4.1 <0.5.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "json-stringify-safe@5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "jsonfile": { + "version": "1.1.1", + "from": "jsonfile@>=1.1.0 <1.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-1.1.1.tgz" + }, + "jsprim": { + "version": "1.4.1", + "from": "jsprim@>=1.2.2 <2.0.0", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "dependencies": { + "assert-plus": { + "version": "1.0.0", + "from": "assert-plus@1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + } + } + }, + "just-extend": { + "version": "4.0.2", + "from": "just-extend@>=4.0.2 <5.0.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.0.2.tgz" + }, + "jwa": { + "version": "1.1.6", + "from": "jwa@>=1.1.5 <2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.1.6.tgz" + }, + "jws": { + "version": "3.1.5", + "from": "jws@>=3.1.5 <4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.1.5.tgz" + }, + "keypress": { + "version": "0.1.0", + "from": "keypress@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz" + }, + "lodash": { + "version": "4.17.11", + "from": "lodash@>=4.17.10 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz" + }, + "lodash.assign": { + "version": "4.2.0", + "from": "lodash.assign@>=4.2.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz" + }, + "lodash.bind": { + "version": "4.2.1", + "from": "lodash.bind@>=4.2.1 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.bind/-/lodash.bind-4.2.1.tgz" + }, + "lodash.clone": { + "version": "4.5.0", + "from": "lodash.clone@>=4.5.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.clone/-/lodash.clone-4.5.0.tgz" + }, + "lodash.clonedeep": { + "version": "4.5.0", + "from": "lodash.clonedeep@>=4.5.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz" + }, + "lodash.defaults": { + "version": "4.2.0", + "from": "lodash.defaults@>=4.2.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz" + }, + "lodash.difference": { + "version": "4.5.0", + "from": "lodash.difference@>=4.5.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.difference/-/lodash.difference-4.5.0.tgz" + }, + "lodash.flatten": { + "version": "4.4.0", + "from": "lodash.flatten@>=4.4.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz" + }, + "lodash.foreach": { + "version": "4.5.0", + "from": "lodash.foreach@>=4.5.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-4.5.0.tgz" + }, + "lodash.get": { + "version": "4.4.2", + "from": "lodash.get@>=4.4.2 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz" + }, + "lodash.isempty": { + "version": "4.4.0", + "from": "lodash.isempty@>=4.4.0 <5.0.0", + "resolved": 
"https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz" + }, + "lodash.keys": { + "version": "4.2.0", + "from": "lodash.keys@>=4.2.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-4.2.0.tgz" + }, + "lodash.noop": { + "version": "3.0.1", + "from": "lodash.noop@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/lodash.noop/-/lodash.noop-3.0.1.tgz" + }, + "lodash.partial": { + "version": "4.2.1", + "from": "lodash.partial@>=4.2.1 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.partial/-/lodash.partial-4.2.1.tgz" + }, + "lodash.pick": { + "version": "4.4.0", + "from": "lodash.pick@>=4.4.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz" + }, + "lodash.pickby": { + "version": "4.6.0", + "from": "lodash.pickby@>=4.6.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz" + }, + "lodash.sample": { + "version": "4.2.1", + "from": "lodash.sample@>=4.2.1 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.sample/-/lodash.sample-4.2.1.tgz" + }, + "lodash.shuffle": { + "version": "4.2.0", + "from": "lodash.shuffle@>=4.2.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.shuffle/-/lodash.shuffle-4.2.0.tgz" + }, + "lodash.values": { + "version": "4.3.0", + "from": "lodash.values@>=4.3.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.values/-/lodash.values-4.3.0.tgz" + }, + "logger-sharelatex": { + "version": "1.5.6", + "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", + "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#b2956ec56b582b9f4fc8fdda8dc00c06e77c5537", + "dependencies": { + "bunyan": { + "version": "1.5.1", + "from": "bunyan@1.5.1", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz" + }, + "chai": { + "version": "4.2.0", + "from": "chai@latest", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz" + }, + "coffee-script": { + "version": "1.12.4", + "from": "coffee-script@1.12.4", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz" + }, + "deep-eql": { + "version": "3.0.1", + "from": "deep-eql@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz" + }, + "dtrace-provider": { + "version": "0.6.0", + "from": "dtrace-provider@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "optional": true + }, + "sandboxed-module": { + "version": "2.0.3", + "from": "sandboxed-module@latest", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz" + }, + "sinon": { + "version": "7.2.2", + "from": "sinon@latest", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.2.2.tgz" + }, + "supports-color": { + "version": "5.5.0", + "from": "supports-color@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" + }, + "timekeeper": { + "version": "1.0.0", + "from": "timekeeper@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-1.0.0.tgz" + } + } + }, + "lolex": { + "version": "3.0.0", + "from": "lolex@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.0.0.tgz" + }, + "long": { + "version": "4.0.0", + "from": "long@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz" + }, + "lru-cache": { + "version": "2.7.3", + "from": "lru-cache@>=2.0.0 <3.0.0", + "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz" + }, + "lsmod": { + "version": "1.0.0", + "from": "lsmod@1.0.0", + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz" + }, + "lynx": { + "version": "0.0.11", + "from": "lynx@0.0.11", + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.0.11.tgz" + }, + "mersenne": { + "version": "0.0.4", + "from": "mersenne@>=0.0.3 <0.1.0", + "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz" + }, + "methods": { + "version": "0.0.1", + "from": "methods@0.0.1", + "resolved": "https://registry.npmjs.org/methods/-/methods-0.0.1.tgz" + }, + "metrics-sharelatex": { + "version": "2.0.12", + "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.12", + "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#3ac1621ef049e2f2d88a83b3a41011333d609662", + "dependencies": { + "coffee-script": { + "version": "1.6.0", + "from": "coffee-script@1.6.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + }, + "lynx": { + "version": "0.1.1", + "from": "lynx@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz" + }, + "underscore": { + "version": "1.6.0", + "from": "underscore@>=1.6.0 <1.7.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" + } + } + }, + "mime": { + "version": "1.2.11", + "from": "mime@>=1.2.9 <1.3.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + }, + "mime-db": { + "version": "1.37.0", + "from": "mime-db@>=1.37.0 <1.38.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz" + }, + "mime-types": { + "version": "2.1.21", + "from": "mime-types@>=2.1.19 <2.2.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz" + }, + "minimatch": { + "version": "3.0.4", + "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" + }, + "minimist": { + "version": "0.0.8", + "from": "minimist@0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + }, + "mkdirp": { + "version": "0.3.5", + "from": "mkdirp@>=0.3.5 <0.4.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" + }, + "mocha": { + "version": "5.2.0", + "from": "mocha@>=5.0.1 <6.0.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", + "dev": true, + "dependencies": { + "commander": { + "version": "2.15.1", + "from": "commander@2.15.1", + "resolved": "http://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "dev": true + }, + "debug": { + "version": "3.1.0", + "from": "debug@3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "dev": true + }, + "glob": { + "version": "7.1.2", + "from": "glob@7.1.2", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", + "dev": true + }, + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "dev": true + }, + "ms": { + "version": "2.0.0", + "from": "ms@2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "dev": true + }, + "supports-color": { + "version": "5.4.0", + "from": "supports-color@5.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", + "dev": true + } + } + }, + "module-details-from-path": { + "version": "1.0.3", + "from": "module-details-from-path@>=1.0.3 <2.0.0", + "resolved": 
"https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" + }, + "ms": { + "version": "2.1.1", + "from": "ms@>=2.1.1 <3.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz" + }, + "mv": { + "version": "2.1.1", + "from": "mv@~2", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "optional": true, + "dependencies": { + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "optional": true + } + } + }, + "nan": { + "version": "2.12.1", + "from": "nan@>=2.0.8 <3.0.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz" + }, + "ncp": { + "version": "2.0.0", + "from": "ncp@~2.0.0", + "resolved": "http://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "optional": true + }, + "nise": { + "version": "1.4.8", + "from": "nise@>=1.4.7 <2.0.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.4.8.tgz", + "dependencies": { + "lolex": { + "version": "2.7.5", + "from": "lolex@>=2.3.2 <3.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.7.5.tgz" + } + } + }, + "node-fetch": { + "version": "2.3.0", + "from": "node-fetch@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.3.0.tgz" + }, + "node-forge": { + "version": "0.7.6", + "from": "node-forge@>=0.7.5 <0.8.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.6.tgz" + }, + "nopt": { + "version": "1.0.10", + "from": "nopt@>=1.0.10 <1.1.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz" + }, + "oauth-sign": { + "version": "0.3.0", + "from": "oauth-sign@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.3.0.tgz" + }, + "once": { + "version": "1.4.0", + "from": "once@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + }, + "p-limit": { + "version": "2.1.0", + "from": "p-limit@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.1.0.tgz" + }, + "p-try": { + "version": "2.0.0", + "from": "p-try@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.0.0.tgz" + }, + "parse-duration": { + "version": "0.1.1", + "from": "parse-duration@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz" + }, + "parse-ms": { + "version": "2.0.0", + "from": "parse-ms@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.0.0.tgz" + }, + "path-is-absolute": { + "version": "1.0.1", + "from": "path-is-absolute@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + }, + "path-parse": { + "version": "1.0.6", + "from": "path-parse@>=1.0.6 <2.0.0", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz" + }, + "path-to-regexp": { + "version": "1.7.0", + "from": "path-to-regexp@>=1.7.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz" + }, + "pathval": { + "version": "1.1.0", + "from": "pathval@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz" + }, + "pause": { + "version": "0.0.1", + "from": "pause@0.0.1", + "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz" + }, + "performance-now": { + "version": "2.1.0", + "from": "performance-now@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz" + }, + "pify": { + "version": "4.0.1", + "from": "pify@>=4.0.1 
<5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz" + }, + "pretty-ms": { + "version": "4.0.0", + "from": "pretty-ms@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz" + }, + "process-nextick-args": { + "version": "2.0.0", + "from": "process-nextick-args@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz" + }, + "prom-client": { + "version": "11.2.1", + "from": "prom-client@>=11.1.3 <12.0.0", + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.2.1.tgz" + }, + "protobufjs": { + "version": "6.8.8", + "from": "protobufjs@>=6.8.6 <6.9.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz" + }, + "psl": { + "version": "1.1.31", + "from": "psl@>=1.1.24 <2.0.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz" + }, + "punycode": { + "version": "2.1.1", + "from": "punycode@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" + }, + "q": { + "version": "0.9.2", + "from": "q@0.9.2", + "resolved": "https://registry.npmjs.org/q/-/q-0.9.2.tgz" + }, + "qs": { + "version": "0.6.5", + "from": "qs@0.6.5", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.5.tgz" + }, + "range-parser": { + "version": "0.0.4", + "from": "range-parser@0.0.4", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" + }, + "raven": { + "version": "1.2.1", + "from": "raven@>=1.1.3 <2.0.0", + "resolved": "https://registry.npmjs.org/raven/-/raven-1.2.1.tgz", + "dependencies": { + "cookie": { + "version": "0.3.1", + "from": "cookie@0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz" + } + } + }, + "readable-stream": { + "version": "2.3.6", + "from": "readable-stream@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "dependencies": { + "isarray": { + "version": "1.0.0", + "from": "isarray@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + } + } + }, + "redis": { + "version": "0.12.1", + "from": "redis@0.12.1", + "resolved": "https://registry.npmjs.org/redis/-/redis-0.12.1.tgz" + }, + "redis-commands": { + "version": "1.4.0", + "from": "redis-commands@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.4.0.tgz" + }, + "redis-parser": { + "version": "2.6.0", + "from": "redis-parser@>=2.4.0 <3.0.0", + "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-2.6.0.tgz" + }, + "redis-sentinel": { + "version": "0.1.1", + "from": "redis-sentinel@0.1.1", + "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz", + "dependencies": { + "redis": { + "version": "0.11.0", + "from": "redis@>=0.11.0 <0.12.0", + "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz" + } + } + }, + "redis-sharelatex": { + "version": "1.0.4", + "from": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4", + "resolved": "git+https://github.com/sharelatex/redis-sharelatex.git#ca4e906559c1405d132e8edd7db763d64a57be62", + "dependencies": { + "coffee-script": { + "version": "1.8.0", + "from": "coffee-script@1.8.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz" + }, + "underscore": { + "version": "1.7.0", + "from": "underscore@1.7.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" + } + } + }, + "request": { + "version": "2.25.0", + "from": 
"request@2.25.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.25.0.tgz", + "dependencies": { + "node-uuid": { + "version": "1.4.8", + "from": "node-uuid@>=1.4.0 <1.5.0", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz" + } + } + }, + "requestretry": { + "version": "1.13.0", + "from": "requestretry@>=1.12.0 <2.0.0", + "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-1.13.0.tgz", + "dependencies": { + "assert-plus": { + "version": "1.0.0", + "from": "assert-plus@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + }, + "combined-stream": { + "version": "1.0.7", + "from": "combined-stream@>=1.0.6 <1.1.0", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz" + }, + "delayed-stream": { + "version": "1.0.0", + "from": "delayed-stream@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "forever-agent@>=0.6.1 <0.7.0", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "2.3.3", + "from": "form-data@>=2.3.2 <2.4.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz" + }, + "http-signature": { + "version": "1.2.0", + "from": "http-signature@>=1.2.0 <1.3.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz" + }, + "oauth-sign": { + "version": "0.9.0", + "from": "oauth-sign@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz" + }, + "qs": { + "version": "6.5.2", + "from": "qs@>=6.5.2 <6.6.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz" + }, + "request": { + "version": "2.88.0", + "from": "request@>=2.74.0 <3.0.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz" + }, + "tunnel-agent": { + "version": "0.6.0", + "from": "tunnel-agent@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz" + }, + "uuid": { + "version": "3.3.2", + "from": "uuid@>=3.3.2 <4.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + } + } + }, + "require-in-the-middle": { + "version": "3.1.0", + "from": "require-in-the-middle@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-3.1.0.tgz" + }, + "require-like": { + "version": "0.1.2", + "from": "require-like@0.1.2", + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" + }, + "resolve": { + "version": "1.9.0", + "from": "resolve@>=1.5.0 <2.0.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.9.0.tgz" + }, + "retry-axios": { + "version": "0.3.2", + "from": "retry-axios@0.3.2", + "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz" + }, + "retry-request": { + "version": "4.0.0", + "from": "retry-request@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.0.0.tgz" + }, + "rimraf": { + "version": "2.4.5", + "from": "rimraf@>=2.4.0 <2.5.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz" + }, + "safe-buffer": { + "version": "5.1.2", + "from": "safe-buffer@>=5.1.1 <5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + }, + "safe-json-stringify": { + "version": "1.2.0", + "from": "safe-json-stringify@>=1.0.0 <2.0.0", + "resolved": 
"https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", + "optional": true + }, + "safer-buffer": { + "version": "2.1.2", + "from": "safer-buffer@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" + }, + "sandboxed-module": { + "version": "0.2.2", + "from": "sandboxed-module@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", + "dependencies": { + "stack-trace": { + "version": "0.0.6", + "from": "stack-trace@0.0.6", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz" + } + } + }, + "semver": { + "version": "5.6.0", + "from": "semver@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz" + }, + "send": { + "version": "0.1.3", + "from": "send@0.1.3", + "resolved": "https://registry.npmjs.org/send/-/send-0.1.3.tgz" + }, + "settings-sharelatex": { + "version": "1.0.0", + "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", + "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", + "dependencies": { + "coffee-script": { + "version": "1.6.0", + "from": "coffee-script@1.6.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + } + } + }, + "shimmer": { + "version": "1.2.0", + "from": "shimmer@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.0.tgz" + }, + "sigmund": { + "version": "1.0.1", + "from": "sigmund@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" + }, + "sinon": { + "version": "1.5.2", + "from": "sinon@>=1.5.2 <1.6.0", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-1.5.2.tgz" + }, + "sntp": { + "version": "0.2.4", + "from": "sntp@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz" + }, + "source-map": { + "version": "0.6.1", + "from": "source-map@>=0.6.1 <0.7.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + }, + "split": { + "version": "1.0.1", + "from": "split@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz" + }, + "sshpk": { + "version": "1.16.0", + "from": "sshpk@>=1.7.0 <2.0.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.0.tgz", + "dependencies": { + "asn1": { + "version": "0.2.4", + "from": "asn1@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz" + }, + "assert-plus": { + "version": "1.0.0", + "from": "assert-plus@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + } + } + }, + "stack-trace": { + "version": "0.0.9", + "from": "stack-trace@0.0.9", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" + }, + "statsd-parser": { + "version": "0.0.4", + "from": "statsd-parser@>=0.0.4 <0.1.0", + "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" + }, + "stream-shift": { + "version": "1.0.0", + "from": "stream-shift@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz" + }, + "string_decoder": { + "version": "1.1.1", + "from": "string_decoder@>=1.1.1 <1.2.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + }, + "strip-ansi": { + "version": "0.3.0", + "from": "strip-ansi@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz" + }, + "supports-color": { + 
"version": "0.2.0", + "from": "supports-color@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz" + }, + "symbol-observable": { + "version": "1.2.0", + "from": "symbol-observable@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz" + }, + "tdigest": { + "version": "0.1.1", + "from": "tdigest@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz" + }, + "teeny-request": { + "version": "3.11.3", + "from": "teeny-request@>=3.11.1 <4.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "dependencies": { + "uuid": { + "version": "3.3.2", + "from": "uuid@>=3.3.2 <4.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + } + } + }, + "text-encoding": { + "version": "0.6.4", + "from": "text-encoding@>=0.6.4 <0.7.0", + "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz" + }, + "through": { + "version": "2.3.8", + "from": "through@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz" + }, + "through2": { + "version": "2.0.5", + "from": "through2@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" + }, + "timekeeper": { + "version": "2.1.2", + "from": "timekeeper@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.1.2.tgz", + "dev": true + }, + "tough-cookie": { + "version": "2.4.3", + "from": "tough-cookie@>=2.4.3 <2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "dependencies": { + "punycode": { + "version": "1.4.1", + "from": "punycode@>=1.4.1 <2.0.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + } + } + }, + "tunnel-agent": { + "version": "0.3.0", + "from": "tunnel-agent@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.3.0.tgz" + }, + "tweetnacl": { + "version": "0.14.5", + "from": "tweetnacl@>=0.14.0 <0.15.0", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" + }, + "type-detect": { + "version": "4.0.8", + "from": "type-detect@>=4.0.5 <5.0.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" + }, + "uid2": { + "version": "0.0.2", + "from": "uid2@0.0.2", + "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.2.tgz" + }, + "underscore": { + "version": "1.2.2", + "from": "underscore@1.2.2", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.2.tgz" + }, + "underscore.string": { + "version": "2.2.1", + "from": "underscore.string@>=2.2.1 <2.3.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz" + }, + "uri-js": { + "version": "4.2.2", + "from": "uri-js@>=4.2.2 <5.0.0", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "util-deprecate@>=1.0.1 <1.1.0", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + }, + "uuid": { + "version": "3.0.0", + "from": "uuid@3.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz" + }, + "verror": { + "version": "1.10.0", + "from": "verror@1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "dependencies": { + "assert-plus": { + "version": "1.0.0", + "from": "assert-plus@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" 
+ } + } + }, + "when": { + "version": "3.7.8", + "from": "when@>=3.7.7 <4.0.0", + "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz" + }, + "which": { + "version": "1.0.9", + "from": "which@>=1.0.5 <1.1.0", + "resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz" + }, + "wrappy": { + "version": "1.0.2", + "from": "wrappy@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "xtend@>=4.0.1 <4.1.0", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + }, + "yallist": { + "version": "3.0.3", + "from": "yallist@>=3.0.2 <4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz" + } + } +} diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 006509bec7..a497ea8844 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -25,7 +25,7 @@ "express": "3.3.4", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", "lynx": "0.0.11", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.8", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.12", "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4", "request": "2.25.0", "requestretry": "^1.12.0", From 0aa7315c27a372b8e2b5d5c59972670dffd17da9 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 4 Jan 2019 09:24:09 +0000 Subject: [PATCH 432/769] Bump buildscripts to 1.1.10 --- services/document-updater/Dockerfile | 2 +- services/document-updater/Makefile | 2 +- services/document-updater/buildscript.txt | 2 +- services/document-updater/docker-compose.ci.yml | 2 +- services/document-updater/docker-compose.yml | 2 +- services/document-updater/package.json | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index aabf01ad91..59f5e61889 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -19,4 +19,4 @@ COPY --from=app /app /app WORKDIR /app USER node -CMD ["node","app.js"] +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 09b26a6055..336a84f5d6 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.9 +# Version: 1.1.10 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index c9ccb323b4..a545422ca4 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -1,4 +1,4 @@ ---script-version=1.1.9 +--script-version=1.1.10 document-updater --node-version=6.9.5 --acceptance-creds=None diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 17c4ddd2bf..5ab90e1825 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.9 +# Version: 1.1.10 version: "2" diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index dcbc14e683..aeceafb3f3 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.9 +# Version: 1.1.10 version: "2" diff --git a/services/document-updater/package.json b/services/document-updater/package.json index a497ea8844..3f5b5cb126 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -7,7 +7,7 @@ "url": "https://github.com/sharelatex/document-updater-sharelatex.git" }, "scripts": { - "compile:app": "([ -e app/coffee ] && coffee $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", + "compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", From 05a80c7cba67c98471c47a6030641ca9f810fea6 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 4 Jan 2019 09:31:00 +0000 Subject: [PATCH 433/769] Add app.map to .gitignore --- services/document-updater/.gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore index ad21f261b4..df0820fb06 100644 --- a/services/document-updater/.gitignore +++ b/services/document-updater/.gitignore @@ -39,6 +39,7 @@ Thumbs.db app.js app/js/* +app.map test/unit/js/* test/acceptance/js/* From a783c6b3cca2a292ffaadd9c089e4d285c1d053c Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 4 Jan 2019 09:33:08 +0000 Subject: [PATCH 434/769] Bump logger to 1.5.9 --- services/document-updater/npm-shrinkwrap.json | 6 +++--- services/document-updater/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index a9c29cbc70..83661f0574 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -1292,9 +1292,9 @@ "resolved": "https://registry.npmjs.org/lodash.values/-/lodash.values-4.3.0.tgz" }, "logger-sharelatex": { - "version": "1.5.6", - "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", - "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#b2956ec56b582b9f4fc8fdda8dc00c06e77c5537", + "version": "1.5.9", + "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.9", + "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#e8e1b95052f62e107336053e4a983f81cdbdf589", "dependencies": { "bunyan": { "version": "1.5.1", diff --git a/services/document-updater/package.json 
b/services/document-updater/package.json index 3f5b5cb126..d8dfc91bf8 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -23,7 +23,7 @@ "async": "^2.5.0", "coffee-script": "~1.7.0", "express": "3.3.4", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.9", "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.12", "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4", From 5db1913cec345e8b480e515e88ab7c2202bed19e Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Tue, 8 Jan 2019 15:00:26 +0000 Subject: [PATCH 435/769] Bump settings to v1.1.0 --- services/document-updater/npm-shrinkwrap.json | 6 +++--- services/document-updater/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 83661f0574..7b7ef11221 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -1858,9 +1858,9 @@ "resolved": "https://registry.npmjs.org/send/-/send-0.1.3.tgz" }, "settings-sharelatex": { - "version": "1.0.0", - "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", - "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", + "version": "1.1.0", + "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0", + "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#93f63d029b52fef8825c3a401b2b6a7ba29b4750", "dependencies": { "coffee-script": { "version": "1.6.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index d8dfc91bf8..0bffffdaa0 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -30,7 +30,7 @@ "request": "2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", - "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", + "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0", "sinon": "~1.5.2", "underscore": "1.2.2" }, From 3eb2d27d4aa99cb113cc5af5ac6c7cc682509c9c Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Tue, 8 Jan 2019 15:01:48 +0000 Subject: [PATCH 436/769] Add **/*.map to .gitignore --- services/document-updater/.gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore index df0820fb06..dbce694d3b 100644 --- a/services/document-updater/.gitignore +++ b/services/document-updater/.gitignore @@ -39,7 +39,7 @@ Thumbs.db app.js app/js/* -app.map +**/*.map test/unit/js/* test/acceptance/js/* From 76b054c7f4cc544a3724cfdcbfb6cfb02abed6fb Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 25 Jan 2019 16:32:26 +0000 Subject: [PATCH 437/769] bump node redis to 1.0.5 --- services/document-updater/npm-shrinkwrap.json | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 7b7ef11221..d116d14988 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -1698,23 +1698,7 @@ } } }, - "redis-sharelatex": { - 
"version": "1.0.4", - "from": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4", - "resolved": "git+https://github.com/sharelatex/redis-sharelatex.git#ca4e906559c1405d132e8edd7db763d64a57be62", - "dependencies": { - "coffee-script": { - "version": "1.8.0", - "from": "coffee-script@1.8.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz" - }, - "underscore": { - "version": "1.7.0", - "from": "underscore@1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" - } - } - }, + "request": { "version": "2.25.0", "from": "request@2.25.0", From d279a9149c8a4a44f47afb9e8a8021207238e4dd Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 25 Jan 2019 17:24:19 +0000 Subject: [PATCH 438/769] add dispatcherCount to config file --- services/document-updater/config/settings.defaults.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 7f36e47207..8074183168 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -86,5 +86,7 @@ module.exports = max_doc_length: 2 * 1024 * 1024 # 2mb + dispatcherCount: process.env["DISPATCHER_COUNT"] + mongo: url: "mongodb://#{process.env["MONGO_HOST"] or "localhost"}/sharelatex" From 3051d7db4b96e68dc245704e0fa7f409f7621360 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 30 Jan 2019 16:24:17 +0000 Subject: [PATCH 439/769] bulk upgrade to 1.1.12, moved to npm rather than git --- services/document-updater/Jenkinsfile | 10 +- services/document-updater/Makefile | 6 +- services/document-updater/buildscript.txt | 5 +- .../document-updater/docker-compose.ci.yml | 13 +- services/document-updater/docker-compose.yml | 12 +- services/document-updater/npm-shrinkwrap.json | 1154 ++++++----------- services/document-updater/package.json | 8 +- 7 files changed, 420 insertions(+), 788 deletions(-) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 3af2b785b8..9ec298487b 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -48,8 +48,11 @@ pipeline { } } - stage('Package and publish build') { + stage('Package and docker push') { steps { + sh 'echo ${BUILD_NUMBER} > build_number.txt' + sh 'touch build.tar.gz' // Avoid tar warning about files changing during read + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar' withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) { sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}' @@ -60,9 +63,12 @@ pipeline { } } - stage('Publish build number') { + stage('Publish to s3') { steps { sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt' + withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { + s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") + } withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { // The deployment process uses this file to figure out the latest build s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 336a84f5d6..07216e7800 100644 --- a/services/document-updater/Makefile +++ 
b/services/document-updater/Makefile
@@ -1,7 +1,7 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.1.10
+# Version: 1.1.12
 
 BUILD_NUMBER ?= local
 BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
@@ -13,7 +13,6 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
 	MOCHA_GREP=${MOCHA_GREP} \
 	docker-compose ${DOCKER_COMPOSE_FLAGS}
 
-
 clean:
 	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
 	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
@@ -40,6 +39,9 @@ build:
 		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
 		.
 
+tar:
+	$(DOCKER_COMPOSE) up tar
+
 publish:
 	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
 
diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt
index a545422ca4..aa39cdc02f 100644
--- a/services/document-updater/buildscript.txt
+++ b/services/document-updater/buildscript.txt
@@ -1,9 +1,8 @@
---script-version=1.1.10
 document-updater
+--language=coffeescript
 --node-version=6.9.5
 --acceptance-creds=None
---language=coffeescript
 --dependencies=mongo,redis
 --docker-repos=gcr.io/overleaf-ops
---kube=false
 --build-target=docker
+--script-version=1.1.12
diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml
index 5ab90e1825..36b52f8f8b 100644
--- a/services/document-updater/docker-compose.ci.yml
+++ b/services/document-updater/docker-compose.ci.yml
@@ -1,7 +1,7 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.1.10
+# Version: 1.1.12
 
 version: "2"
 
@@ -11,6 +11,7 @@ services:
     user: node
     command: npm run test:unit:_run
 
+
   test_acceptance:
     build: .
     image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
@@ -26,6 +27,16 @@ services:
     user: node
     command: npm run test:acceptance:_run
 
+
+
+  tar:
+    build: .
+    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+    volumes:
+      - ./:/tmp/build/
+    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
+    user: root
+
   redis:
     image: redis
 
diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml
index aeceafb3f3..8bb7857cb6 100644
--- a/services/document-updater/docker-compose.yml
+++ b/services/document-updater/docker-compose.yml
@@ -1,7 +1,7 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.1.10
+# Version: 1.1.12
 
 version: "2"
 
@@ -33,6 +33,16 @@ services:
       - redis
     command: npm run test:acceptance
 
+
+
+  tar:
+    build: .
+    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+    volumes:
+      - ./:/tmp/build/
+    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
+    user: root
+
   redis:
     image: redis
 
diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json
index d116d14988..26742b3c9b 100644
--- a/services/document-updater/npm-shrinkwrap.json
+++ b/services/document-updater/npm-shrinkwrap.json
@@ -38,7 +38,7 @@
     },
     "@google-cloud/projectify": {
       "version": "0.3.2",
-      "from": "@google-cloud/projectify@>=0.3.2 <0.4.0",
+      "from": "@google-cloud/projectify@>=0.3.0 <0.4.0",
       "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.2.tgz"
     },
     "@google-cloud/promisify": {
@@ -47,14 +47,19 @@
       "version": "0.3.1",
       "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz"
     },
     "@google-cloud/trace-agent": {
-      "version": "3.5.0",
+      "version": "3.5.2",
       "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0",
-      "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.5.0.tgz",
+      "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.5.2.tgz",
       "dependencies": {
         "@google-cloud/common": {
-          "version": "0.28.0",
-          "from": "@google-cloud/common@>=0.28.0 <0.29.0",
-          "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.28.0.tgz"
+          "version": "0.30.2",
+          "from": "@google-cloud/common@>=0.30.0 <0.31.0",
+          "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.30.2.tgz"
+        },
+        "google-auth-library": {
+          "version": "3.0.1",
+          "from": "google-auth-library@>=3.0.0 <4.0.0",
+          "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.0.1.tgz"
         },
         "methods": {
           "version": "1.1.2",
@@ -63,7 +68,7 @@
         },
         "uuid": {
           "version": "3.3.2",
-          "from": "uuid@>=3.0.1 <4.0.0",
+          "from": "uuid@^3.0.1",
           "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz"
         }
       }
@@ -123,21 +128,6 @@
       "from": "@sindresorhus/is@>=0.13.0 <0.14.0",
       "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.13.0.tgz"
     },
-    "@sinonjs/commons": {
-      "version": "1.3.0",
-      "from": "@sinonjs/commons@>=1.2.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.3.0.tgz"
-    },
-    "@sinonjs/formatio": {
-      "version": "3.1.0",
-      "from": "@sinonjs/formatio@>=3.1.0 <4.0.0",
-      "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.1.0.tgz"
-    },
-    "@sinonjs/samsam": {
-      "version": "3.0.2",
-      "from": "@sinonjs/samsam@>=3.0.2 <4.0.0",
-      "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.0.2.tgz"
-    },
     "@types/caseless": {
       "version": "0.12.1",
       "from": "@types/caseless@*",
@@ -164,9 +154,9 @@
       "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz"
     },
     "@types/node": {
-      "version": "10.12.18",
+      "version": "10.12.20",
       "from": "@types/node@*",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz"
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.20.tgz"
     },
     "@types/request": {
       "version": "2.48.1",
@@ -179,14 +169,9 @@
       "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz"
     },
     "@types/tough-cookie": {
-      "version": "2.3.4",
+      "version": "2.3.5",
       "from": "@types/tough-cookie@*",
-      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.4.tgz"
-    },
-    "abbrev": {
-      "version": "1.1.1",
-      "from": "abbrev@>=1.0.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz"
+      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz"
     },
     "acorn": {
       "version": "5.7.3",
@@ -199,41 +184,9 @@
       "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz"
     },
     "ajv": {
-      "version": "6.6.2",
-      "from": "ajv@>=6.5.5 <7.0.0",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.6.2.tgz"
-    },
-    "ansi-regex": {
-      "version": "0.2.1",
-      "from": "ansi-regex@>=0.2.0 <0.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz"
-    },
-    "ansi-styles": {
-      "version": "1.1.0",
-      "from": "ansi-styles@>=1.1.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz"
-    },
-    "argparse": {
-      "version": "0.1.16",
-      "from": "argparse@>=0.1.11 <0.2.0",
-      "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz",
-      "dependencies": {
-        "underscore": {
-          "version": "1.7.0",
-          "from": "underscore@>=1.7.0 <1.8.0",
-          "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz"
-        },
-        "underscore.string": {
-          "version": "2.4.0",
-          "from": "underscore.string@>=2.4.0 <2.5.0",
-          "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz"
-        }
-      }
-    },
-    "array-from": {
-      "version": "2.1.1",
-      "from": "array-from@>=2.1.1 <3.0.0",
-      "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz"
+      "version": "5.5.2",
+      "from": "ajv@>=5.1.0 <6.0.0",
+      "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz"
     },
     "arrify": {
       "version": "1.0.1",
@@ -253,12 +206,13 @@
     "assertion-error": {
       "version": "1.1.0",
       "from": "assertion-error@^1.0.1",
-      "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz"
+      "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
+      "dev": true
     },
     "async": {
-      "version": "2.6.1",
+      "version": "2.6.0",
       "from": "async@>=2.5.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/async/-/async-2.6.1.tgz"
+      "resolved": "https://registry.npmjs.org/async/-/async-2.6.0.tgz"
     },
     "async-listener": {
       "version": "0.6.10",
@@ -281,9 +235,9 @@
       "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz"
     },
     "aws4": {
-      "version": "1.8.0",
-      "from": "aws4@>=1.8.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz"
+      "version": "1.6.0",
+      "from": "aws4@>=1.6.0 <2.0.0",
+      "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz"
     },
     "axios": {
       "version": "0.18.0",
@@ -292,13 +246,19 @@
     },
     "balanced-match": {
       "version": "1.0.0",
-      "from": "balanced-match@>=1.0.0 <2.0.0",
+      "from": "balanced-match@^1.0.0",
      "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz"
     },
+    "base64-js": {
+      "version": "1.3.0",
+      "from": "base64-js@>=1.3.0 <2.0.0",
+      "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz"
+    },
     "bcrypt-pbkdf": {
-      "version": "1.0.2",
+      "version": "1.0.1",
       "from": "bcrypt-pbkdf@>=1.0.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz"
+      "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz",
+      "optional": true
     },
     "bignumber.js": {
       "version": "7.2.1",
@@ -306,29 +266,24 @@
       "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz"
     },
     "bindings": {
-      "version": "1.3.1",
+      "version": "1.4.0",
       "from": "bindings@>=1.2.1 <2.0.0",
-      "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.3.1.tgz"
+      "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.4.0.tgz"
     },
     "bintrees": {
       "version": "1.0.1",
       "from": "bintrees@1.0.1",
       "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz"
     },
-    "bluebird": {
-      "version": "3.5.3",
-      "from": "bluebird@>=3.3.4 <4.0.0",
-      "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.3.tgz"
-    },
     "boom": {
       "version": "0.4.2",
       "from": "boom@>=0.4.0 <0.5.0",
       "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz"
     },
     "brace-expansion": {
-      "version": "1.1.11",
-      "from": "brace-expansion@>=1.1.7 <2.0.0",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz"
+      "version": "1.1.8",
+      "from": "brace-expansion@^1.1.7",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz"
     },
     "browser-stdout": {
       "version": "1.3.1",
@@ -397,31 +352,21 @@
       "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz",
       "dev": true
     },
-    "chalk": {
-      "version": "0.5.1",
-      "from": "chalk@>=0.5.0 <0.6.0",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz"
-    },
-    "check-error": {
-      "version": "1.0.2",
-      "from": "check-error@>=1.0.2 <2.0.0",
-      "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz"
-    },
     "cluster-key-slot": {
-      "version": "1.0.12",
+      "version": "1.0.8",
       "from": "cluster-key-slot@>=1.0.6 <2.0.0",
-      "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.12.tgz"
+      "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.8.tgz"
+    },
+    "co": {
+      "version": "4.6.0",
+      "from": "co@>=4.6.0 <5.0.0",
+      "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz"
     },
     "coffee-script": {
       "version": "1.7.1",
       "from": "coffee-script@>=1.7.0 <1.8.0",
       "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz"
     },
-    "colors": {
-      "version": "0.6.2",
-      "from": "colors@>=0.6.2 <0.7.0",
-      "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz"
-    },
     "combined-stream": {
       "version": "0.0.7",
       "from": "combined-stream@>=0.0.4 <0.1.0",
@@ -469,7 +414,7 @@
     },
     "core-util-is": {
       "version": "1.0.2",
-      "from": "core-util-is@>=1.0.0 <1.1.0",
+      "from": "core-util-is@1.0.2",
       "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
     },
     "cryptiles": {
@@ -494,15 +439,10 @@
         }
       }
     },
-    "dateformat": {
-      "version": "1.0.2-1.2.3",
-      "from": "dateformat@1.0.2-1.2.3",
-      "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz"
-    },
     "debug": {
-      "version": "4.1.1",
+      "version": "3.1.0",
       "from": "debug@*",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz"
+      "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz"
     },
     "deep-eql": {
       "version": "0.1.3",
@@ -536,7 +476,8 @@
     "diff": {
       "version": "3.5.0",
       "from": "diff@3.5.0",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz"
+      "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz",
+      "dev": true
     },
     "dtrace-provider": {
       "version": "0.2.8",
@@ -551,9 +492,10 @@
       "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz"
     },
     "ecc-jsbn": {
-      "version": "0.1.2",
+      "version": "0.1.1",
       "from": "ecc-jsbn@>=0.1.1 <0.2.0",
-      "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz"
+      "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz",
+      "optional": true
     },
     "ecdsa-sig-formatter": {
       "version": "1.0.10",
@@ -587,23 +529,9 @@
     },
     "escape-string-regexp": {
       "version": "1.0.5",
-      "from": "escape-string-regexp@>=1.0.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz"
-    },
-    "esprima": {
-      "version": "1.0.4",
-      "from": "esprima@>=1.0.2 <1.1.0",
-      "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz"
-    },
-    "eventemitter2": {
-      "version": "0.4.14",
-      "from": "eventemitter2@>=0.4.13 <0.5.0",
-      "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz"
-    },
-    "exit": {
-      "version": "0.1.2",
-      "from": "exit@>=0.1.1 <0.2.0",
-      "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz"
+      "from": "escape-string-regexp@^1.0.2",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
+      "dev": true
     },
     "express": {
       "version": "3.3.4",
@@ -611,9 +539,9 @@
       "resolved": "https://registry.npmjs.org/express/-/express-3.3.4.tgz"
     },
     "extend": {
-      "version": "3.0.2",
-      "from": "extend@>=3.0.1 <4.0.0",
-      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz"
+      "version": "3.0.1",
+      "from": "extend@>=3.0.0 <4.0.0",
+      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz"
     },
     "extsprintf": {
       "version": "1.3.0",
@@ -621,42 +549,30 @@
       "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz"
     },
     "fast-deep-equal": {
-      "version": "2.0.1",
-      "from": "fast-deep-equal@>=2.0.1 <3.0.0",
-      "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz"
+      "version": "1.0.0",
+      "from": "fast-deep-equal@>=1.0.0 <2.0.0",
+      "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz"
     },
     "fast-json-stable-stringify": {
       "version": "2.0.0",
       "from": "fast-json-stable-stringify@>=2.0.0 <3.0.0",
       "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz"
     },
+    "fast-text-encoding": {
+      "version": "1.0.0",
+      "from": "fast-text-encoding@>=1.0.0 <2.0.0",
+      "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz"
+    },
+    "file-uri-to-path": {
+      "version": "1.0.0",
+      "from": "file-uri-to-path@1.0.0",
+      "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz"
+    },
     "findit2": {
       "version": "2.2.3",
       "from": "findit2@>=2.2.3 <3.0.0",
       "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz"
     },
-    "findup-sync": {
-      "version": "0.1.3",
-      "from": "findup-sync@>=0.1.2 <0.2.0",
-      "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz",
-      "dependencies": {
-        "glob": {
-          "version": "3.2.11",
-          "from": "glob@>=3.2.9 <3.3.0",
-          "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz"
-        },
-        "lodash": {
-          "version": "2.4.2",
-          "from": "lodash@>=2.4.1 <2.5.0",
-          "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz"
-        },
-        "minimatch": {
-          "version": "0.3.0",
-          "from": "minimatch@>=0.3.0 <0.4.0",
-          "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz"
-        }
-      }
-    },
     "flexbuffer": {
       "version": "0.0.6",
       "from": "flexbuffer@0.0.6",
@@ -665,19 +581,7 @@
     "follow-redirects": {
       "version": "1.6.1",
       "from": "follow-redirects@>=1.3.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.1.tgz",
-      "dependencies": {
-        "debug": {
-          "version": "3.1.0",
-          "from": "debug@3.1.0",
-          "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz"
-        },
-        "ms": {
-          "version": "2.0.0",
-          "from": "ms@2.0.0",
-          "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz"
-        }
-      }
+      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.1.tgz"
     },
     "forever-agent": {
       "version": "0.5.2",
@@ -706,23 +610,6 @@
       "from": "fresh@0.1.0",
       "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.1.0.tgz"
     },
-    "fs-extra": {
-      "version": "0.9.1",
-      "from": "fs-extra@>=0.9.1 <0.10.0",
-      "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.9.1.tgz",
-      "dependencies": {
-        "mkdirp": {
-          "version": "0.5.1",
-          "from": "mkdirp@>=0.5.0 <0.6.0",
-          "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz"
-        },
-        "ncp": {
-          "version": "0.5.1",
-          "from": "ncp@>=0.5.1 <0.6.0",
-          "resolved": "https://registry.npmjs.org/ncp/-/ncp-0.5.1.tgz"
-        }
-      }
-    },
     "fs.realpath": {
       "version": "1.0.0",
       "from": "fs.realpath@>=1.0.0 <2.0.0",
@@ -730,25 +617,22 @@
       "dev": true
     },
     "gaxios": {
-      "version": "1.0.6",
-      "from": "gaxios@>=1.0.2 <2.0.0",
-      "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.0.6.tgz"
+      "version": "1.2.7",
+      "from": "gaxios@>=1.0.4 <2.0.0",
+      "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.2.7.tgz",
+      "dependencies": {
+        "extend": {
+          "version": "3.0.2",
+          "from": "extend@>=3.0.2 <4.0.0",
+          "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz"
+        }
+      }
     },
     "gcp-metadata": {
       "version": "0.9.3",
       "from": "gcp-metadata@>=0.9.0 <0.10.0",
       "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz"
     },
-    "get-func-name": {
-      "version": "2.0.0",
-      "from": "get-func-name@>=2.0.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz"
-    },
-    "getobject": {
-      "version": "0.1.0",
-      "from": "getobject@>=0.1.0 <0.2.0",
-      "resolved": "https://registry.npmjs.org/getobject/-/getobject-0.1.0.tgz"
-    },
     "getpass": {
       "version": "0.1.7",
       "from": "getpass@>=0.1.1 <0.2.0",
@@ -763,8 +647,9 @@
     },
     "glob": {
       "version": "6.0.4",
-      "from": "glob@>=6.0.1 <7.0.0",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz"
+      "from": "glob@^6.0.1",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz",
+      "optional": true
     },
     "google-auth-library": {
       "version": "2.0.2",
@@ -775,11 +660,6 @@
           "version": "0.7.0",
           "from": "gcp-metadata@>=0.7.0 <0.8.0",
           "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz"
-        },
-        "lru-cache": {
-          "version": "5.1.1",
-          "from": "lru-cache@>=5.0.0 <6.0.0",
-          "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz"
         }
       }
     },
@@ -788,207 +668,15 @@
       "from": "google-p12-pem@>=1.0.0 <2.0.0",
       "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.3.tgz"
     },
-    "graceful-fs": {
-      "version": "1.2.3",
-      "from": "graceful-fs@>=1.2.0 <1.3.0",
-      "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz"
-    },
-    "growl": {
-      "version": "1.10.5",
-      "from": "growl@1.10.5",
-      "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz",
-      "dev": true
-    },
-    "grunt": {
-      "version": "0.4.5",
-      "from": "grunt@>=0.4.5 <0.5.0",
-      "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.5.tgz",
-      "dependencies": {
-        "async": {
-          "version": "0.1.22",
-          "from": "async@>=0.1.22 <0.2.0",
-          "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz"
-        },
-        "coffee-script": {
-          "version": "1.3.3",
-          "from": "coffee-script@>=1.3.3 <1.4.0",
-          "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz"
-        },
-        "glob": {
-          "version": "3.1.21",
-          "from": "glob@>=3.1.21 <3.2.0",
-          "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz"
-        },
-        "inherits": {
-          "version": "1.0.2",
-          "from": "inherits@>=1.0.0 <2.0.0",
-          "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz"
-        },
-        "lodash": {
-          "version": "0.9.2",
-          "from": "lodash@>=0.9.2 <0.10.0",
-          "resolved": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz"
-        },
-        "minimatch": {
-          "version": "0.2.14",
-          "from": "minimatch@>=0.2.12 <0.3.0",
-          "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz"
-        },
-        "rimraf": {
-          "version": "2.2.8",
-          "from": "rimraf@>=2.2.8 <2.3.0",
-          "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz"
-        }
-      }
-    },
-    "grunt-bunyan": {
-      "version": "0.5.0",
-      "from": "grunt-bunyan@>=0.5.0 <0.6.0",
-      "resolved": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz",
-      "dependencies": {
-        "lodash": {
-          "version": "2.4.2",
-          "from": "lodash@>=2.4.1 <2.5.0",
-          "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz"
-        }
-      }
-    },
-    "grunt-contrib-clean": {
-      "version": "0.6.0",
-      "from": "grunt-contrib-clean@>=0.6.0 <0.7.0",
-      "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.6.0.tgz",
-      "dependencies": {
-        "rimraf": {
-          "version": "2.2.8",
-          "from": "rimraf@>=2.2.1 <2.3.0",
-          "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz"
-        }
-      }
-    },
-    "grunt-contrib-coffee": {
-      "version": "0.11.1",
-      "from": "grunt-contrib-coffee@>=0.11.0 <0.12.0",
-      "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.11.1.tgz",
-      "dependencies": {
-        "lodash": {
-          "version": "2.4.2",
-          "from": "lodash@>=2.4.1 <2.5.0",
-          "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz"
-        }
-      }
-    },
-    "grunt-execute": {
-      "version": "0.2.2",
-      "from": "grunt-execute@>=0.2.2 <0.3.0",
-      "resolved": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz"
-    },
-    "grunt-legacy-log": {
-      "version": "0.1.3",
-      "from": "grunt-legacy-log@>=0.1.0 <0.2.0",
-      "resolved": "https://registry.npmjs.org/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz",
-      "dependencies": {
-        "lodash": {
-          "version": "2.4.2",
-          "from": "lodash@>=2.4.1 <2.5.0",
-          "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz"
-        },
-        "underscore.string": {
-          "version": "2.3.3",
-          "from": "underscore.string@>=2.3.3 <2.4.0",
-          "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz"
-        }
-      }
-    },
-    "grunt-legacy-log-utils": {
-      "version": "0.1.1",
-      "from": "grunt-legacy-log-utils@>=0.1.1 <0.2.0",
-      "resolved": "https://registry.npmjs.org/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz",
-      "dependencies": {
-        "lodash": {
-          "version": "2.4.2",
-          "from": "lodash@>=2.4.1 <2.5.0",
-          "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz"
-        },
-        "underscore.string": {
-          "version": "2.3.3",
-          "from": "underscore.string@>=2.3.3 <2.4.0",
-          "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz"
-        }
-      }
-    },
-    "grunt-legacy-util": {
-      "version": "0.2.0",
-      "from": "grunt-legacy-util@>=0.2.0 <0.3.0",
-      "resolved": "https://registry.npmjs.org/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz",
-      "dependencies": {
-        "async": {
-          "version": "0.1.22",
-          "from": "async@>=0.1.22 <0.2.0",
-          "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz"
-        },
-        "lodash": {
-          "version": "0.9.2",
-          "from": "lodash@>=0.9.2 <0.10.0",
-          "resolved": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz"
-        }
-      }
-    },
-    "grunt-mocha-test": {
-      "version": "0.11.0",
-      "from": "grunt-mocha-test@>=0.11.0 <0.12.0",
-      "resolved": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.11.0.tgz",
-      "dependencies": {
-        "commander": {
-          "version": "2.0.0",
-          "from": "commander@2.0.0",
-          "resolved": "http://registry.npmjs.org/commander/-/commander-2.0.0.tgz"
-        },
-        "diff": {
-          "version": "1.0.7",
-          "from": "diff@1.0.7",
-          "resolved": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz"
-        },
-        "glob": {
-          "version": "3.2.3",
-          "from": "glob@3.2.3",
-          "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz"
-        },
-        "graceful-fs": {
-          "version": "2.0.3",
-          "from": "graceful-fs@>=2.0.0 <2.1.0",
-          "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz"
-        },
-        "growl": {
-          "version": "1.7.0",
-          "from": "growl@>=1.7.0 <1.8.0",
-          "resolved": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz"
-        },
-        "minimatch": {
-          "version": "0.2.14",
-          "from": "minimatch@~0.2.11",
-          "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz"
-        },
-        "mocha": {
-          "version": "1.20.1",
-          "from": "mocha@>=1.20.0 <1.21.0",
-          "resolved": "https://registry.npmjs.org/mocha/-/mocha-1.20.1.tgz"
-        }
-      }
-    },
     "gtoken": {
-      "version": "2.3.0",
+      "version": "2.3.2",
       "from": "gtoken@>=2.3.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.0.tgz",
+      "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.2.tgz",
       "dependencies": {
         "mime": {
           "version": "2.4.0",
           "from": "mime@>=2.2.0 <3.0.0",
           "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz"
-        },
-        "pify": {
-          "version": "3.0.0",
-          "from": "pify@>=3.0.0 <4.0.0",
-          "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz"
         }
       }
     },
@@ -998,19 +686,9 @@
       "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz"
     },
     "har-validator": {
-      "version": "5.1.3",
-      "from": "har-validator@>=5.1.0 <5.2.0",
-      "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz"
-    },
-    "has-ansi": {
-      "version": "0.1.0",
-      "from": "has-ansi@>=0.1.0 <0.2.0",
-      "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz"
-    },
-    "has-flag": {
-      "version": "3.0.0",
-      "from": "has-flag@>=3.0.0 <4.0.0",
-      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz"
+      "version": "5.0.3",
+      "from": "har-validator@>=5.0.3 <5.1.0",
+      "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz"
     },
     "hawk": {
       "version": "1.0.0",
@@ -1024,20 +702,15 @@
       "dev": true
     },
     "hex2dec": {
-      "version": "1.1.1",
+      "version": "1.1.2",
       "from": "hex2dec@>=1.0.1 <2.0.0",
-      "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.1.tgz"
+      "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz"
     },
     "hoek": {
       "version": "0.9.1",
       "from": "hoek@>=0.9.0 <0.10.0",
       "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz"
     },
-    "hooker": {
-      "version": "0.2.3",
-      "from": "hooker@>=0.2.3 <0.3.0",
-      "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz"
-    },
     "http-signature": {
       "version": "0.10.1",
       "from": "http-signature@>=0.10.0 <0.11.0",
@@ -1046,23 +719,11 @@
     "https-proxy-agent": {
       "version": "2.2.1",
       "from": "https-proxy-agent@>=2.2.1 <3.0.0",
-      "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz",
-      "dependencies": {
-        "debug": {
-          "version": "3.2.6",
-          "from": "debug@>=3.1.0 <4.0.0",
-          "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz"
-        }
-      }
-    },
-    "iconv-lite": {
-      "version": "0.2.11",
-      "from": "iconv-lite@>=0.2.11 <0.3.0",
-      "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz"
+      "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz"
     },
     "inflight": {
       "version": "1.0.6",
-      "from": "inflight@>=1.0.4 <2.0.0",
+      "from": "inflight@^1.0.4",
       "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz"
     },
     "inherits": {
       "version": "2.0.3",
       "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz"
     },
     "ioredis": {
-      "version": "3.2.2",
-      "from": "ioredis@>=3.2.1 <4.0.0",
-      "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-3.2.2.tgz",
-      "dependencies": {
-        "debug": {
-          "version": "2.6.9",
-          "from": "debug@>=2.6.9 <3.0.0",
-          "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz"
-        },
-        "ms": {
-          "version": "2.0.0",
-          "from": "ms@2.0.0",
-          "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz"
-        }
-      }
+      "version": "4.6.0",
+      "from": "ioredis@4.6.0",
+      "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.6.0.tgz"
     },
     "is": {
       "version": "3.3.0",
@@ -1103,41 +752,20 @@
       "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz"
     },
     "isarray": {
-      "version": "0.0.1",
-      "from": "isarray@0.0.1",
-      "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
+      "version": "1.0.0",
+      "from": "isarray@>=1.0.0 <1.1.0",
+      "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"
     },
     "isstream": {
       "version": "0.1.2",
       "from": "isstream@>=0.1.2 <0.2.0",
       "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz"
     },
-    "jade": {
-      "version": "0.26.3",
-      "from": "jade@0.26.3",
-      "resolved": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz",
-      "dependencies": {
-        "commander": {
-          "version": "0.6.1",
-          "from": "commander@0.6.1",
-          "resolved": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz"
-        },
-        "mkdirp": {
-          "version": "0.3.0",
-          "from": "mkdirp@0.3.0",
-          "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz"
-        }
-      }
-    },
-    "js-yaml": {
-      "version": "2.0.5",
-      "from": "js-yaml@>=2.0.5 <2.1.0",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz"
-    },
     "jsbn": {
       "version": "0.1.1",
       "from": "jsbn@>=0.1.0 <0.2.0",
-      "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz"
+      "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
+      "optional": true
     },
     "json-bigint": {
       "version": "0.3.0",
@@ -1150,20 +778,15 @@
       "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz"
     },
     "json-schema-traverse": {
-      "version": "0.4.1",
-      "from": "json-schema-traverse@>=0.4.1 <0.5.0",
-      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz"
+      "version": "0.3.1",
+      "from": "json-schema-traverse@>=0.3.0 <0.4.0",
+      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz"
     },
     "json-stringify-safe": {
       "version": "5.0.1",
       "from": "json-stringify-safe@5.0.1",
       "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz"
     },
-    "jsonfile": {
-      "version": "1.1.1",
-      "from": "jsonfile@>=1.1.0 <1.2.0",
-      "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-1.1.1.tgz"
-    },
     "jsprim": {
       "version": "1.4.1",
       "from": "jsprim@>=1.2.2 <2.0.0",
@@ -1176,20 +799,15 @@
         }
       }
     },
-    "just-extend": {
-      "version": "4.0.2",
-      "from": "just-extend@>=4.0.2 <5.0.0",
-      "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.0.2.tgz"
-    },
     "jwa": {
-      "version": "1.1.6",
-      "from": "jwa@>=1.1.5 <2.0.0",
-      "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.1.6.tgz"
+      "version": "1.2.0",
+      "from": "jwa@>=1.2.0 <2.0.0",
+      "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.2.0.tgz"
    },
     "jws": {
-      "version": "3.1.5",
+      "version": "3.2.1",
       "from": "jws@>=3.1.5 <4.0.0",
-      "resolved": "https://registry.npmjs.org/jws/-/jws-3.1.5.tgz"
+      "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.1.tgz"
     },
     "keypress": {
       "version": "0.1.0",
@@ -1197,124 +815,64 @@
       "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz"
     },
     "lodash": {
-      "version": "4.17.11",
-      "from": "lodash@>=4.17.10 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz"
-    },
-    "lodash.assign": {
-      "version": "4.2.0",
-      "from": "lodash.assign@>=4.2.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz"
-    },
-    "lodash.bind": {
-      "version": "4.2.1",
-      "from": "lodash.bind@>=4.2.1 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.bind/-/lodash.bind-4.2.1.tgz"
-    },
-    "lodash.clone": {
-      "version": "4.5.0",
-      "from": "lodash.clone@>=4.5.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.clone/-/lodash.clone-4.5.0.tgz"
-    },
-    "lodash.clonedeep": {
-      "version": "4.5.0",
-      "from": "lodash.clonedeep@>=4.5.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz"
+      "version": "4.17.4",
+      "from": "lodash@>=4.14.0 <5.0.0",
+      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz"
     },
     "lodash.defaults": {
       "version": "4.2.0",
       "from": "lodash.defaults@>=4.2.0 <5.0.0",
       "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz"
     },
-    "lodash.difference": {
-      "version": "4.5.0",
-      "from": "lodash.difference@>=4.5.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.difference/-/lodash.difference-4.5.0.tgz"
-    },
     "lodash.flatten": {
       "version": "4.4.0",
       "from": "lodash.flatten@>=4.4.0 <5.0.0",
       "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz"
     },
-    "lodash.foreach": {
-      "version": "4.5.0",
-      "from": "lodash.foreach@>=4.5.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-4.5.0.tgz"
-    },
-    "lodash.get": {
-      "version": "4.4.2",
-      "from": "lodash.get@>=4.4.2 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz"
-    },
-    "lodash.isempty": {
-      "version": "4.4.0",
-      "from": "lodash.isempty@>=4.4.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz"
-    },
-    "lodash.keys": {
-      "version": "4.2.0",
-      "from": "lodash.keys@>=4.2.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-4.2.0.tgz"
-    },
-    "lodash.noop": {
-      "version": "3.0.1",
-      "from": "lodash.noop@>=3.0.1 <4.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.noop/-/lodash.noop-3.0.1.tgz"
-    },
-    "lodash.partial": {
-      "version": "4.2.1",
-      "from": "lodash.partial@>=4.2.1 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.partial/-/lodash.partial-4.2.1.tgz"
-    },
-    "lodash.pick": {
-      "version": "4.4.0",
-      "from": "lodash.pick@>=4.4.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz"
-    },
     "lodash.pickby": {
       "version": "4.6.0",
       "from": "lodash.pickby@>=4.6.0 <5.0.0",
       "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz"
     },
-    "lodash.sample": {
-      "version": "4.2.1",
-      "from": "lodash.sample@>=4.2.1 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.sample/-/lodash.sample-4.2.1.tgz"
-    },
-    "lodash.shuffle": {
-      "version": "4.2.0",
-      "from": "lodash.shuffle@>=4.2.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.shuffle/-/lodash.shuffle-4.2.0.tgz"
-    },
-    "lodash.values": {
-      "version": "4.3.0",
-      "from": "lodash.values@>=4.3.0 <5.0.0",
-      "resolved": "https://registry.npmjs.org/lodash.values/-/lodash.values-4.3.0.tgz"
-    },
     "logger-sharelatex": {
-      "version": "1.5.9",
-      "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.9",
-      "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#e8e1b95052f62e107336053e4a983f81cdbdf589",
+      "version": "1.6.0",
+      "from": "logger-sharelatex@1.6.0",
+      "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.6.0.tgz",
       "dependencies": {
+        "ajv": {
+          "version": "6.7.0",
+          "from": "ajv@>=6.5.5 <7.0.0",
+          "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.7.0.tgz"
+        },
+        "assert-plus": {
+          "version": "1.0.0",
+          "from": "assert-plus@>=1.0.0 <2.0.0",
+          "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz"
+        },
+        "aws4": {
+          "version": "1.8.0",
+          "from": "aws4@>=1.8.0 <2.0.0",
+          "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz"
+        },
         "bunyan": {
           "version": "1.5.1",
           "from": "bunyan@1.5.1",
           "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz"
         },
-        "chai": {
-          "version": "4.2.0",
-          "from": "chai@latest",
-          "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz"
-        },
         "coffee-script": {
           "version": "1.12.4",
           "from": "coffee-script@1.12.4",
           "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz"
         },
-        "deep-eql": {
-          "version": "3.0.1",
-          "from": "deep-eql@>=3.0.1 <4.0.0",
-          "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz"
+        "combined-stream": {
+          "version": "1.0.7",
+          "from": "combined-stream@>=1.0.6 <1.1.0",
+          "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz"
+        },
+        "delayed-stream": {
+          "version": "1.0.0",
+          "from": "delayed-stream@>=1.0.0 <1.1.0",
+          "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz"
        },
         "dtrace-provider": {
           "version": "0.6.0",
@@ -1322,42 +880,97 @@
           "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz",
           "optional": true
         },
-        "sandboxed-module": {
-          "version": "2.0.3",
-          "from": "sandboxed-module@latest",
-          "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz"
+        "extend": {
+          "version": "3.0.2",
+          "from": "extend@>=3.0.2 <3.1.0",
+          "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz"
         },
-        "sinon": {
-          "version": "7.2.2",
-          "from": "sinon@latest",
-          "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.2.2.tgz"
+        "fast-deep-equal": {
+          "version": "2.0.1",
+          "from": "fast-deep-equal@>=2.0.1 <3.0.0",
+          "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz"
         },
-        "supports-color": {
-          "version": "5.5.0",
-          "from": "supports-color@>=5.5.0 <6.0.0",
-          "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz"
+        "forever-agent": {
+          "version": "0.6.1",
+          "from": "forever-agent@>=0.6.1 <0.7.0",
+          "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz"
         },
-        "timekeeper": {
-          "version": "1.0.0",
-          "from": "timekeeper@>=1.0.0 <2.0.0",
-          "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-1.0.0.tgz"
+        "form-data": {
+          "version": "2.3.3",
+          "from": "form-data@>=2.3.2 <2.4.0",
+          "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz"
+        },
+        "har-validator": {
+          "version": "5.1.3",
+          "from": "har-validator@>=5.1.0 <5.2.0",
+          "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz"
+        },
+        "http-signature": {
+          "version": "1.2.0",
+          "from": "http-signature@>=1.2.0 <1.3.0",
+          "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz"
+        },
+        "json-schema-traverse": {
+          "version": "0.4.1",
+          "from": "json-schema-traverse@>=0.4.1 <0.5.0",
+          "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz"
+        },
+        "mime-db": {
+          "version": "1.37.0",
+          "from": "mime-db@>=1.37.0 <1.38.0",
+          "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz"
+        },
+        "mime-types": {
+          "version": "2.1.21",
+          "from": "mime-types@>=2.1.19 <2.2.0",
+          "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz"
+        },
+        "oauth-sign": {
+          "version": "0.9.0",
+          "from": "oauth-sign@>=0.9.0 <0.10.0",
+          "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz"
+        },
+        "qs": {
+          "version": "6.5.2",
+          "from": "qs@>=6.5.2 <6.6.0",
+          "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz"
+        },
+        "request": {
+          "version": "2.88.0",
+          "from": "request@>=2.88.0 <3.0.0",
+          "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz"
+        },
+        "safe-buffer": {
+          "version": "5.1.2",
+          "from": "safe-buffer@>=5.1.2 <6.0.0",
+          "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz"
+        },
+        "tough-cookie": {
+          "version": "2.4.3",
+          "from": "tough-cookie@>=2.4.3 <2.5.0",
+          "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz"
+        },
+        "tunnel-agent": {
+          "version": "0.6.0",
+          "from": "tunnel-agent@>=0.6.0 <0.7.0",
+          "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz"
+        },
+        "uuid": {
+          "version": "3.3.2",
+          "from": "uuid@>=3.3.2 <4.0.0",
+          "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz"
         }
       }
     },
-    "lolex": {
-      "version": "3.0.0",
-      "from": "lolex@>=3.0.0 <4.0.0",
-      "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.0.0.tgz"
-    },
     "long": {
       "version": "4.0.0",
       "from": "long@>=4.0.0 <5.0.0",
       "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz"
     },
     "lru-cache": {
-      "version": "2.7.3",
-      "from": "lru-cache@>=2.0.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz"
+      "version": "5.1.1",
+      "from": "lru-cache@>=5.0.0 <6.0.0",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz"
     },
     "lsmod": {
       "version": "1.0.0",
@@ -1380,9 +993,9 @@
       "resolved": "https://registry.npmjs.org/methods/-/methods-0.0.1.tgz"
     },
     "metrics-sharelatex": {
-      "version": "2.0.12",
-      "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.12",
-      "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#3ac1621ef049e2f2d88a83b3a41011333d609662",
+      "version": "2.1.1",
+      "from": "metrics-sharelatex@2.1.1",
+      "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.1.1.tgz",
       "dependencies": {
         "coffee-script": {
           "version": "1.6.0",
@@ -1407,18 +1020,18 @@
       "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz"
     },
     "mime-db": {
-      "version": "1.37.0",
-      "from": "mime-db@>=1.37.0 <1.38.0",
-      "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz"
+      "version": "1.30.0",
+      "from": "mime-db@>=1.30.0 <1.31.0",
+      "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz"
     },
     "mime-types": {
-      "version": "2.1.21",
-      "from": "mime-types@>=2.1.19 <2.2.0",
-      "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz"
+      "version": "2.1.17",
+      "from": "mime-types@>=2.1.17 <2.2.0",
+      "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz"
    },
     "minimatch": {
       "version": "3.0.4",
-      "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0",
+      "from": "minimatch@2 || 3",
       "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz"
     },
     "minimist": {
@@ -1440,13 +1053,7 @@
         "commander": {
           "version": "2.15.1",
           "from": "commander@2.15.1",
-          "resolved": "http://registry.npmjs.org/commander/-/commander-2.15.1.tgz",
-          "dev": true
-        },
-        "debug": {
-          "version": "3.1.0",
-          "from": "debug@3.1.0",
-          "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
+          "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz",
           "dev": true
         },
         "glob": {
@@ -1455,18 +1062,24 @@
           "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz",
           "dev": true
         },
+        "growl": {
+          "version": "1.10.5",
+          "from": "growl@1.10.5",
+          "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz",
+          "dev": true
+        },
+        "has-flag": {
+          "version": "3.0.0",
+          "from": "has-flag@>=3.0.0 <4.0.0",
+          "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
+          "dev": true
+        },
         "mkdirp": {
           "version": "0.5.1",
           "from": "mkdirp@0.5.1",
           "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz",
           "dev": true
         },
-        "ms": {
-          "version": "2.0.0",
-          "from": "ms@2.0.0",
-          "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
-          "dev": true
-        },
         "supports-color": {
           "version": "5.4.0",
           "from": "supports-color@5.4.0",
@@ -1481,9 +1094,9 @@
       "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz"
     },
     "ms": {
-      "version": "2.1.1",
-      "from": "ms@>=2.1.1 <3.0.0",
-      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz"
+      "version": "2.0.0",
+      "from": "ms@2.0.0",
+      "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz"
     },
     "mv": {
       "version": "2.1.1",
@@ -1507,21 +1120,9 @@
     "ncp": {
       "version": "2.0.0",
       "from": "ncp@~2.0.0",
-      "resolved": "http://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz",
+      "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz",
       "optional": true
     },
-    "nise": {
-      "version": "1.4.8",
-      "from": "nise@>=1.4.7 <2.0.0",
-      "resolved": "https://registry.npmjs.org/nise/-/nise-1.4.8.tgz",
-      "dependencies": {
-        "lolex": {
-          "version": "2.7.5",
-          "from": "lolex@>=2.3.2 <3.0.0",
-          "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.7.5.tgz"
-        }
-      }
-    },
     "node-fetch": {
       "version": "2.3.0",
       "from": "node-fetch@>=2.2.0 <3.0.0",
@@ -1529,14 +1130,9 @@
     },
     "node-forge": {
       "version": "0.7.6",
-      "from": "node-forge@>=0.7.5 <0.8.0",
+      "from": "node-forge@>=0.7.4 <0.8.0",
       "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.6.tgz"
     },
-    "nopt": {
-      "version": "1.0.10",
-      "from": "nopt@>=1.0.10 <1.1.0",
-      "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz"
-    },
     "oauth-sign": {
       "version": "0.3.0",
       "from": "oauth-sign@>=0.3.0 <0.4.0",
@@ -1544,7 +1140,7 @@
     },
     "once": {
       "version": "1.4.0",
-      "from": "once@>=1.3.0 <2.0.0",
+      "from": "once@^1.3.0",
       "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz"
     },
     "p-limit": {
@@ -1569,24 +1165,14 @@
     },
     "path-is-absolute": {
       "version": "1.0.1",
-      "from": "path-is-absolute@>=1.0.0 <2.0.0",
+      "from": "path-is-absolute@^1.0.0",
       "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz"
     },
     "path-parse": {
       "version": "1.0.6",
-      "from": "path-parse@>=1.0.6 <2.0.0",
+      "from": "path-parse@>=1.0.5 <2.0.0",
       "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz"
     },
-    "path-to-regexp": {
-      "version": "1.7.0",
-      "from": "path-to-regexp@>=1.7.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz"
-    },
-    "pathval": {
-      "version": "1.1.0",
-      "from": "pathval@>=1.1.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz"
-    },
     "pause": {
       "version": "0.0.1",
       "from": "pause@0.0.1",
@@ -1599,7 +1185,7 @@
     },
     "pify": {
       "version": "4.0.1",
-      "from": "pify@>=4.0.1 <5.0.0",
+      "from": "pify@>=4.0.0 <5.0.0",
       "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz"
     },
     "pretty-ms": {
@@ -1628,9 +1214,9 @@
       "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz"
     },
     "punycode": {
-      "version": "2.1.1",
-      "from": "punycode@>=2.1.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz"
+      "version": "1.4.1",
+      "from": "punycode@>=1.4.1 <2.0.0",
+      "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz"
     },
     "q": {
       "version": "0.9.2",
@@ -1662,14 +1248,7 @@
     "readable-stream": {
       "version": "2.3.6",
       "from": "readable-stream@>=2.0.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz",
-      "dependencies": {
-        "isarray": {
-          "version": "1.0.0",
-          "from": "isarray@>=1.0.0 <1.1.0",
-          "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"
-        }
-      }
+      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz"
     },
     "redis": {
       "version": "0.12.1",
@@ -1678,13 +1257,18 @@
     },
     "redis-commands": {
       "version": "1.4.0",
-      "from": "redis-commands@>=1.2.0 <2.0.0",
+      "from": "redis-commands@1.4.0",
       "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.4.0.tgz"
     },
+    "redis-errors": {
+      "version": "1.2.0",
+      "from": "redis-errors@>=1.2.0 <2.0.0",
+      "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz"
+    },
     "redis-parser": {
-      "version": "2.6.0",
-      "from": "redis-parser@>=2.4.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-2.6.0.tgz"
+      "version": "3.0.0",
+      "from": "redis-parser@>=3.0.0 <4.0.0",
+      "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz"
     },
     "redis-sentinel": {
       "version": "0.1.1",
@@ -1698,7 +1282,23 @@
         }
       }
     },
-
+    "redis-sharelatex": {
+      "version": "1.0.5",
+      "from": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.5.tgz",
+      "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.5.tgz",
+      "dependencies": {
+        "coffee-script": {
+          "version": "1.8.0",
+          "from": "coffee-script@1.8.0",
+          "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz"
+        },
+        "underscore": {
+          "version": "1.7.0",
+          "from": "underscore@1.7.0",
+          "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz"
+        }
+      }
+    },
     "request": {
       "version": "2.25.0",
       "from": "request@2.25.0",
@@ -1712,19 +1312,36 @@
       }
     },
     "requestretry": {
-      "version": "1.13.0",
+      "version": "1.12.2",
       "from": "requestretry@>=1.12.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-1.13.0.tgz",
+      "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-1.12.2.tgz",
       "dependencies": {
         "assert-plus": {
           "version": "1.0.0",
           "from": "assert-plus@>=1.0.0 <2.0.0",
           "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz"
         },
+        "boom": {
+          "version": "4.3.1",
+          "from": "boom@>=4.0.0 <5.0.0",
+          "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz"
+        },
         "combined-stream": {
-          "version": "1.0.7",
-          "from": "combined-stream@>=1.0.6 <1.1.0",
-          "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz"
+          "version": "1.0.5",
+          "from": "combined-stream@>=1.0.5 <1.1.0",
+          "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz"
+        },
+        "cryptiles": {
+          "version": "3.1.2",
+          "from": "cryptiles@>=3.0.0 <4.0.0",
+          "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz",
+          "dependencies": {
+            "boom": {
+              "version": "5.2.0",
+              "from": "boom@>=5.0.0 <6.0.0",
+              "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz"
+            }
+          }
         },
         "delayed-stream": {
           "version": "1.0.0",
@@ -1737,9 +1354,19 @@
           "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz"
         },
         "form-data": {
-          "version": "2.3.3",
-          "from": "form-data@>=2.3.2 <2.4.0",
-          "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz"
+          "version": "2.3.1",
+          "from": "form-data@>=2.3.1 <2.4.0",
+          "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.1.tgz"
+        },
+        "hawk": {
+          "version": "6.0.2",
+          "from": "hawk@>=6.0.2 <6.1.0",
+          "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz"
+        },
+        "hoek": {
+          "version": "4.2.0",
+          "from": "hoek@>=4.0.0 <5.0.0",
+          "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.0.tgz"
        },
         "http-signature": {
           "version": "1.2.0",
@@ -1747,19 +1374,24 @@
           "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz"
         },
         "oauth-sign": {
-          "version": "0.9.0",
-          "from": "oauth-sign@>=0.9.0 <0.10.0",
-          "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz"
+          "version": "0.8.2",
+          "from": "oauth-sign@>=0.8.2 <0.9.0",
+          "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz"
         },
         "qs": {
-          "version": "6.5.2",
-          "from": "qs@>=6.5.2 <6.6.0",
-          "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz"
+          "version": "6.5.1",
+          "from": "qs@>=6.5.1 <6.6.0",
+          "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz"
        },
         "request": {
-          "version": "2.88.0",
+          "version": "2.83.0",
           "from": "request@>=2.74.0 <3.0.0",
-          "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz"
+          "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz"
+        },
+        "sntp": {
+          "version": "2.1.0",
+          "from": "sntp@>=2.0.0 <3.0.0",
+          "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz"
         },
         "tunnel-agent": {
           "version": "0.6.0",
@@ -1767,9 +1399,9 @@
           "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz"
         },
         "uuid": {
-          "version": "3.3.2",
-          "from": "uuid@>=3.3.2 <4.0.0",
-          "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz"
+          "version": "3.1.0",
+          "from": "uuid@>=3.1.0 <4.0.0",
+          "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz"
         }
       }
     },
@@ -1784,13 +1416,13 @@
       "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz"
     },
     "resolve": {
-      "version": "1.9.0",
+      "version": "1.10.0",
       "from": "resolve@>=1.5.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.9.0.tgz"
+      "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz"
     },
     "retry-axios": {
       "version": "0.3.2",
-      "from": "retry-axios@0.3.2",
+      "from": "retry-axios@>=0.3.2 <0.4.0",
       "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz"
     },
     "retry-request": {
@@ -1800,13 +1432,14 @@
     },
     "rimraf": {
       "version": "2.4.5",
-      "from": "rimraf@>=2.4.0 <2.5.0",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz"
+      "from": "rimraf@~2.4.0",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz",
+      "optional": true
     },
     "safe-buffer": {
-      "version": "5.1.2",
-      "from": "safe-buffer@>=5.1.1 <5.2.0",
-      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz"
+      "version": "5.1.1",
+      "from": "safe-buffer@>=5.1.1 <6.0.0",
+      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz"
     },
     "safe-json-stringify": {
       "version": "1.2.0",
@@ -1814,11 +1447,6 @@
       "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz",
       "optional": true
     },
-    "safer-buffer": {
-      "version": "2.1.2",
-      "from": "safer-buffer@>=2.0.2 <3.0.0",
-      "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz"
-    },
     "sandboxed-module": {
       "version": "0.2.2",
       "from": "sandboxed-module@>=0.2.0 <0.3.0",
@@ -1843,8 +1471,8 @@
     },
     "settings-sharelatex": {
       "version": "1.1.0",
-      "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
-      "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#93f63d029b52fef8825c3a401b2b6a7ba29b4750",
+      "from": "settings-sharelatex@1.1.0",
+      "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz",
       "dependencies": {
         "coffee-script": {
           "version": "1.6.0",
@@ -1854,14 +1482,9 @@
       }
     },
     "shimmer": {
-      "version": "1.2.0",
+      "version": "1.2.1",
       "from": "shimmer@>=1.2.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.0.tgz"
-    },
-    "sigmund": {
-      "version": "1.0.1",
-      "from": "sigmund@>=1.0.0 <1.1.0",
-      "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz"
+      "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz"
     },
     "sinon": {
       "version": "1.5.2",
@@ -1884,14 +1507,14 @@
       "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz"
     },
     "sshpk": {
-      "version": "1.16.0",
+      "version": "1.13.1",
       "from": "sshpk@>=1.7.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.0.tgz",
+      "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz",
       "dependencies": {
         "asn1": {
-          "version": "0.2.4",
+          "version": "0.2.3",
           "from": "asn1@>=0.2.3 <0.3.0",
-          "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz"
+          "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz"
         },
         "assert-plus": {
           "version": "1.0.0",
@@ -1905,6 +1528,11 @@
       "from": "stack-trace@0.0.9",
       "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz"
     },
+    "standard-as-callback": {
+      "version": "1.0.1",
+      "from": "standard-as-callback@>=1.0.0 <2.0.0",
+      "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-1.0.1.tgz"
+    },
     "statsd-parser": {
       "version": "0.0.4",
       "from": "statsd-parser@>=0.0.4 <0.1.0",
@@ -1920,15 +1548,10 @@
       "from": "string_decoder@>=1.1.1 <1.2.0",
       "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz"
     },
-    "strip-ansi": {
-      "version": "0.3.0",
-      "from": "strip-ansi@>=0.3.0 <0.4.0",
-      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz"
-    },
-    "supports-color": {
-      "version": "0.2.0",
-      "from": "supports-color@>=0.2.0 <0.3.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz"
+    "stringstream": {
+      "version": "0.0.5",
+      "from": "stringstream@>=0.0.5 <0.1.0",
+      "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz"
     },
     "symbol-observable": {
       "version": "1.2.0",
@@ -1942,7 +1565,7 @@
     },
     "teeny-request": {
       "version": "3.11.3",
-      "from": "teeny-request@>=3.11.1 <4.0.0",
+      "from": "teeny-request@>=3.6.0 <4.0.0",
       "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz",
       "dependencies": {
         "uuid": {
@@ -1952,11 +1575,6 @@
         }
       }
     },
-    "text-encoding": {
-      "version": "0.6.4",
-      "from": "text-encoding@>=0.6.4 <0.7.0",
-      "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz"
-    },
     "through": {
       "version": "2.3.8",
       "from": "through@>=2.0.0 <3.0.0",
@@ -1964,26 +1582,19 @@
     },
     "through2": {
       "version": "2.0.5",
-      "from": "through2@>=2.0.0 <3.0.0",
+      "from": "through2@>=2.0.3 <3.0.0",
       "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz"
     },
     "timekeeper": {
-      "version": "2.1.2",
+      "version": "2.0.0",
       "from": "timekeeper@>=2.0.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.1.2.tgz",
+      "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.0.0.tgz",
       "dev": true
     },
     "tough-cookie": {
-      "version": "2.4.3",
-      "from": "tough-cookie@>=2.4.3 <2.5.0",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz",
-      "dependencies": {
-        "punycode": {
-          "version": "1.4.1",
-          "from": "punycode@>=1.4.1 <2.0.0",
-          "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz"
-        }
-      }
+      "version": "2.3.3",
+      "from": "tough-cookie@>=2.3.3 <2.4.0",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.3.tgz"
     },
     "tunnel-agent": {
       "version": "0.3.0",
@@ -1993,12 +1604,8 @@
     "tweetnacl": {
       "version": "0.14.5",
       "from": "tweetnacl@>=0.14.0 <0.15.0",
-      "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz"
-    },
-    "type-detect": {
-      "version": "4.0.8",
-      "from": "type-detect@>=4.0.5 <5.0.0",
-      "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz"
+      "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
+      "optional": true
     },
     "uid2": {
       "version": "0.0.2",
@@ -2010,15 +1617,17 @@
       "from": "underscore@1.2.2",
       "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.2.tgz"
     },
-    "underscore.string": {
-      "version": "2.2.1",
-      "from": "underscore.string@>=2.2.1 <2.3.0",
-      "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz"
-    },
     "uri-js": {
       "version": "4.2.2",
       "from": "uri-js@>=4.2.2 <5.0.0",
-      "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz"
+      "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz",
+      "dependencies": {
+        "punycode": {
+          "version": "2.1.1",
+          "from": "punycode@>=2.1.0 <3.0.0",
+          "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz"
+        }
+      }
     },
     "util-deprecate": {
       "version": "1.0.2",
@@ -2047,14 +1656,9 @@
       "from": "when@>=3.7.7 <4.0.0",
       "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz"
     },
-    "which": {
-      "version": "1.0.9",
-      "from": "which@>=1.0.5 <1.1.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz"
-    },
     "wrappy": {
       "version": "1.0.2",
-      "from": "wrappy@>=1.0.0 <2.0.0",
+      "from": "wrappy@1",
       "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz"
     },
     "xtend": {
diff --git a/services/document-updater/package.json b/services/document-updater/package.json
index 0bffffdaa0..fa95246d21 100644
--- a/services/document-updater/package.json
+++ b/services/document-updater/package.json
@@ -23,14 +23,14 @@
     "async": "^2.5.0",
     "coffee-script": "~1.7.0",
     "express": "3.3.4",
-    "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.9",
+    "logger-sharelatex": "^1.6.0",
     "lynx": "0.0.11",
-    "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.12",
-    "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4",
+    "metrics-sharelatex": "^2.1.1",
+    "redis-sharelatex": "^1.0.5",
     "request": "2.25.0",
     "requestretry": "^1.12.0",
     "sandboxed-module": "~0.2.0",
-    "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
+    "settings-sharelatex": "^1.1.0",
     "sinon": "~1.5.2",
     "underscore": "1.2.2"
   },

From 98de529e42cf1d2cdf621caa37267b6e3f4f3d01 Mon Sep 17 00:00:00 2001
From: Henry Oswald
Date: Fri, 1 Feb 2019 19:40:20 +0000
Subject: [PATCH 440/769] make config look like chef config

process.env["REDIS_HOST"] or "localhost" + password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" key_schema: - projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" - projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}" - # cluster: [{ - # port: "7000" - # host: "localhost" - # }] - # key_schema: - # uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" - # docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" + projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" + projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" + redisOptions: + keepAlive: 100 + lock: port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["LOCK_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["LOCK_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" key_schema: - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - # cluster: [{ - # port: "7000" - # host: "localhost" - # }] - # key_schema: - # blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + redisOptions: + keepAlive: 100 + + documentupdater: + port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" + host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" + password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" + redisOptions: + keepAlive: 100 + key_schema: + blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + docLines: ({doc_id}) -> "doclines:{#{doc_id}}" + docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" + docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" + docHash: ({doc_id}) -> "DocHash:{#{doc_id}}" + projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}" + docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" + ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" + unflushedTime: ({doc_id}) -> "UnflushedTime:{#{doc_id}}" + pathname: ({doc_id}) -> "Pathname:{#{doc_id}}" + projectHistoryId: ({doc_id}) -> "ProjectHistoryId:{#{doc_id}}" + projectState: ({project_id}) -> "ProjectState:{#{project_id}}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" + redisOptions: + keepAlive: 100 max_doc_length: 2 * 1024 * 1024 # 2mb diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fa95246d21..5ab28811b8 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,7 +26,7 @@ "logger-sharelatex": "^1.6.0", "lynx": "0.0.11", "metrics-sharelatex": "^2.1.1", - "redis-sharelatex": "^1.0.5", + "redis-sharelatex": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.5.tgz", "request": "2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", From e44a02391a7025f48c04dc22b67a97e8760112f5 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 1 Feb 2019 20:04:43 +0000 Subject: [PATCH 441/769] standardise name of document-updater --- services/document-updater/app.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index cbcaf23897..a22a552d59 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -1,11 +1,11 @@ Metrics = require "metrics-sharelatex" -Metrics.initialize("doc-updater") 
+Metrics.initialize("document-updater") express = require('express') http = require("http") Settings = require('settings-sharelatex') logger = require('logger-sharelatex') -logger.initialize("documentupdater") +logger.initialize("document-updater") logger.logger.serializers.docs = require("./app/js/LoggerSerializers").docs logger.logger.serializers.files = require("./app/js/LoggerSerializers").files if Settings.sentry?.dsn? From 8d3fb729c552c81b5696dff6729504070b8645ac Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 1 Feb 2019 20:10:47 +0000 Subject: [PATCH 442/769] point to web_api --- services/document-updater/config/settings.defaults.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index cd3a736cf2..20d0a53549 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -10,7 +10,7 @@ module.exports = apis: web: - url: "http://#{process.env["WEB_HOST"] or "localhost"}:#{process.env['WEB_PORT'] or 3000}" + url: "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}" user: "sharelatex" pass: "password" trackchanges: From 4e1a2c787cbdb0b3c01e31daefb9d96392bceefe Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 6 Feb 2019 15:29:22 +0000 Subject: [PATCH 443/769] Revert "turn down logging, use logger.info for less important data" This reverts commit c5f91428e3c7702fbbd3ffd1ef7a772d513f33f2. --- .../app/coffee/DocumentManager.coffee | 2 +- .../app/coffee/HistoryRedisManager.coffee | 2 +- .../app/coffee/HttpController.coffee | 8 ++++---- .../app/coffee/RangesManager.coffee | 6 +++--- .../app/coffee/RedisManager.coffee | 2 +- .../app/coffee/ShareJsUpdateManager.coffee | 4 ++-- .../app/coffee/UpdateManager.coffee | 2 +- .../DocumentManager/DocumentManagerTests.coffee | 2 +- .../HistoryRedisManagerTests.coffee | 4 +--- .../HttpController/HttpControllerTests.coffee | 8 ++++---- .../RangesManager/RangesManagerTests.coffee | 15 +++++---------- .../coffee/RedisManager/RedisManagerTests.coffee | 2 +- .../ShareJsUpdateManagerTests.coffee | 2 +- .../UpdateManager/UpdateManagerTests.coffee | 2 +- 14 files changed, 27 insertions(+), 34 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 7b8caa3e20..39713a1981 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -25,7 +25,7 @@ module.exports = DocumentManager = logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> return callback(error) if error? - logger.info {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API" + logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API" RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) -> return callback(error) if error? 
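# Cache-miss path: the doc is fetched from the persistence API, written back
# into Redis, then returned. The trailing (null, false) arguments are assumed
# to be (unflushedTime, alreadyLoaded): a doc loaded this way has no
# unflushed time recorded yet and was not already present in Redis.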
callback null, lines, version, ranges, pathname, projectHistoryId, null, false diff --git a/services/document-updater/app/coffee/HistoryRedisManager.coffee b/services/document-updater/app/coffee/HistoryRedisManager.coffee index 8c37132ada..d9a99a09aa 100644 --- a/services/document-updater/app/coffee/HistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/HistoryRedisManager.coffee @@ -7,7 +7,7 @@ module.exports = HistoryRedisManager = recordDocHasHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> if ops.length == 0 return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush - logger.info project_id: project_id, doc_id: doc_id, "marking doc in project for history ops" + logger.log project_id: project_id, doc_id: doc_id, "marking doc in project for history ops" rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) -> return callback(error) if error? callback() diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 6cf03f2cd6..93f915d662 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -11,7 +11,7 @@ module.exports = HttpController = getDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id - logger.info project_id: project_id, doc_id: doc_id, "getting doc via http" + logger.log project_id: project_id, doc_id: doc_id, "getting doc via http" timer = new Metrics.Timer("http.getDoc") if req.query?.fromVersion? @@ -22,7 +22,7 @@ module.exports = HttpController = DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges, pathname) -> timer.done() return next(error) if error? - logger.info project_id: project_id, doc_id: doc_id, "got doc via http" + logger.log project_id: project_id, doc_id: doc_id, "got doc via http" if !lines? or !version? return next(new Errors.NotFoundError("document not found")) res.send JSON.stringify @@ -44,13 +44,13 @@ module.exports = HttpController = projectStateHash = req.query?.state # exclude is string of existing docs "id:version,id:version,..." 
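# e.g. exclude="abc:42,def:7" parses to excludeVersions = { abc: "42", def: "7" }
# (the ids here are illustrative; real ones are 24-char Mongo ObjectIds)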
excludeItems = req.query?.exclude?.split(',') or [] - logger.info project_id: project_id, exclude: excludeItems, "getting docs via http" + logger.log project_id: project_id, exclude: excludeItems, "getting docs via http" timer = new Metrics.Timer("http.getAllDocs") excludeVersions = {} for item in excludeItems [id,version] = item?.split(':') excludeVersions[id] = version - logger.info {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions" + logger.log {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions" ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, excludeVersions, (error, result) -> timer.done() if error instanceof Errors.ProjectStateChangedError diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index ce3fa5dfca..d0653bb6a2 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -30,12 +30,12 @@ module.exports = RangesManager = return callback(error) response = RangesManager._getRanges rangesTracker - logger.info {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges" + logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges" callback null, response acceptChanges: (change_ids, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges - logger.info "accepting #{ change_ids.length } changes in ranges" + logger.log "accepting #{ change_ids.length } changes in ranges" rangesTracker = new RangesTracker(changes, comments) rangesTracker.removeChangeIds(change_ids) response = RangesManager._getRanges(rangesTracker) @@ -43,7 +43,7 @@ module.exports = RangesManager = deleteComment: (comment_id, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges - logger.info {comment_id}, "deleting comment in ranges" + logger.log {comment_id}, "deleting comment in ranges" rangesTracker = new RangesTracker(changes, comments) rangesTracker.removeCommentId(comment_id) response = RangesManager._getRanges(rangesTracker) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 064c6144af..25dbafc6e7 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -233,7 +233,7 @@ module.exports = RedisManager = newHash = RedisManager._computeHash(newDocLines) opVersions = appliedOps.map (op) -> op?.v - logger.info doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis" + logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> if error? 
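The hash logged in the hunk above lets corrupted doclines be detected when they are read back. The body of _computeHash itself is not shown in this patch; patch 455 below hashes doc content as a git-style blob SHA-1, so a sketch along the same lines (the JSON.stringify serialisation step is an assumption, not taken from this diff) would be:

    crypto = require "crypto"

    # Hash the serialised doc lines the way git hashes a blob:
    # SHA-1 over "blob <length>\0<content>", hex-encoded.
    computeDocHash = (docLines) ->
      content = JSON.stringify(docLines)
      crypto.createHash('sha1')
        .update("blob " + content.length + "\x00")
        .update(content, 'utf8')
        .digest('hex')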
diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 7a79b82724..a5cc6070cb 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -20,7 +20,7 @@ module.exports = ShareJsUpdateManager = return model applyUpdate: (project_id, doc_id, update, lines, version, callback = (error, updatedDocLines) ->) -> - logger.info project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates" + logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates" jobs = [] # We could use a global model for all docs, but we're hitting issues with the @@ -39,7 +39,7 @@ module.exports = ShareJsUpdateManager = ShareJsUpdateManager._sendOp(project_id, doc_id, update) else return callback(error) - logger.info project_id: project_id, doc_id: doc_id, error: error, "applied update" + logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update" model.getSnapshot doc_key, (error, data) => return callback(error) if error? docLines = data.snapshot.split(/\r\n|\n|\r/) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 43aea49512..bfcfb806ca 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -47,7 +47,7 @@ module.exports = UpdateManager = profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id}) RealTimeRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => return callback(error) if error? - logger.info {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates" + logger.log {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates" if updates.length == 0 return callback() profile.log("getPendingUpdatesForDoc") diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 736ceeee2d..c52bb4b30d 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -16,7 +16,7 @@ describe "DocumentManager", -> "./HistoryManager": @HistoryManager = flushDocChangesAsync: sinon.stub() flushProjectChangesAsync: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub(), info: sinon.stub()} + "logger-sharelatex": @logger = {log: sinon.stub()} "./DocOpsManager": @DocOpsManager = {} "./Metrics": @Metrics = Timer: class Timer diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee index a1d0e11b81..ca3937d4c5 100644 --- a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee @@ -19,9 +19,7 @@ describe "HistoryRedisManager", -> key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" - "logger-sharelatex": - log: -> - info: -> + "logger-sharelatex": { log: () -> } @doc_id = "doc-id-123" 
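# After this revert the modules under test only call logger.log, so the
# sandboxed logger fakes no longer need an info() method; SandboxedModule
# injects these minimal stubs in place of the real logger-sharelatex.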
@project_id = "project-id-123" @callback = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 46daa9a63b..ab6718c12a 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -12,7 +12,7 @@ describe "HttpController", -> "./HistoryManager": @HistoryManager = flushProjectChangesAsync: sinon.stub() "./ProjectManager": @ProjectManager = {} - "logger-sharelatex" : @logger = { log: sinon.stub(), info: sinon.stub() } + "logger-sharelatex" : @logger = { log: sinon.stub() } "./Metrics": @Metrics = {} "./Errors" : Errors @Metrics.Timer = class Timer @@ -59,7 +59,7 @@ describe "HttpController", -> .should.equal true it "should log the request", -> - @logger.info + @logger.log .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") .should.equal true @@ -88,7 +88,7 @@ describe "HttpController", -> .should.equal true it "should log the request", -> - @logger.info + @logger.log .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") .should.equal true @@ -475,7 +475,7 @@ describe "HttpController", -> .should.equal true it "should log the request", -> - @logger.info + @logger.log .calledWith({project_id: @project_id, exclude: []}, "getting docs via http") .should.equal true diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee index e7322f0e63..b11c73489e 100644 --- a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee @@ -7,15 +7,10 @@ SandboxedModule = require('sandboxed-module') describe "RangesManager", -> beforeEach -> - @logger = - error: sinon.stub() - log: sinon.stub() - warn: sinon.stub() - info: sinon.stub() - @RangesManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": @logger + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } + @doc_id = "doc-id-123" @project_id = "project-id-123" @user_id = "user-id-123" @@ -189,7 +184,7 @@ describe "RangesManager", -> beforeEach -> @RangesManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": @logger + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } "./RangesTracker":@RangesTracker = SandboxedModule.require "../../../../app/js/RangesTracker.js" @ranges = { @@ -231,7 +226,7 @@ describe "RangesManager", -> done() it "should log the call with the correct number of changes", -> - @logger.info + @logger.log .calledWith("accepting 1 changes in ranges") .should.equal true @@ -263,7 +258,7 @@ describe "RangesManager", -> done() it "should log the call with the correct number of changes", -> - @logger.info + @logger.log .calledWith("accepting #{ @change_ids.length } changes in ranges") .should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 9505339ddf..4f6c24720e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ 
b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -14,7 +14,7 @@ describe "RedisManager", -> tk.freeze(new Date()) @RedisManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub(), info:-> } + "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} "settings-sharelatex": @settings = { documentupdater: {logHashErrors: {write:true, read:true}} diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index d95e7497fb..b7364b00a4 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -16,7 +16,7 @@ describe "ShareJsUpdateManager", -> constructor: (@db) -> "./ShareJsDB" : @ShareJsDB = { mockDB: true } "redis-sharelatex" : createClient: () => @rclient = auth:-> - "logger-sharelatex": @logger = { log: sinon.stub(), info: -> } + "logger-sharelatex": @logger = { log: sinon.stub() } "./RealTimeRedisManager": @RealTimeRedisManager = {} globals: clearTimeout: @clearTimeout = sinon.stub() diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 623e2eec0c..383bd1848e 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -16,7 +16,7 @@ describe "UpdateManager", -> "./RealTimeRedisManager" : @RealTimeRedisManager = {} "./ShareJsUpdateManager" : @ShareJsUpdateManager = {} "./HistoryManager" : @HistoryManager = {} - "logger-sharelatex": @logger = { log: sinon.stub(), info:-> } + "logger-sharelatex": @logger = { log: sinon.stub() } "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() From 2998750a33a6ea0c20d7f7666daa92070317105a Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 6 Feb 2019 16:01:44 +0000 Subject: [PATCH 444/769] fix redis version lock --- services/document-updater/npm-shrinkwrap.json | 2 +- services/document-updater/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 26742b3c9b..095e360c00 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -1284,7 +1284,7 @@ }, "redis-sharelatex": { "version": "1.0.5", - "from": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.5.tgz", + "from": "redis-sharelatex@latest", "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.5.tgz", "dependencies": { "coffee-script": { diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 5ab28811b8..fa95246d21 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,7 +26,7 @@ "logger-sharelatex": "^1.6.0", "lynx": "0.0.11", "metrics-sharelatex": "^2.1.1", - "redis-sharelatex": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.5.tgz", + "redis-sharelatex": "^1.0.5", "request": 
"2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", From 73bd2644019d5558cfaad03c7559fa785684080b Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 7 Feb 2019 14:55:24 +0000 Subject: [PATCH 445/769] remove realtime keys in settings, no longer used --- services/document-updater/config/settings.defaults.coffee | 6 ------ 1 file changed, 6 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 20d0a53549..9ae402bc4e 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -20,12 +20,6 @@ module.exports = url: "http://#{process.env["PROJECT_HISTORY_HOST"] or "localhost"}:3054" redis: - realtime: - port: process.env["REAL_TIME_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["REAL_TIME_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["REAL_TIME_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - redisOptions: - keepAlive: 100 history: port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" From b5564095f35975b1bbf76790556c5561d858dbc3 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 7 Feb 2019 15:10:40 +0000 Subject: [PATCH 446/769] add /health_check/redis route back in --- services/document-updater/app.coffee | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index a22a552d59..04f884930e 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -71,7 +71,16 @@ app.get '/status', (req, res)-> else res.send('document updater is alive') + docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +app.get "/health_check/redis", (req, res, next) -> + docUpdaterRedisClient.healthCheck (error) -> + if error? + logger.err {err: error}, "failed redis health check" + res.send 500 + else + res.send 200 + app.get "/health_check/redis_cluster", (req, res, next) -> docUpdaterRedisClient.healthCheck (error) -> if error? 
From ecaef6485b236d7c3dbe305b3c00481a8377bbdb Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 7 Feb 2019 15:27:51 +0000 Subject: [PATCH 447/769] revert the removal of realtime keyspace --- .../document-updater/app/coffee/DispatchManager.coffee | 2 +- .../app/coffee/RealTimeRedisManager.coffee | 4 ++-- .../document-updater/config/settings.defaults.coffee | 9 +++++++++ .../acceptance/coffee/helpers/DocUpdaterClient.coffee | 6 +++--- .../coffee/DispatchManager/DispatchManagerTests.coffee | 2 +- .../RealTimeRedisManagerTests.coffee | 2 +- 6 files changed, 17 insertions(+), 8 deletions(-) diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index e751534068..93a22bfc07 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -10,7 +10,7 @@ RateLimitManager = require('./RateLimitManager') module.exports = DispatchManager = createDispatcher: (RateLimiter) -> - client = redis.createClient(Settings.redis.documentupdater) + client = redis.createClient(Settings.redis.realtime) worker = { client: client _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index 64bbe572b8..7da7ca1f64 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -1,6 +1,6 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -Keys = Settings.redis.documentupdater.key_schema +rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) +Keys = Settings.redis.realtime.key_schema logger = require('logger-sharelatex') MAX_OPS_PER_ITERATION = 8 # process a limited number of ops for safety diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 9ae402bc4e..2fbe80915f 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -21,6 +21,15 @@ module.exports = redis: + realtime: + port: process.env["REAL_TIME_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" + host: process.env["REAL_TIME_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" + password: process.env["REAL_TIME_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" + redisOptions: + keepAlive: 100 + key_schema: + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + history: port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 587e6f3af9..7f50d64372 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -1,10 +1,10 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -keys = Settings.redis.documentupdater.key_schema +rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) +keys = 
Settings.redis.realtime.key_schema request = require("request").defaults(jar: false) async = require "async" -rclient_sub = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +rclient_sub = require("redis-sharelatex").createClient(Settings.redis.realtime) rclient_sub.subscribe "applied-ops" rclient_sub.setMaxListeners(0) diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index 85cdcb5a7e..990688ce86 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -13,7 +13,7 @@ describe "DispatchManager", -> "logger-sharelatex": @logger = { log: sinon.stub() } "settings-sharelatex": @settings = redis: - documentupdater: {} + realtime: {} "redis-sharelatex": @redis = {} "./RateLimitManager": {} "./Metrics": diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index 375b85df78..a04da996dc 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -15,7 +15,7 @@ describe "RealTimeRedisManager", -> "redis-sharelatex": createClient: () => @rclient "settings-sharelatex": redis: - documentupdater: @settings = + realtime: @settings = key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" "logger-sharelatex": { log: () -> } From 08723f89722d5c43e60afe70df249533e2a9aef2 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 7 Feb 2019 15:53:26 +0000 Subject: [PATCH 448/769] revert health check redis types --- services/document-updater/app.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 04f884930e..26d5db40b6 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -71,16 +71,16 @@ app.get '/status', (req, res)-> else res.send('document updater is alive') - -docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +webRedisClient = require("redis-sharelatex").createClient(Settings.redis.realtime) app.get "/health_check/redis", (req, res, next) -> - docUpdaterRedisClient.healthCheck (error) -> + webRedisClient.healthCheck (error) -> if error? logger.err {err: error}, "failed redis health check" res.send 500 else res.send 200 - + +docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) app.get "/health_check/redis_cluster", (req, res, next) -> docUpdaterRedisClient.healthCheck (error) -> if error? 
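A note on the curly braces appearing in these key schemas (and standardised for pendingUpdates in patch 452 below): they are Redis Cluster hash tags. When a key name contains {...}, only the text inside the braces is hashed to choose a cluster slot, so all keys for one doc land on the same node and multi-key operations on them stay legal. A minimal illustration, with hypothetical helper names:

    keyFor =
      docLines:   ({doc_id}) -> "doclines:{#{doc_id}}"
      docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}"

    # "doclines:{abc}" and "DocVersion:{abc}" both hash only the substring
    # "abc", so they map to the same slot; without the braces the two full
    # key names would usually land in different slots and could not be used
    # together in a single multi-key command.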
From 3bc4cb492a7fad6b7587a29e155993bd697d27c4 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 7 Feb 2019 16:27:52 +0000 Subject: [PATCH 449/769] added log line --- services/document-updater/app/coffee/DispatchManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 93a22bfc07..cfefa3e9d0 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -16,6 +16,7 @@ module.exports = DispatchManager = _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> timer = new Metrics.Timer "worker.waiting" worker.client.blpop "pending-updates-list", 0, (error, result) -> + logger.log("getting pending-updates-list", error, result) timer.done() return callback(error) if error? return callback() if !result? From 0a0fc91f282dce72ae1cccda2f483542e6218cd7 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 12 Feb 2019 13:27:13 +0000 Subject: [PATCH 450/769] Update app.coffee --- services/document-updater/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 26d5db40b6..70c3fc875d 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -1,5 +1,5 @@ Metrics = require "metrics-sharelatex" -Metrics.initialize("document-updater") +Metrics.initialize("doc-updater") express = require('express') http = require("http") From 8c5d74faefca26e5f63e739cd19822edb07495e1 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 12 Feb 2019 16:45:11 +0000 Subject: [PATCH 451/769] use explicit json content-type to avoid security issues with text/html --- .../app/coffee/HttpController.coffee | 2 +- .../HttpController/HttpControllerTests.coffee | 17 ++++++++++------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 93f915d662..d0e8e1994b 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -25,7 +25,7 @@ module.exports = HttpController = logger.log project_id: project_id, doc_id: doc_id, "got doc via http" if !lines? or !version? 
return next(new Errors.NotFoundError("document not found")) - res.send JSON.stringify + res.json id: doc_id lines: lines version: version diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index ab6718c12a..15b9142647 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -22,6 +22,7 @@ describe "HttpController", -> @next = sinon.stub() @res = send: sinon.stub() + json: sinon.stub() describe "getDoc", -> beforeEach -> @@ -47,15 +48,15 @@ describe "HttpController", -> .should.equal true it "should return the doc as JSON", -> - @res.send - .calledWith(JSON.stringify({ + @res.json + .calledWith({ id: @doc_id lines: @lines version: @version ops: [] ranges: @ranges pathname: @pathname - })) + }) .should.equal true it "should log the request", -> @@ -68,7 +69,7 @@ describe "HttpController", -> describe "when recent ops are requested", -> beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, @ops) + @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, @ops, @ranges, @pathname) @req.query = fromVersion: "#{@fromVersion}" @HttpController.getDoc(@req, @res, @next) @@ -78,13 +79,15 @@ describe "HttpController", -> .should.equal true it "should return the doc as JSON", -> - @res.send - .calledWith(JSON.stringify({ + @res.json + .calledWith({ id: @doc_id lines: @lines version: @version ops: @ops - })) + ranges: @ranges + pathname: @pathname + }) .should.equal true it "should log the request", -> From 937e8fe0715a570cc950d6e11f9fb5174b61d420 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 15 Feb 2019 13:58:29 +0000 Subject: [PATCH 452/769] use redis cluster key in defaults for pendingupdates --- services/document-updater/config/settings.defaults.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 2fbe80915f..118fbef505 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -28,7 +28,7 @@ module.exports = redisOptions: keepAlive: 100 key_schema: - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" history: port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" From 4d7f9f3c1a8816d0ff8b3df68d80812b04a41d9a Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 5 Mar 2019 17:34:27 +0000 Subject: [PATCH 453/769] add sentry into settings.defaults --- services/document-updater/config/settings.defaults.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 118fbef505..d3dd079ace 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -88,3 +88,6 @@ module.exports = mongo: url: "mongodb://#{process.env["MONGO_HOST"] or "localhost"}/sharelatex" + + sentry: + dsn: process.env.SENTRY_DSN \ No newline at end of file From fd1425d83fc883180bdeb1a759391f9ad19e6ea2 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 21 
Mar 2019 12:10:15 +0000 Subject: [PATCH 454/769] include a unique id in every message published to redis --- .../app/coffee/RealTimeRedisManager.coffee | 9 +++++++++ .../coffee/ApplyingUpdatesToADocTests.coffee | 2 +- .../RealTimeRedisManagerTests.coffee | 12 ++++++++++++ 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index 7da7ca1f64..da6f47f2ff 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -2,6 +2,12 @@ Settings = require('settings-sharelatex') rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) Keys = Settings.redis.realtime.key_schema logger = require('logger-sharelatex') +os = require "os" +crypto = require "crypto" + +HOST = os.hostname() +RND = crypto.randomBytes(4).toString('hex') # generate a random key for this process +COUNT = 0 MAX_OPS_PER_ITERATION = 8 # process a limited number of ops for safety @@ -26,4 +32,7 @@ module.exports = RealTimeRedisManager = rclient.llen Keys.pendingUpdates({doc_id}), callback sendData: (data) -> + # create a unique message id using a counter + message_id = "doc:#{HOST}:#{RND}-#{COUNT++}" + data?._id = message_id rclient.publish "applied-ops", JSON.stringify(data) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index c6aa6fe856..5c080e0924 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -232,7 +232,7 @@ describe "Applying updates to a doc", -> @messageCallback.called.should.equal true [channel, message] = @messageCallback.args[0] channel.should.equal "applied-ops" - JSON.parse(message).should.deep.equal { + JSON.parse(message).should.deep.include { project_id: @project_id, doc_id: @doc_id, error:'Delete component \'not the correct content\' does not match deleted text \'one\ntwo\nthree\'' diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index a04da996dc..32ec3d9020 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -19,6 +19,9 @@ describe "RealTimeRedisManager", -> key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" "logger-sharelatex": { log: () -> } + "crypto": @crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) } + "os": @os = {hostname: sinon.stub().returns("somehost")} + @doc_id = "doc-id-123" @project_id = "project-id-123" @callback = sinon.stub() @@ -74,3 +77,12 @@ describe "RealTimeRedisManager", -> it "should return the length", -> @callback.calledWith(null, @length).should.equal true + + describe "sendData", -> + beforeEach -> + @message_id = "doc:somehost:01020304-0" + @rclient.publish = sinon.stub() + @RealTimeRedisManager.sendData({op: "thisop"}) + + it "should send the op with a message id", -> + @rclient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true \ No newline at 
end of file From cc1f3fce5b3b56246f874b55bc0cf38f2b850091 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 8 Apr 2019 13:43:24 +0100 Subject: [PATCH 455/769] check incoming hash when present --- .../app/coffee/ShareJsUpdateManager.coffee | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index a5cc6070cb..6631c7537b 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -6,6 +6,7 @@ Keys = require "./UpdateKeys" {EventEmitter} = require "events" util = require "util" RealTimeRedisManager = require "./RealTimeRedisManager" +crypto = require "crypto" ShareJsModel:: = {} util.inherits ShareJsModel, EventEmitter @@ -42,6 +43,10 @@ module.exports = ShareJsUpdateManager = logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update" model.getSnapshot doc_key, (error, data) => return callback(error) if error? + # only check hash when present and no other updates have been applied + if update.hash? and update.v == version + ourHash = ShareJsUpdateManager._computeHash(data.snapshot) + return callback(new Error("Invalid hash")) if ourHash != update.hash docLines = data.snapshot.split(/\r\n|\n|\r/) callback(null, docLines, data.v, model.db.appliedOps[doc_key] or []) @@ -53,3 +58,9 @@ module.exports = ShareJsUpdateManager = _sendOp: (project_id, doc_id, op) -> RealTimeRedisManager.sendData {project_id, doc_id, op} + _computeHash: (content) -> + return crypto.createHash('sha1') + .update("blob " + content.length + "\x00") + .update(content, 'utf8') + .digest('hex') + From 3c635c8d9886baf828dffb1f4f2c1c9cf3223fd4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 9 Apr 2019 09:20:48 +0100 Subject: [PATCH 456/769] check version before it is modified by applyOp --- .../document-updater/app/coffee/ShareJsUpdateManager.coffee | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 6631c7537b..f4f3674c75 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -23,7 +23,8 @@ module.exports = ShareJsUpdateManager = applyUpdate: (project_id, doc_id, update, lines, version, callback = (error, updatedDocLines) ->) -> logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates" jobs = [] - + # record the update version before it is modified + incomingUpdateVersion = update.version # We could use a global model for all docs, but we're hitting issues with the # internal state of ShareJS not being accessible for clearing caches, and # getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee) @@ -44,7 +45,7 @@ module.exports = ShareJsUpdateManager = model.getSnapshot doc_key, (error, data) => return callback(error) if error? # only check hash when present and no other updates have been applied - if update.hash? and update.v == version + if update.hash? 
and incomingUpdateVersion == version ourHash = ShareJsUpdateManager._computeHash(data.snapshot) return callback(new Error("Invalid hash")) if ourHash != update.hash docLines = data.snapshot.split(/\r\n|\n|\r/) From 3d76f4b9bfdd6d90d4b6c9c9e55b19996df43a6a Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 11 Apr 2019 13:25:03 +0100 Subject: [PATCH 457/769] Record a snapshot to mongo when a doc's comments/changes get collapsed --- .../app/coffee/RangesManager.coffee | 24 ++++-- .../app/coffee/RedisManager.coffee | 1 - .../app/coffee/SnapshotManager.coffee | 39 +++++++++ .../app/coffee/UpdateManager.coffee | 11 ++- .../app/coffee/mongojs.coffee | 7 ++ services/document-updater/npm-shrinkwrap.json | 86 +++++++++++++++++-- services/document-updater/package.json | 5 +- .../test/acceptance/coffee/RangesTests.coffee | 66 +++++++++++++- .../ProjectManager/updateProjectTests.coffee | 2 +- .../RangesManager/RangesManagerTests.coffee | 33 ++++++- .../UpdateManager/UpdateManagerTests.coffee | 21 ++++- 11 files changed, 274 insertions(+), 21 deletions(-) create mode 100644 services/document-updater/app/coffee/SnapshotManager.coffee create mode 100644 services/document-updater/app/coffee/mongojs.coffee diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.coffee index d0653bb6a2..bcb16a39c9 100644 --- a/services/document-updater/app/coffee/RangesManager.coffee +++ b/services/document-updater/app/coffee/RangesManager.coffee @@ -1,13 +1,15 @@ RangesTracker = require "./RangesTracker" logger = require "logger-sharelatex" +_ = require "lodash" module.exports = RangesManager = MAX_COMMENTS: 500 MAX_CHANGES: 2000 - applyUpdate: (project_id, doc_id, entries = {}, updates = [], newDocLines, callback = (error, new_entries) ->) -> - {changes, comments} = entries + applyUpdate: (project_id, doc_id, entries = {}, updates = [], newDocLines, callback = (error, new_entries, ranges_were_collapsed) ->) -> + {changes, comments} = _.cloneDeep(entries) rangesTracker = new RangesTracker(changes, comments) + emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker) for update in updates rangesTracker.track_changes = !!update.meta.tc if !!update.meta.tc @@ -29,9 +31,11 @@ module.exports = RangesManager = logger.error {err: error, project_id, doc_id, newDocLines, updates}, "error validating ranges" return callback(error) + emptyRangeCountAfter = RangesManager._emptyRangesCount(rangesTracker) + rangesWereCollapsed = emptyRangeCountAfter > emptyRangeCountBefore response = RangesManager._getRanges rangesTracker - logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges" - callback null, response + logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length, rangesWereCollapsed}, "applied updates to ranges" + callback null, response, rangesWereCollapsed acceptChanges: (change_ids, ranges, callback = (error, ranges) ->) -> {changes, comments} = ranges @@ -59,4 +63,14 @@ module.exports = RangesManager = if rangesTracker.comments?.length > 0 response ?= {} response.comments = rangesTracker.comments - return response \ No newline at end of file + return response + + _emptyRangesCount: (ranges) -> + count = 0 + for comment in (ranges.comments or []) + if comment.op.c == "" + count++ + for change in (ranges.changes or []) when change.op.i? 
+ if change.op.i == "" + count++ + return count \ No newline at end of file diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 25dbafc6e7..76c7a9a8d0 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -1,6 +1,5 @@ Settings = require('settings-sharelatex') rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -_ = require('underscore') logger = require('logger-sharelatex') metrics = require('./Metrics') Errors = require "./Errors" diff --git a/services/document-updater/app/coffee/SnapshotManager.coffee b/services/document-updater/app/coffee/SnapshotManager.coffee new file mode 100644 index 0000000000..5a756b34db --- /dev/null +++ b/services/document-updater/app/coffee/SnapshotManager.coffee @@ -0,0 +1,39 @@ +{db, ObjectId} = require "./mongojs" + +module.exports = SnapshotManager = + recordSnapshot: (project_id, doc_id, version, lines, ranges, callback) -> + try + project_id = ObjectId(project_id) + doc_id = ObjectId(doc_id) + catch error + return callback(error) + db.docSnapshots.insert { + project_id, doc_id, version, lines + ranges: SnapshotManager.jsonRangesToMongo(ranges), + ts: new Date() + }, callback + + jsonRangesToMongo: (ranges) -> + return null if !ranges? + + updateMetadata = (metadata) -> + if metadata?.ts? + metadata.ts = new Date(metadata.ts) + if metadata?.user_id? + metadata.user_id = SnapshotManager._safeObjectId(metadata.user_id) + + for change in ranges.changes or [] + change.id = SnapshotManager._safeObjectId(change.id) + updateMetadata(change.metadata) + for comment in ranges.comments or [] + comment.id = SnapshotManager._safeObjectId(comment.id) + if comment.op?.t? + comment.op.t = SnapshotManager._safeObjectId(comment.op.t) + updateMetadata(comment.metadata) + return ranges + + _safeObjectId: (data) -> + try + return ObjectId(data) + catch error + return data diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index bfcfb806ca..5f9dcf5317 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -4,13 +4,14 @@ RealTimeRedisManager = require "./RealTimeRedisManager" ShareJsUpdateManager = require "./ShareJsUpdateManager" HistoryManager = require "./HistoryManager" Settings = require('settings-sharelatex') -_ = require("underscore") +_ = require("lodash") async = require("async") logger = require('logger-sharelatex') Metrics = require "./Metrics" Errors = require "./Errors" DocumentManager = require "./DocumentManager" RangesManager = require "./RangesManager" +SnapshotManager = require "./SnapshotManager" Profiler = require "./Profiler" module.exports = UpdateManager = @@ -76,13 +77,19 @@ module.exports = UpdateManager = return callback(error) if error? if !lines? or !version? return callback(new Errors.NotFoundError("document not found: #{doc_id}")) + previousVersion = version ShareJsUpdateManager.applyUpdate project_id, doc_id, update, lines, version, (error, updatedDocLines, version, appliedOps) -> profile.log("sharejs.applyUpdate") return callback(error) if error? 
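# The RangesManager callback gains a third argument below,
# ranges_were_collapsed. When applying the ops empties a tracked change or
# comment (see _emptyRangesCount above), the pre-update lines and ranges are
# snapshotted to the docSnapshots collection before Redis is updated, so the
# collapsed range's content is not lost.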
- RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges) -> + RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges, ranges_were_collapsed) -> UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines) profile.log("RangesManager.applyUpdate") return callback(error) if error? + if ranges_were_collapsed + logger.log {project_id, doc_id, previousVersion, lines, ranges, update}, "update collapsed some ranges, snapshotting previous content" + SnapshotManager.recordSnapshot project_id, doc_id, previousVersion, lines, ranges, (error) -> + if error? + logger.error {err: error, project_id, doc_id, version, lines, ranges}, "error recording snapshot" RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) -> profile.log("RedisManager.updateDocument") return callback(error) if error? diff --git a/services/document-updater/app/coffee/mongojs.coffee b/services/document-updater/app/coffee/mongojs.coffee new file mode 100644 index 0000000000..8f8f1a9ab9 --- /dev/null +++ b/services/document-updater/app/coffee/mongojs.coffee @@ -0,0 +1,7 @@ +Settings = require "settings-sharelatex" +mongojs = require "mongojs" +db = mongojs(Settings.mongo.url, ["docSnapshots"]) +module.exports = + db: db + ObjectId: mongojs.ObjectId + diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 095e360c00..0f4f0fb697 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -291,6 +291,11 @@ "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", "dev": true }, + "bson": { + "version": "1.0.9", + "from": "bson@~1.0.4", + "resolved": "https://registry.npmjs.org/bson/-/bson-1.0.9.tgz" + }, "buffer-crc32": { "version": "0.2.1", "from": "buffer-crc32@0.2.1", @@ -301,6 +306,11 @@ "from": "buffer-equal-constant-time@1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" }, + "buffer-shims": { + "version": "1.0.0", + "from": "buffer-shims@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz" + }, "builtin-modules": { "version": "3.0.0", "from": "builtin-modules@>=3.0.0 <4.0.0", @@ -364,7 +374,7 @@ }, "coffee-script": { "version": "1.7.1", - "from": "coffee-script@>=1.7.0 <1.8.0", + "from": "coffee-script@1.7.1", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" }, "combined-stream": { @@ -491,6 +501,11 @@ "from": "duplexify@>=3.6.0 <4.0.0", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz" }, + "each-series": { + "version": "1.0.0", + "from": "each-series@^1.0.0", + "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz" + }, "ecc-jsbn": { "version": "0.1.1", "from": "ecc-jsbn@>=0.1.1 <0.2.0", @@ -816,7 +831,7 @@ }, "lodash": { "version": "4.17.4", - "from": "lodash@>=4.14.0 <5.0.0", + "from": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" }, "lodash.defaults": { @@ -1093,6 +1108,43 @@ "from": "module-details-from-path@>=1.0.3 <2.0.0", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" }, + "mongodb": { + "version": "2.2.36", + "from": "mongodb@^2.2.31", + "resolved": 
"https://registry.npmjs.org/mongodb/-/mongodb-2.2.36.tgz", + "dependencies": { + "es6-promise": { + "version": "3.2.1", + "from": "es6-promise@3.2.1", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.2.1.tgz" + }, + "process-nextick-args": { + "version": "1.0.7", + "from": "process-nextick-args@>=1.0.6 <1.1.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "readable-stream": { + "version": "2.2.7", + "from": "readable-stream@2.2.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.7.tgz" + }, + "string_decoder": { + "version": "1.0.3", + "from": "string_decoder@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz" + } + } + }, + "mongodb-core": { + "version": "2.1.20", + "from": "mongodb-core@2.1.20", + "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-2.1.20.tgz" + }, + "mongojs": { + "version": "2.6.0", + "from": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz", + "resolved": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz" + }, "ms": { "version": "2.0.0", "from": "ms@2.0.0", @@ -1158,6 +1210,11 @@ "from": "parse-duration@>=0.1.1 <0.2.0", "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz" }, + "parse-mongo-url": { + "version": "1.1.1", + "from": "parse-mongo-url@^1.1.1", + "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz" + }, "parse-ms": { "version": "2.0.0", "from": "parse-ms@>=2.0.0 <3.0.0", @@ -1405,6 +1462,11 @@ } } }, + "require_optional": { + "version": "1.0.1", + "from": "require_optional@~1.0.0", + "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz" + }, "require-in-the-middle": { "version": "3.1.0", "from": "require-in-the-middle@>=3.0.0 <4.0.0", @@ -1420,6 +1482,11 @@ "from": "resolve@>=1.5.0 <2.0.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz" }, + "resolve-from": { + "version": "2.0.0", + "from": "resolve-from@^2.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz" + }, "retry-axios": { "version": "0.3.2", "from": "retry-axios@>=0.3.2 <0.4.0", @@ -1585,12 +1652,22 @@ "from": "through2@>=2.0.3 <3.0.0", "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" }, + "thunky": { + "version": "1.0.3", + "from": "thunky@^1.0.2", + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.0.3.tgz" + }, "timekeeper": { "version": "2.0.0", "from": "timekeeper@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.0.0.tgz", "dev": true }, + "to-mongodb-core": { + "version": "2.0.0", + "from": "to-mongodb-core@^2.0.0", + "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz" + }, "tough-cookie": { "version": "2.3.3", "from": "tough-cookie@>=2.3.3 <2.4.0", @@ -1612,11 +1689,6 @@ "from": "uid2@0.0.2", "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.2.tgz" }, - "underscore": { - "version": "1.2.2", - "from": "underscore@1.2.2", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.2.tgz" - }, "uri-js": { "version": "4.2.2", "from": "uri-js@>=4.2.2 <5.0.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fa95246d21..f0682a7c30 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -23,16 +23,17 @@ "async": "^2.5.0", "coffee-script": "~1.7.0", "express": 
"3.3.4", + "lodash": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", "logger-sharelatex": "^1.6.0", "lynx": "0.0.11", "metrics-sharelatex": "^2.1.1", + "mongojs": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz", "redis-sharelatex": "^1.0.5", "request": "2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", "settings-sharelatex": "^1.1.0", - "sinon": "~1.5.2", - "underscore": "1.2.2" + "sinon": "~1.5.2" }, "devDependencies": { "bunyan": "~0.22.1", diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee index 95c80440c0..c6df55b459 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee +++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee @@ -4,11 +4,15 @@ chai.should() expect = chai.expect async = require "async" +{db, ObjectId} = require "../../../app/js/mongojs" MockWebApi = require "./helpers/MockWebApi" DocUpdaterClient = require "./helpers/DocUpdaterClient" DocUpdaterApp = require "./helpers/DocUpdaterApp" describe "Ranges", -> + before (done) -> + DocUpdaterApp.ensureRunning done + describe "tracking changes from ops", -> before (done) -> @project_id = DocUpdaterClient.randomId() @@ -305,4 +309,64 @@ describe "Ranges", -> throw error if error? ranges = data.ranges expect(ranges.changes).to.be.undefined - done() \ No newline at end of file + done() + + describe.only "deleting text surrounding a comment", -> + before (done) -> + @project_id = DocUpdaterClient.randomId() + @user_id = DocUpdaterClient.randomId() + @doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc @project_id, @doc_id, { + lines: ["foo bar baz"] + version: 0 + ranges: { + comments: [{ + op: { c: "a", p: 5, tid: @tid = DocUpdaterClient.randomId() } + metadata: + user_id: @user_id + ts: new Date() + }] + } + } + @updates = [{ + doc: @doc_id + op: [{ d: "foo ", p: 0 }] + v: 0 + meta: { user_id: @user_id } + }, { + doc: @doc_id + op: [{ d: "bar ", p: 0 }] + v: 1 + meta: { user_id: @user_id } + }] + jobs = [] + for update in @updates + do (update) => + jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + async.series jobs, (error) -> + throw error if error? + setTimeout () => + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => + throw error if error? + done() + , 200 + + it "should write a snapshot from before the destructive change", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => + return done(error) if error? + db.docSnapshots.find { + project_id: ObjectId(@project_id), + doc_id: ObjectId(@doc_id) + }, (error, docSnapshots) => + return done(error) if error? 
+ expect(docSnapshots.length).to.equal 1 + expect(docSnapshots[0].version).to.equal 1 + expect(docSnapshots[0].lines).to.deep.equal ["bar baz"] + expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal { + c: "a", + p: 1, + tid: @tid + } + done() diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee index 3ed0109be7..635e562669 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee @@ -3,7 +3,7 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/ProjectManager.js" SandboxedModule = require('sandboxed-module') -_ = require('underscore') +_ = require('lodash') describe "ProjectManager", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee index b11c73489e..93d5d26e2f 100644 --- a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee @@ -50,8 +50,9 @@ describe "RangesManager", -> it "should return the modified the comments and changes", -> @callback.called.should.equal true - [error, entries] = @callback.args[0] + [error, entries, ranges_were_collapsed] = @callback.args[0] expect(error).to.be.null + expect(ranges_were_collapsed).to.equal false entries.comments[0].op.should.deep.equal { c: "three " p: 8 @@ -180,6 +181,36 @@ describe "RangesManager", -> expect(error).to.not.be.null expect(error.message).to.equal("Change ({\"op\":{\"i\":\"five\",\"p\":15},\"metadata\":{\"user_id\":\"user-id-123\"}}) doesn't match text (\"our \")") + + describe "with an update that collapses a range", -> + beforeEach -> + @updates = [{ + meta: + user_id: @user_id + op: [{ + d: "one" + p: 0 + t: "thread-id-1" + }] + }] + @entries = { + comments: [{ + op: + c: "n" + p: 1 + t: "thread-id-2" + metadata: + user_id: @user_id + }] + changes: [] + } + @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + + it "should return ranges_were_collapsed == true", -> + @callback.called.should.equal true + [error, entries, ranges_were_collapsed] = @callback.args[0] + expect(ranges_were_collapsed).to.equal true + describe "acceptChanges", -> beforeEach -> @RangesManager = SandboxedModule.require modulePath, diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 383bd1848e..32c305aedd 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -23,6 +23,7 @@ describe "UpdateManager", -> "settings-sharelatex": @Settings = {} "./DocumentManager": @DocumentManager = {} "./RangesManager": @RangesManager = {} + "./SnapshotManager": @SnapshotManager = {} "./Profiler": class Profiler log: sinon.stub().returns { end: sinon.stub() } end: sinon.stub() @@ -169,7 +170,7 @@ describe "UpdateManager", -> @project_ops_length = sinon.stub() @pathname = '/a/b/c.tex' @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges, @pathname, @projectHistoryId) - @RangesManager.applyUpdate = 
sinon.stub().yields(null, @updated_ranges) + @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges, false) @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) @RedisManager.updateDocument = sinon.stub().yields(null, @doc_ops_length, @project_ops_length) @RealTimeRedisManager.sendData = sinon.stub() @@ -239,6 +240,24 @@ describe "UpdateManager", -> it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true + describe "when ranges get collapsed", -> + beforeEach -> + @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges, true) + @SnapshotManager.recordSnapshot = sinon.stub().yields() + @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback + + it "should call SnapshotManager.recordSnapshot", -> + @SnapshotManager.recordSnapshot + .calledWith( + @project_id, + @doc_id, + @version, + @lines, + @ranges + ) + .should.equal true + + describe "_addProjectHistoryMetadataToOps", -> it "should add projectHistoryId, pathname and doc_length metadata to the ops", -> lines = [ From 33478f95fdbabd8e8f2d0668770c28246f65c3cb Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 11 Apr 2019 16:32:31 +0100 Subject: [PATCH 458/769] Fix package.json versions --- services/document-updater/npm-shrinkwrap.json | 4 ++-- services/document-updater/package.json | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 0f4f0fb697..f30ce4f581 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -831,7 +831,7 @@ }, "lodash": { "version": "4.17.4", - "from": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "from": "lodash@4.17.4", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" }, "lodash.defaults": { @@ -1142,7 +1142,7 @@ }, "mongojs": { "version": "2.6.0", - "from": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz", + "from": "mongojs@2.6.0", "resolved": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz" }, "ms": { diff --git a/services/document-updater/package.json b/services/document-updater/package.json index f0682a7c30..386f48e7ba 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -23,11 +23,11 @@ "async": "^2.5.0", "coffee-script": "~1.7.0", "express": "3.3.4", - "lodash": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "lodash": "^4.17.4", "logger-sharelatex": "^1.6.0", "lynx": "0.0.11", "metrics-sharelatex": "^2.1.1", - "mongojs": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz", + "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.5", "request": "2.25.0", "requestretry": "^1.12.0", From 52f3596e53b85661d6ae2de961620870da6d8ad7 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 16 Apr 2019 11:05:17 +0100 Subject: [PATCH 459/769] Review feedback --- .../app/coffee/SnapshotManager.coffee | 7 +++++-- .../app/coffee/UpdateManager.coffee | 19 +++++++++++++------ .../test/acceptance/coffee/RangesTests.coffee | 2 +- .../UpdateManager/UpdateManagerTests.coffee | 1 + 4 files changed, 20 insertions(+), 9 deletions(-) diff --git a/services/document-updater/app/coffee/SnapshotManager.coffee b/services/document-updater/app/coffee/SnapshotManager.coffee index 5a756b34db..86670b648d 100644 --- a/services/document-updater/app/coffee/SnapshotManager.coffee +++ 
b/services/document-updater/app/coffee/SnapshotManager.coffee
@@ -1,17 +1,20 @@
 {db, ObjectId} = require "./mongojs"
 
 module.exports = SnapshotManager =
-  recordSnapshot: (project_id, doc_id, version, lines, ranges, callback) ->
+  recordSnapshot: (project_id, doc_id, version, pathname, lines, ranges, callback) ->
     try
       project_id = ObjectId(project_id)
       doc_id = ObjectId(doc_id)
     catch error
       return callback(error)
     db.docSnapshots.insert {
-      project_id, doc_id, version, lines
+      project_id, doc_id, version, lines, pathname,
       ranges: SnapshotManager.jsonRangesToMongo(ranges),
       ts: new Date()
     }, callback
+  # Suggested indexes:
+  # db.docSnapshots.createIndex({project_id:1, doc_id:1})
+  # db.docSnapshots.createIndex({ts:1},{expireAfterSeconds: 30*24*3600}) # expires after 30 days
 
   jsonRangesToMongo: (ranges) ->
     return null if !ranges?
diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee
index 5f9dcf5317..2fe7508fed 100644
--- a/services/document-updater/app/coffee/UpdateManager.coffee
+++ b/services/document-updater/app/coffee/UpdateManager.coffee
@@ -85,17 +85,24 @@ module.exports = UpdateManager =
       UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines)
       profile.log("RangesManager.applyUpdate")
       return callback(error) if error?
-      if ranges_were_collapsed
-        logger.log {project_id, doc_id, previousVersion, lines, ranges, update}, "update collapsed some ranges, snapshotting previous content"
-        SnapshotManager.recordSnapshot project_id, doc_id, previousVersion, lines, ranges, (error) ->
-          if error?
-            logger.error {err: error, project_id, doc_id, version, lines, ranges}, "error recording snapshot"
       RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) ->
         profile.log("RedisManager.updateDocument")
         return callback(error) if error?
         HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, (error) ->
           profile.log("recordAndFlushHistoryOps")
-          callback(error)
+          return callback(error) if error?
+          if ranges_were_collapsed
+            logger.log {project_id, doc_id, previousVersion, lines, ranges, update}, "update collapsed some ranges, snapshotting previous content"
+            # Do this last, since it's a mongo call, and so potentially longest running
+            # If it overruns the lock, it's ok, since all of our redis work is done
+            SnapshotManager.recordSnapshot project_id, doc_id, previousVersion, pathname, lines, ranges, (error) ->
+              if error?
+                logger.error {err: error, project_id, doc_id, version, lines, ranges}, "error recording snapshot"
+                return callback(error)
+              else
+                callback()
+          else
+            callback()
 
   lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) ->
     profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id})
diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.coffee
index c6df55b459..52946f4823 100644
--- a/services/document-updater/test/acceptance/coffee/RangesTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/RangesTests.coffee
@@ -311,7 +311,7 @@
         expect(ranges.changes).to.be.undefined
         done()
 
-  describe.only "deleting text surrounding a comment", ->
+  describe "deleting text surrounding a comment", ->
     before (done) ->
       @project_id = DocUpdaterClient.randomId()
       @user_id = DocUpdaterClient.randomId()
diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
index 32c305aedd..0fdbbb9728 100644
--- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
+++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee
@@ -252,6 +252,7 @@
           @project_id,
           @doc_id,
           @version,
+          @pathname,
           @lines,
           @ranges
         )

From c1c23e4bee82c750c4e0f844dc7e69b196a2350e Mon Sep 17 00:00:00 2001
From: Tim Alby
Date: Mon, 22 Apr 2019 20:02:48 -0400
Subject: [PATCH 460/769] record last author id on document flush

This is a multi-step process:
* get an update's `user_id` from the metadata
* store the `user_id` (`lastUpdatedBy`) and current date (`lastUpdatedAt`) for the document in Redis on every update
* fetch `lastUpdatedAt` and `lastUpdatedBy` from Redis on document flush
* send the data to web to be persisted in Mongo
---
 .../app/coffee/DocumentManager.coffee | 8 +-
 .../app/coffee/PersistenceManager.coffee | 4 +-
 .../app/coffee/RedisManager.coffee | 17 ++++-
 .../app/coffee/UpdateManager.coffee | 2 +-
 .../config/settings.defaults.coffee | 2 +
 .../coffee/FlushingDocsTests.coffee | 10 ++-
 .../coffee/helpers/MockWebApi.coffee | 6 +-
 .../DocumentManagerTests.coffee | 10 ++-
 .../PersistenceManagerTests.coffee | 12 ++-
 .../RedisManager/RedisManagerTests.coffee | 75 +++++++++++++++----
 .../UpdateManager/UpdateManagerTests.coffee | 5 +-
 11 files changed, 115 insertions(+), 36 deletions(-)

diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee
index 39713a1981..5183a3aaea 100644
--- a/services/document-updater/app/coffee/DocumentManager.coffee
+++ b/services/document-updater/app/coffee/DocumentManager.coffee
@@ -102,14 +102,14 @@ module.exports = DocumentManager =
     callback = (args...) ->
       timer.done()
       _callback(args...)
 
-    RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges) ->
+    RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy) ->
       return callback(error) if error?
       if !lines? or !version?
         logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing"
         callback null # TODO: return a flag to bail out, as we go on to remove doc from memory?
else logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" - PersistenceManager.setDoc project_id, doc_id, lines, version, ranges, (error) -> + PersistenceManager.setDoc project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, (error) -> return callback(error) if error? RedisManager.clearUnflushedTime doc_id, callback @@ -141,7 +141,7 @@ module.exports = DocumentManager = return callback(new Errors.NotFoundError("document not found: #{doc_id}")) RangesManager.acceptChanges change_ids, ranges, (error, new_ranges) -> return callback(error) if error? - RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, (error) -> + RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, {}, (error) -> return callback(error) if error? callback() @@ -157,7 +157,7 @@ module.exports = DocumentManager = return callback(new Errors.NotFoundError("document not found: #{doc_id}")) RangesManager.deleteComment comment_id, ranges, (error, new_ranges) -> return callback(error) if error? - RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, (error) -> + RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, {}, (error) -> return callback(error) if error? callback() diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 8a43d989a8..ee80453137 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -50,7 +50,7 @@ module.exports = PersistenceManager = else return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) - setDoc: (project_id, doc_id, lines, version, ranges, _callback = (error) ->) -> + setDoc: (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy,_callback = (error) ->) -> timer = new Metrics.Timer("persistenceManager.setDoc") callback = (args...) -> timer.done() @@ -64,6 +64,8 @@ module.exports = PersistenceManager = lines: lines ranges: ranges version: version + lastUpdatedBy: lastUpdatedBy + lastUpdatedAt: lastUpdatedAt auth: user: Settings.apis.web.user pass: Settings.apis.web.pass diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 76c7a9a8d0..85918f4608 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -90,6 +90,8 @@ module.exports = RedisManager = multi.del keys.pathname(doc_id:doc_id) multi.del keys.projectHistoryId(doc_id:doc_id) multi.del keys.unflushedTime(doc_id:doc_id) + multi.del keys.lastUpdatedAt(doc_id: doc_id) + multi.del keys.lastUpdatedBy(doc_id: doc_id) multi.exec (error) -> return callback(error) if error? multi = rclient.multi() @@ -120,7 +122,9 @@ module.exports = RedisManager = multi.get keys.pathname(doc_id:doc_id) multi.get keys.projectHistoryId(doc_id:doc_id) multi.get keys.unflushedTime(doc_id:doc_id) - multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime])-> + multi.get keys.lastUpdatedAt(doc_id: doc_id) + multi.get keys.lastUpdatedBy(doc_id: doc_id) + multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy])-> timeSpan = timer.done() return callback(error) if error? 
# check if request took too long and bail out. only do this for @@ -152,14 +156,14 @@ module.exports = RedisManager = # doc is not in redis, bail out if !docLines? - return callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime + return callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy # doc should be in project set, check if missing (workaround for missing docs from putDoc) rclient.sadd keys.docsInProject(project_id:project_id), doc_id, (error, result) -> return callback(error) if error? if result isnt 0 # doc should already be in set logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" - callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime + callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy getDocVersion: (doc_id, callback = (error, version) ->) -> rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> @@ -209,7 +213,7 @@ module.exports = RedisManager = DOC_OPS_TTL: 60 * minutes DOC_OPS_MAX_LENGTH: 100 - updateDocument : (project_id, doc_id, docLines, newVersion, appliedOps = [], ranges, callback = (error) ->)-> + updateDocument : (project_id, doc_id, docLines, newVersion, appliedOps = [], ranges, updateMeta, callback = (error) ->)-> RedisManager.getDocVersion doc_id, (error, currentVersion) -> return callback(error) if error? if currentVersion + appliedOps.length != newVersion @@ -261,6 +265,11 @@ module.exports = RedisManager = # hasn't been modified before (the content in mongo has been # valid up to this point). Otherwise leave it alone ("NX" flag). multi.set keys.unflushedTime(doc_id: doc_id), Date.now(), "NX" + multi.set keys.lastUpdatedAt(doc_id: doc_id), Date.now() # index 8 + if updateMeta?.user_id + multi.set keys.lastUpdatedBy(doc_id: doc_id), updateMeta.user_id # index 9 + else + multi.del keys.lastUpdatedBy(doc_id: doc_id) # index 9 multi.exec (error, result) -> return callback(error) if error? # check the hash computed on the redis server diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 2fe7508fed..e5ede11173 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -85,7 +85,7 @@ module.exports = UpdateManager = UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines) profile.log("RangesManager.applyUpdate") return callback(error) if error? - RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, (error, doc_ops_length, project_ops_length) -> + RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, update.meta, (error, doc_ops_length, project_ops_length) -> profile.log("RedisManager.updateDocument") return callback(error) if error? 
HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, (error) -> diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index d3dd079ace..f68077bb8b 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -79,6 +79,8 @@ module.exports = projectHistoryId: ({doc_id}) -> "ProjectHistoryId:{#{doc_id}}" projectState: ({project_id}) -> "ProjectState:{#{project_id}}" pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" + lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:{#{doc_id}}" + lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:{#{doc_id}}" redisOptions: keepAlive: 100 diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee index 709159ccfb..4f19f13c2f 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee @@ -14,6 +14,7 @@ describe "Flushing a doc to Mongo", -> @version = 42 @update = doc: @doc_id + meta: { user_id: 'last-author-fake-id' } op: [{ i: "one and a half\n" p: 4 @@ -42,6 +43,13 @@ describe "Flushing a doc to Mongo", -> .calledWith(@project_id, @doc_id, @result, @version + 1) .should.equal true + it "should flush the last update author and time to the web api", -> + lastUpdatedAt = MockWebApi.setDocument.lastCall.args[5] + parseInt(lastUpdatedAt).should.be.closeTo((new Date()).getTime(), 30000) + + lastUpdatedBy = MockWebApi.setDocument.lastCall.args[6] + lastUpdatedBy.should.equal 'last-author-fake-id' + describe "when the doc does not exist in the doc updater", -> before (done) -> [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] @@ -65,7 +73,7 @@ describe "Flushing a doc to Mongo", -> version: @version } t = 30000 - sinon.stub MockWebApi, "setDocument", (project_id, doc_id, lines, version, ranges, callback = (error) ->) -> + sinon.stub MockWebApi, "setDocument", (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback = (error) ->) -> setTimeout callback, t t = 0 DocUpdaterClient.preloadDoc @project_id, @doc_id, done diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index 8523f7752e..5ee673ccf7 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -12,12 +12,14 @@ module.exports = MockWebApi = doc.pathname = '/a/b/c.tex' @docs["#{project_id}:#{doc_id}"] = doc - setDocument: (project_id, doc_id, lines, version, ranges, callback = (error) ->) -> + setDocument: (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback = (error) ->) -> doc = @docs["#{project_id}:#{doc_id}"] ||= {} doc.lines = lines doc.version = version doc.ranges = ranges doc.pathname = '/a/b/c.tex' + doc.lastUpdatedAt = lastUpdatedAt + doc.lastUpdatedBy = lastUpdatedBy callback null getDocument: (project_id, doc_id, callback = (error, doc) ->) -> @@ -34,7 +36,7 @@ module.exports = MockWebApi = res.send 404 app.post "/project/:project_id/doc/:doc_id", express.bodyParser(), (req, res, next) => - MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, 
req.body.version, req.body.ranges, (error) -> + MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, (error) -> if error? res.send 500 else diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index c52bb4b30d..dc57022a5a 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -35,6 +35,8 @@ describe "DocumentManager", -> @ranges = { comments: "mock", entries: "mock" } @pathname = '/a/b/c.tex' @unflushedTime = Date.now() + @lastUpdatedAt = Date.now() + @lastUpdatedBy = 'last-author-id' afterEach -> tk.reset() @@ -70,7 +72,7 @@ describe "DocumentManager", -> describe "flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges) + @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, @lastUpdatedAt, @lastUpdatedBy) @RedisManager.clearUnflushedTime = sinon.stub().callsArgWith(1, null) @PersistenceManager.setDoc = sinon.stub().yields() @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback @@ -82,7 +84,7 @@ describe "DocumentManager", -> it "should write the doc lines to the persistence layer", -> @PersistenceManager.setDoc - .calledWith(@project_id, @doc_id, @lines, @version, @ranges) + .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy) .should.equal true it "should call the callback without error", -> @@ -324,7 +326,7 @@ describe "DocumentManager", -> it "should save the updated ranges", -> @RedisManager.updateDocument - .calledWith(@project_id, @doc_id, @lines, @version, [], @updated_ranges) + .calledWith(@project_id, @doc_id, @lines, @version, [], @updated_ranges, {}) .should.equal true it "should call the callback", -> @@ -378,7 +380,7 @@ describe "DocumentManager", -> it "should save the updated ranges", -> @RedisManager.updateDocument - .calledWith(@project_id, @doc_id, @lines, @version, [], @updated_ranges) + .calledWith(@project_id, @doc_id, @lines, @version, [], @updated_ranges, {}) .should.equal true it "should call the callback", -> diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index 0f8ad59167..d1308ad899 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -24,6 +24,8 @@ describe "PersistenceManager", -> @callback = sinon.stub() @ranges = { comments: "mock", entries: "mock" } @pathname = '/a/b/c.tex' + @lastUpdatedAt = Date.now() + @lastUpdatedBy = 'last-author-id' @Settings.apis = web: url: @url = "www.example.com" @@ -133,7 +135,7 @@ describe "PersistenceManager", -> describe "with a successful response from the web api", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 200}) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, 
@lastUpdatedBy, @callback) it "should call the web api", -> @request @@ -143,6 +145,8 @@ describe "PersistenceManager", -> lines: @lines version: @version ranges: @ranges + lastUpdatedAt: @lastUpdatedAt + lastUpdatedBy: @lastUpdatedBy method: "POST" auth: user: @user @@ -162,7 +166,7 @@ describe "PersistenceManager", -> describe "when request returns an error", -> beforeEach -> @request.callsArgWith(1, @error = new Error("oops"), null, null) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback) it "should return the error", -> @callback.calledWith(@error).should.equal true @@ -173,7 +177,7 @@ describe "PersistenceManager", -> describe "when the request returns 404", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback) it "should return a NotFoundError", -> @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true @@ -184,7 +188,7 @@ describe "PersistenceManager", -> describe "when the request returns an error status code", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @callback) + @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback) it "should return an error", -> @callback.calledWith(new Error("web api error")).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 4f6c24720e..cdfdc45ac2 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -36,6 +36,8 @@ describe "RedisManager", -> projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" + lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:#{doc_id}" + lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:#{doc_id}" history: key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" @@ -116,6 +118,16 @@ describe "RedisManager", -> .calledWith("ProjectHistoryId:#{@doc_id}") .should.equal true + it "should get lastUpdatedAt", -> + @multi.get + .calledWith("lastUpdatedAt:#{@doc_id}") + .should.equal true + + it "should get lastUpdatedBy", -> + @multi.get + .calledWith("lastUpdatedBy:#{@doc_id}") + .should.equal true + it "should check if the document is in the DocsIn set", -> @rclient.sadd .calledWith("DocsIn:#{@project_id}") @@ -123,7 +135,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time, @lastUpdatedAt, @lastUpdatedBy) .should.equal true it 'should not log any errors', -> @@ -132,7 +144,7 @@ describe "RedisManager", -> describe "when the document is not present", -> beforeEach -> - @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, 
null, null, null, null, null]) + @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null, null, null, null]) @rclient.sadd = sinon.stub().yields() @RedisManager.getDoc @project_id, @doc_id, @callback @@ -143,7 +155,7 @@ describe "RedisManager", -> it 'should return an empty result', -> @callback - .calledWithExactly(null, null, 0, {}, null, null, null) + .calledWithExactly(null, null, 0, {}, null, null, null, null, null) .should.equal true it 'should not log any errors', -> @@ -161,7 +173,7 @@ describe "RedisManager", -> it 'should return the document', -> @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time) + .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time, @lastUpdatedAt, @lastUpdatedBy) .should.equal true describe "with a corrupted document", -> @@ -329,6 +341,7 @@ describe "RedisManager", -> @version = 42 @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') @ranges = { comments: "mock", entries: "mock" } + @updateMeta = { user_id: 'last-author-fake-id' } @doc_update_list_length = sinon.stub() @project_update_list_length = sinon.stub() @@ -340,7 +353,7 @@ describe "RedisManager", -> @multi.del = sinon.stub() @multi.eval = sinon.stub() @multi.exec = sinon.stub().callsArgWith(0, null, - [@hash, null, null, null, null, null, null, @doc_update_list_length] + [@hash, null, null, null, null, null, null, @doc_update_list_length, null, null] ) @ProjectHistoryRedisManager.queueOps = sinon.stub().callsArgWith( @ops.length + 1, null, @project_update_list_length @@ -353,7 +366,7 @@ describe "RedisManager", -> describe "with project history enabled", -> beforeEach -> @settings.apis.project_history.enabled = true - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback it "should get the current doc version to check for consistency", -> @RedisManager.getDocVersion @@ -385,6 +398,16 @@ describe "RedisManager", -> .calledWith("UnflushedTime:#{@doc_id}", Date.now(), "NX") .should.equal true + it "should set the last updated time", -> + @multi.set + .calledWith("lastUpdatedAt:#{@doc_id}", Date.now()) + .should.equal true + + it "should set the last updater", -> + @multi.set + .calledWith("lastUpdatedBy:#{@doc_id}", 'last-author-fake-id') + .should.equal true + it "should push the doc op into the doc ops list", -> @multi.rpush .calledWith("DocOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) @@ -423,7 +446,7 @@ describe "RedisManager", -> beforeEach -> @rclient.rpush = sinon.stub() @settings.apis.project_history.enabled = false - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback it "should not push the updates into the project history ops list", -> @rclient.rpush.called.should.equal false @@ -436,7 +459,7 @@ describe "RedisManager", -> describe "with an inconsistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length - 1) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback it "should not call multi.exec", -> 
@multi.exec.called.should.equal false @@ -450,7 +473,7 @@ describe "RedisManager", -> beforeEach -> @rclient.rpush = sinon.stub().callsArgWith(1, null, @project_update_list_length) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, [], @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, [], @ranges, @updateMeta, @callback it "should not try to enqueue doc updates", -> @multi.rpush @@ -470,7 +493,7 @@ describe "RedisManager", -> describe "with empty ranges", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, {}, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, {}, @updateMeta, @callback it "should not set the ranges", -> @multi.set @@ -487,7 +510,7 @@ describe "RedisManager", -> @badHash = "INVALID-HASH-VALUE" @multi.exec = sinon.stub().callsArgWith(0, null, [@badHash]) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback it 'should log a hash error', -> @logger.error.calledWith() @@ -501,7 +524,7 @@ describe "RedisManager", -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @_stringify = JSON.stringify @JSON.stringify = () -> return '["bad bytes! \u0000 <- here"]' - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback afterEach -> @JSON.stringify = @_stringify @@ -516,7 +539,7 @@ describe "RedisManager", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @callback + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback it 'should log an error', -> @logger.error.called.should.equal true @@ -524,6 +547,21 @@ describe "RedisManager", -> it "should call the callback with the error", -> @callback.calledWith(new Error("ranges are too large")).should.equal true + describe "without user id from meta", -> + beforeEach -> + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, {}, @callback + + it "should set the last updater to null", -> + @multi.del + .calledWith("lastUpdatedBy:#{@doc_id}") + .should.equal true + + it "should still set the last updated time", -> + @multi.set + .calledWith("lastUpdatedAt:#{@doc_id}", Date.now()) + .should.equal true + describe "putDocInMemory", -> beforeEach -> @multi.set = sinon.stub() @@ -681,6 +719,17 @@ describe "RedisManager", -> .calledWith("ProjectHistoryId:#{@doc_id}") .should.equal true + it "should delete lastUpdatedAt", -> + @multi.del + .calledWith("lastUpdatedAt:#{@doc_id}") + .should.equal true + + it "should delete lastUpdatedBy", -> + @multi.del + .calledWith("lastUpdatedBy:#{@doc_id}") + .should.equal true + + describe "clearProjectState", -> beforeEach (done) -> 
@rclient.del = sinon.stub().callsArg(1) diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 0fdbbb9728..ac8d4c742c 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -159,7 +159,8 @@ describe "UpdateManager", -> describe "applyUpdate", -> beforeEach -> - @update = {op: [{p: 42, i: "foo"}]} + @updateMeta = { user_id: 'last-author-fake-id' } + @update = {op: [{p: 42, i: "foo"}], meta: @updateMeta} @updatedDocLines = ["updated", "lines"] @version = 34 @lines = ["original", "lines"] @@ -193,7 +194,7 @@ describe "UpdateManager", -> it "should save the document", -> @RedisManager.updateDocument - .calledWith(@project_id, @doc_id, @updatedDocLines, @version, @appliedOps, @updated_ranges) + .calledWith(@project_id, @doc_id, @updatedDocLines, @version, @appliedOps, @updated_ranges, @updateMeta) .should.equal true it "should add metadata to the ops" , -> From d5d1736a5efd78145c9ca27215083b766701b25e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 2 May 2019 16:30:36 +0100 Subject: [PATCH 461/769] adds /flush_all_projects project --- services/document-updater/app.coffee | 2 + .../app/coffee/HttpController.coffee | 15 +++++ .../app/coffee/ProjectFlusher.coffee | 61 +++++++++++++++++++ 3 files changed, 78 insertions(+) create mode 100644 services/document-updater/app/coffee/ProjectFlusher.coffee diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 70c3fc875d..66d941b832 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -59,6 +59,8 @@ app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpCont app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges app.del '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment +app.get '/flush_all_projects', HttpController.flushAllProjects + app.get '/total', (req, res)-> timer = new Metrics.Timer("http.allDocList") RedisManager.getCountOfDocsInMemory (err, count)-> diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index d0e8e1994b..5a5c248ee9 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -4,6 +4,8 @@ ProjectManager = require "./ProjectManager" Errors = require "./Errors" logger = require "logger-sharelatex" Metrics = require "./Metrics" +ProjectFlusher = require("./ProjectFlusher") + TWO_MEGABYTES = 2 * 1024 * 1024 @@ -179,3 +181,16 @@ module.exports = HttpController = return next(error) if error? logger.log {project_id}, "queued project history resync via http" res.send 204 + + flushAllProjects: (req, res, next = (error)-> )-> + res.setTimeout(5 * 60 * 1000) + limit = req.query.limit || 1000 + concurrency = req.query.concurrency || 5 + ProjectFlusher.flushAllProjects limit, concurrency, (err, project_ids)-> + if err? 
+        logger.err err:err, "error bulk flushing projects"
+        res.send 500
+      else
+        res.send project_ids
+
+
diff --git a/services/document-updater/app/coffee/ProjectFlusher.coffee b/services/document-updater/app/coffee/ProjectFlusher.coffee
new file mode 100644
index 0000000000..b6ef3d77ca
--- /dev/null
+++ b/services/document-updater/app/coffee/ProjectFlusher.coffee
@@ -0,0 +1,62 @@
+request = require("request")
+Settings = require('settings-sharelatex')
+logger = require("logger-sharelatex")
+RedisManager = require("./RedisManager")
+rclient = RedisManager.rclient
+docUpdaterKeys = Settings.redis.documentupdater.key_schema
+async = require("async")
+ProjectManager = require("./ProjectManager")
+_ = require("lodash")
+
+ProjectFlusher =
+
+  # iterate over keys asynchronously using redis scan (non-blocking)
+  # handle all the cluster nodes or single redis server
+  _getKeys: (pattern, limit, callback) ->
+    nodes = rclient.nodes?('master') || [ rclient ]
+    doKeyLookupForNode = (node, cb) ->
+      ProjectFlusher._getKeysFromNode node, pattern, limit, cb
+    async.concatSeries nodes, doKeyLookupForNode, callback
+
+  _getKeysFromNode: (node, pattern, limit = 1000, callback) ->
+    cursor = 0  # redis iterator
+    keySet = {} # use hash to avoid duplicate results
+    batchSize = if limit? then Math.min(limit, 1000) else 1000
+    # scan over all keys looking for pattern
+    doIteration = (cb) ->
+      node.scan cursor, "MATCH", pattern, "COUNT", batchSize, (error, reply) ->
+        return callback(error) if error?
+        [cursor, keys] = reply
+        for key in keys
+          keySet[key] = true
+        keys = Object.keys(keySet)
+        noResults = cursor == "0" # redis returns string results not numeric
+        limitReached = (limit? && keys.length >= limit)
+        if noResults || limitReached
+          return callback(null, keys)
+        else
+          setTimeout doIteration, 10 # avoid hitting redis too hard
+    doIteration()
+
+  # extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
+  # or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster)
+  _extractIds: (keyList) ->
+    ids = for key in keyList
+      m = key.match(/:\{?([0-9a-f]{24})\}?/) # extract object id
+      m[1]
+    return ids
+
+  flushAllProjects: (limit, concurrency = 5, callback)->
+    ProjectFlusher._getKeys docUpdaterKeys.docsInProject({project_id:"*"}), limit, (error, project_keys) ->
+      if error?
+ logger.err err:error, "error getting keys for flushing" + return callback(error) + project_ids = ProjectFlusher._extractIds(project_keys) + jobs = _.map project_ids, (project_id)-> + return (cb)-> + ProjectManager.flushAndDeleteProjectWithLocks project_id, cb + async.parallelLimit jobs, concurrency, (error)-> + return callback(error, project_ids) + + +module.exports = ProjectFlusher \ No newline at end of file From daca83a057c583d2abbc8d7c2b8c750c30a40bd6 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 2 May 2019 16:54:22 +0100 Subject: [PATCH 462/769] add dryRun option to flush all projects --- .../document-updater/app/coffee/HttpController.coffee | 8 +++++--- .../document-updater/app/coffee/ProjectFlusher.coffee | 8 +++++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 5a5c248ee9..54a69deae9 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -184,9 +184,11 @@ module.exports = HttpController = flushAllProjects: (req, res, next = (error)-> )-> res.setTimeout(5 * 60 * 1000) - limit = req.query.limit || 1000 - concurrency = req.query.concurrency || 5 - ProjectFlusher.flushAllProjects limit, concurrency, (err, project_ids)-> + options = + limit : req.query.limit || 1000 + concurrency : req.query.concurrency || 5 + dryRun : req.query.dryRun || false + ProjectFlusher.flushAllProjects options, (err, project_ids)-> if err? logger.err err:err, "error bulk flushing projects" res.send 500 diff --git a/services/document-updater/app/coffee/ProjectFlusher.coffee b/services/document-updater/app/coffee/ProjectFlusher.coffee index b6ef3d77ca..fabc334930 100644 --- a/services/document-updater/app/coffee/ProjectFlusher.coffee +++ b/services/document-updater/app/coffee/ProjectFlusher.coffee @@ -45,16 +45,18 @@ ProjectFlusher = m[1] return ids - flushAllProjects: (limit, concurrency = 5, callback)-> - ProjectFlusher._getKeys docUpdaterKeys.docsInProject({project_id:"*"}), limit, (error, project_keys) -> + flushAllProjects: (options, callback)-> + ProjectFlusher._getKeys docUpdaterKeys.docsInProject({project_id:"*"}), options.limit, (error, project_keys) -> if error? logger.err err:error, "error getting keys for flushing" return callback(error) project_ids = ProjectFlusher._extractIds(project_keys) + if options.dryRun + return callback(null, project_ids) jobs = _.map project_ids, (project_id)-> return (cb)-> ProjectManager.flushAndDeleteProjectWithLocks project_id, cb - async.parallelLimit jobs, concurrency, (error)-> + async.parallelLimit jobs, options.concurrency, (error)-> return callback(error, project_ids) From ca6bfd89297c2e6df3a6f97f9e8a6b171ccb98b1 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Mon, 6 May 2019 17:28:52 +0200 Subject: [PATCH 463/769] Update README --- services/document-updater/README.md | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/services/document-updater/README.md b/services/document-updater/README.md index f86dcda412..a4ba8aa55e 100644 --- a/services/document-updater/README.md +++ b/services/document-updater/README.md @@ -1,13 +1,4 @@ -document-updater-sharelatex -=========================== +THIS WILL BE DELETED SOON +DO NOT UPDATE +USE https://github.com/overleaf/document-updater INSTEAD -An API for applying incoming updates to documents in real-time. 
- -[![Build Status](https://travis-ci.org/sharelatex/document-updater-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/document-updater-sharelatex) - -License -------- - -The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. - -Copyright (c) ShareLaTeX, 2014. From 4b8a27a2207c23a4b1b723822ce438480b789559 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 7 May 2019 14:00:45 +0100 Subject: [PATCH 464/769] change github url --- services/document-updater/Jenkinsfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 9ec298487b..c8e5e33a50 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -4,10 +4,10 @@ pipeline { agent any environment { - GIT_PROJECT = "document-updater-sharelatex" - JENKINS_WORKFLOW = "document-updater-sharelatex" + GIT_PROJECT = "document-updater" + JENKINS_WORKFLOW = "document-updater" TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" - GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT" + GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT" } triggers { From e57741cb80185c2b5a2bcdfc5055fe5bb126e41b Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 7 May 2019 15:46:30 +0100 Subject: [PATCH 465/769] stub out project flusher for unit tests --- .../test/unit/coffee/HttpController/HttpControllerTests.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 15b9142647..d7a27db854 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -13,6 +13,7 @@ describe "HttpController", -> flushProjectChangesAsync: sinon.stub() "./ProjectManager": @ProjectManager = {} "logger-sharelatex" : @logger = { log: sinon.stub() } + "./ProjectFlusher": {flushAllProjects:->} "./Metrics": @Metrics = {} "./Errors" : Errors @Metrics.Timer = class Timer From d316f172bf55d7c9691ef3e5e308af5787c50ff0 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Tue, 7 May 2019 16:44:56 +0200 Subject: [PATCH 466/769] update repo URL for Jenkins --- services/document-updater/Jenkinsfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 9ec298487b..9abbdc917b 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -4,10 +4,10 @@ pipeline { agent any environment { - GIT_PROJECT = "document-updater-sharelatex" - JENKINS_WORKFLOW = "document-updater-sharelatex" + GIT_PROJECT = "document-updater" + JENKINS_WORKFLOW = "document-updater-sharelatex-internal" TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" - GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT" + GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT" } triggers { From 8b40da701ee49d3ca4e30a45f97d7718ba287270 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Tue, 7 
May 2019 16:44:48 +0200 Subject: [PATCH 467/769] update README - remove build status badge - change app name - update copyright notice - update links --- services/document-updater/README.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/services/document-updater/README.md b/services/document-updater/README.md index f86dcda412..f9cf7c76bb 100644 --- a/services/document-updater/README.md +++ b/services/document-updater/README.md @@ -1,13 +1,11 @@ -document-updater-sharelatex +overleaf/document-updater =========================== An API for applying incoming updates to documents in real-time. -[![Build Status](https://travis-ci.org/sharelatex/document-updater-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/document-updater-sharelatex) - License ------- The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. -Copyright (c) ShareLaTeX, 2014. +Copyright (c) Overleaf, 2014-2019. From 27a8248196584749847016f432b0eb56f6fba635 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 7 May 2019 14:15:26 +0100 Subject: [PATCH 468/769] convert "Delete component" errors into warnings --- .../app/coffee/DispatchManager.coffee | 2 +- .../document-updater/app/coffee/Errors.coffee | 8 +++ .../app/coffee/ShareJsUpdateManager.coffee | 8 +++ .../coffee/ApplyingUpdatesToADocTests.coffee | 2 +- .../DispatchManagerTests.coffee | 67 +++++++++++++------ .../ShareJsUpdateManagerTests.coffee | 1 + 6 files changed, 67 insertions(+), 21 deletions(-) diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index cfefa3e9d0..419aa17de7 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -28,7 +28,7 @@ module.exports = DispatchManager = # log everything except OpRangeNotAvailable errors, these are normal if error? 
# downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry - logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || ((typeof error is' string') && error.match(/^Delete component/)) + logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || (error instanceof Errors.DeleteMismatchError) if logAsWarning logger.warn err: error, project_id: project_id, doc_id: doc_id, "error processing update" else diff --git a/services/document-updater/app/coffee/Errors.coffee b/services/document-updater/app/coffee/Errors.coffee index e5e93fa458..e3d08e7641 100644 --- a/services/document-updater/app/coffee/Errors.coffee +++ b/services/document-updater/app/coffee/Errors.coffee @@ -19,7 +19,15 @@ ProjectStateChangedError = (message) -> return error ProjectStateChangedError.prototype.__proto__ = Error.prototype +DeleteMismatchError = (message) -> + error = new Error(message) + error.name = "DeleteMismatchError" + error.__proto__ = DeleteMismatchError.prototype + return error +DeleteMismatchError.prototype.__proto__ = Error.prototype + module.exports = Errors = NotFoundError: NotFoundError OpRangeNotAvailableError: OpRangeNotAvailableError ProjectStateChangedError: ProjectStateChangedError + DeleteMismatchError: DeleteMismatchError diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index f4f3674c75..66b8367395 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -7,6 +7,8 @@ Keys = require "./UpdateKeys" util = require "util" RealTimeRedisManager = require "./RealTimeRedisManager" crypto = require "crypto" +metrics = require('./Metrics') +Errors = require("./Errors") ShareJsModel:: = {} util.inherits ShareJsModel, EventEmitter @@ -36,9 +38,15 @@ module.exports = ShareJsUpdateManager = model.applyOp doc_key, update, (error) -> if error? 
if error == "Op already submitted" + metrics.inc "sharejs.already-submitted" logger.warn {project_id, doc_id, update}, "op has already been submitted" update.dup = true ShareJsUpdateManager._sendOp(project_id, doc_id, update) + else if /^Delete component/.test(error) + metrics.inc "sharejs.delete-mismatch" + logger.warn {project_id, doc_id, update, shareJsErr: error}, "sharejs delete does not match" + error = new Errors.DeleteMismatchError("Delete component does not match") + return callback(error) else return callback(error) logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update" diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 5c080e0924..d0d40ba08e 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -235,7 +235,7 @@ describe "Applying updates to a doc", -> JSON.parse(message).should.deep.include { project_id: @project_id, doc_id: @doc_id, - error:'Delete component \'not the correct content\' does not match deleted text \'one\ntwo\nthree\'' + error:'Delete component does not match' } describe "with enough updates to flush to the track changes api", -> diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index 990688ce86..af36d10a31 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -3,19 +3,20 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/DispatchManager.js" SandboxedModule = require('sandboxed-module') - +Errors = require "../../../../app/js/Errors.js" describe "DispatchManager", -> beforeEach -> @timeout(3000) @DispatchManager = SandboxedModule.require modulePath, requires: "./UpdateManager" : @UpdateManager = {} - "logger-sharelatex": @logger = { log: sinon.stub() } + "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() } "settings-sharelatex": @settings = redis: realtime: {} "redis-sharelatex": @redis = {} "./RateLimitManager": {} + "./Errors": Errors "./Metrics": Timer: -> done: -> @@ -38,23 +39,51 @@ describe "DispatchManager", -> @doc_id = "doc-id-123" @doc_key = "#{@project_id}:#{@doc_id}" @client.blpop = sinon.stub().callsArgWith(2, null, ["pending-updates-list", @doc_key]) - @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) - - @worker._waitForUpdateThenDispatchWorker @callback - - it "should call redis with BLPOP", -> - @client.blpop - .calledWith("pending-updates-list", 0) - .should.equal true - - it "should call processOutstandingUpdatesWithLock", -> - @UpdateManager.processOutstandingUpdatesWithLock - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - + + describe "in the normal case", -> + beforeEach -> + @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) + @worker._waitForUpdateThenDispatchWorker @callback + + it "should call redis with BLPOP", -> + @client.blpop + .calledWith("pending-updates-list", 0) + .should.equal true + + it "should call processOutstandingUpdatesWithLock", -> + 
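+          # blpop handed back the combined "project_id:doc_id" key; the worker
+          # is expected to split it and dispatch on the individual ids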
@UpdateManager.processOutstandingUpdatesWithLock + .calledWith(@project_id, @doc_id) + .should.equal true + + it "should not log any errors", -> + @logger.error.called.should.equal false + @logger.warn.called.should.equal false + + it "should call the callback", -> + @callback.called.should.equal true + + describe "with an error", -> + beforeEach -> + @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArgWith(2, new Error("a generic error")) + @worker._waitForUpdateThenDispatchWorker @callback + + it "should log an error", -> + @logger.error.called.should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + + describe "with a 'Delete component' error", -> + beforeEach -> + @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArgWith(2, new Errors.DeleteMismatchError()) + @worker._waitForUpdateThenDispatchWorker @callback + + it "should log a warning", -> + @logger.warn.called.should.equal true + + it "should call the callback", -> + @callback.called.should.equal true + describe "run", -> it "should call _waitForUpdateThenDispatchWorker until shutting down", (done) -> callCount = 0 diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index b7364b00a4..236112684a 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -18,6 +18,7 @@ describe "ShareJsUpdateManager", -> "redis-sharelatex" : createClient: () => @rclient = auth:-> "logger-sharelatex": @logger = { log: sinon.stub() } "./RealTimeRedisManager": @RealTimeRedisManager = {} + "./Metrics": @metrics = { inc: sinon.stub() } globals: clearTimeout: @clearTimeout = sinon.stub() From e95059f98e17d6a9684bbff5708d94dc5e03c980 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 3 Jun 2019 10:01:10 +0100 Subject: [PATCH 469/769] handle non-urgent flushes in background --- .../document-updater/app/coffee/HistoryManager.coffee | 10 ++++++---- .../document-updater/app/coffee/HttpController.coffee | 4 +++- .../document-updater/app/coffee/ProjectFlusher.coffee | 2 +- .../document-updater/app/coffee/ProjectManager.coffee | 4 ++-- .../coffee/HistoryManager/HistoryManagerTests.coffee | 2 +- .../coffee/HttpController/HttpControllerTests.coffee | 4 ++-- .../ProjectManager/flushAndDeleteProjectTests.coffee | 10 +++++----- 7 files changed, 20 insertions(+), 16 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 8dcbf426f5..0c3ab9cea1 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -23,14 +23,16 @@ module.exports = HistoryManager = # flush changes in the background flushProjectChangesAsync: (project_id) -> return if !Settings.apis?.project_history?.enabled - HistoryManager.flushProjectChanges project_id, -> + HistoryManager.flushProjectChanges project_id, {background:true}, -> # flush changes and callback (for when we need to know the queue is flushed) - flushProjectChanges: (project_id, callback = (error) ->) -> + flushProjectChanges: (project_id, options, callback = (error) ->) -> return callback() if !Settings.apis?.project_history?.enabled url = 
"#{Settings.apis.project_history.url}/project/#{project_id}/flush" - logger.log { project_id, url }, "flushing doc in project history api" - request.post url, (error, res, body)-> + qs = {} + qs.background = true if options.background # pass on the background flush option if present + logger.log { project_id, url, qs }, "flushing doc in project history api" + request.post {url: url, qs: qs}, (error, res, body)-> if error? logger.error { error, project_id}, "project history doc to track changes api" return callback(error) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 54a69deae9..b35943f5cd 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -131,7 +131,9 @@ module.exports = HttpController = project_id = req.params.project_id logger.log project_id: project_id, "deleting project via http" timer = new Metrics.Timer("http.deleteProject") - ProjectManager.flushAndDeleteProjectWithLocks project_id, (error) -> + options = {} + options.background = true if req.query?.background # allow non-urgent flushes to be queued + ProjectManager.flushAndDeleteProjectWithLocks project_id, options, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, "deleted project via http" diff --git a/services/document-updater/app/coffee/ProjectFlusher.coffee b/services/document-updater/app/coffee/ProjectFlusher.coffee index fabc334930..5ae1c34d76 100644 --- a/services/document-updater/app/coffee/ProjectFlusher.coffee +++ b/services/document-updater/app/coffee/ProjectFlusher.coffee @@ -55,7 +55,7 @@ ProjectFlusher = return callback(null, project_ids) jobs = _.map project_ids, (project_id)-> return (cb)-> - ProjectManager.flushAndDeleteProjectWithLocks project_id, cb + ProjectManager.flushAndDeleteProjectWithLocks project_id, {background:true}, cb async.parallelLimit jobs, options.concurrency, (error)-> return callback(error, project_ids) diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index c714f7442a..4271186b7a 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -39,7 +39,7 @@ module.exports = ProjectManager = else callback(null) - flushAndDeleteProjectWithLocks: (project_id, _callback = (error) ->) -> + flushAndDeleteProjectWithLocks: (project_id, options, _callback = (error) ->) -> timer = new Metrics.Timer("projectManager.flushAndDeleteProjectWithLocks") callback = (args...) -> timer.done() @@ -64,7 +64,7 @@ module.exports = ProjectManager = # history is completely flushed because the project may be # deleted in web after this call completes, and so further # attempts to flush would fail after that. - HistoryManager.flushProjectChanges project_id, (error) -> + HistoryManager.flushProjectChanges project_id, options, (error) -> if errors.length > 0 callback new Error("Errors deleting docs. See log for details") else if error? 
diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 2233610d28..1198bf7c7b 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -43,7 +43,7 @@ describe "HistoryManager", -> it "should send a request to the project history api", -> @request.post - .calledWith("#{@Settings.apis.project_history.url}/project/#{@project_id}/flush") + .calledWith({url: "#{@Settings.apis.project_history.url}/project/#{@project_id}/flush", qs:{background:true}}) .should.equal true describe "recordAndFlushHistoryOps", -> diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index d7a27db854..6429a4031d 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -322,7 +322,7 @@ describe "HttpController", -> describe "successfully", -> beforeEach -> - @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(1) + @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2) @HttpController.deleteProject(@req, @res, @next) it "should delete the project", -> @@ -345,7 +345,7 @@ describe "HttpController", -> describe "when an errors occurs", -> beforeEach -> - @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops")) + @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2, new Error("oops")) @HttpController.deleteProject(@req, @res, @next) it "should call next with the error", -> diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee index c060be7485..08fb6eab04 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee @@ -12,7 +12,7 @@ describe "ProjectManager - flushAndDeleteProject", -> "./DocumentManager": @DocumentManager = {} "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "./HistoryManager": @HistoryManager = - flushProjectChanges: sinon.stub().callsArg(1) + flushProjectChanges: sinon.stub().callsArg(2) "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() @@ -24,7 +24,7 @@ describe "ProjectManager - flushAndDeleteProject", -> @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(2) - @ProjectManager.flushAndDeleteProjectWithLocks @project_id, (error) => + @ProjectManager.flushAndDeleteProjectWithLocks @project_id, {}, (error) => @callback(error) done() @@ -41,7 +41,7 @@ describe "ProjectManager - flushAndDeleteProject", -> it "should flush project history", -> @HistoryManager.flushProjectChanges - .calledWith(@project_id) + .calledWith(@project_id, {}) .should.equal true it "should call the callback without error", -> @@ -59,7 +59,7 @@ describe "ProjectManager - flushAndDeleteProject", -> callback(@error = new Error("oops, something 
went wrong")) else callback() - @ProjectManager.flushAndDeleteProjectWithLocks @project_id, (error) => + @ProjectManager.flushAndDeleteProjectWithLocks @project_id, {}, (error) => @callback(error) done() @@ -71,7 +71,7 @@ describe "ProjectManager - flushAndDeleteProject", -> it "should still flush project history", -> @HistoryManager.flushProjectChanges - .calledWith(@project_id) + .calledWith(@project_id, {}) .should.equal true it "should record the error", -> From 966478cac48fed23e3a26e40f3298a1585ffac72 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 11 Jun 2019 14:11:46 +0100 Subject: [PATCH 470/769] fix hash check to use 'v' field instead of version --- .../document-updater/app/coffee/ShareJsUpdateManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 66b8367395..c81cccdc77 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -26,7 +26,7 @@ module.exports = ShareJsUpdateManager = logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates" jobs = [] # record the update version before it is modified - incomingUpdateVersion = update.version + incomingUpdateVersion = update.v # We could use a global model for all docs, but we're hitting issues with the # internal state of ShareJS not being accessible for clearing caches, and # getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee) From 984b2d38e6b0867fe4dc7dc2f17f4e41277b34cd Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 11 Jun 2019 16:33:14 +0100 Subject: [PATCH 471/769] add unit tests --- .../ShareJsUpdateManagerTests.coffee | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee index 236112684a..363705845f 100644 --- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee @@ -3,6 +3,7 @@ chai = require('chai') should = chai.should() modulePath = "../../../../app/js/ShareJsUpdateManager.js" SandboxedModule = require('sandboxed-module') +crypto = require('crypto') describe "ShareJsUpdateManager", -> beforeEach -> @@ -26,8 +27,10 @@ describe "ShareJsUpdateManager", -> beforeEach -> @lines = ["one", "two"] @version = 34 - @update = {p: 4, t: "foo"} @updatedDocLines = ["onefoo", "two"] + content = @updatedDocLines.join("\n") + @hash = crypto.createHash('sha1').update("blob " + content.length + "\x00").update(content, 'utf8').digest('hex') + @update = {p: 4, t: "foo", v:@version, hash:@hash} @model = applyOp: sinon.stub().callsArg(2) getSnapshot: sinon.stub() @@ -90,6 +93,18 @@ describe "ShareJsUpdateManager", -> it "should call the callback with the error", -> @callback.calledWith(@error).should.equal true + describe "with an invalid hash", -> + beforeEach (done) -> + @error = new Error("invalid hash") + @model.getSnapshot.callsArgWith(1, null, {snapshot: "unexpected content", v: @version}) + @model.db.appliedOps["#{@project_id}:#{@doc_id}"] = @appliedOps = ["mock-ops"] + @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, 
@version, (err, docLines, version, appliedOps) => + @callback(err, docLines, version, appliedOps) + done() + + it "should call the callback with the error", -> + @callback.calledWith(@error).should.equal true + describe "_listenForOps", -> beforeEach -> @model = on: (event, callback) => From d50b93df2ff5344d95ba111160f2d1b247841b37 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 11 Jun 2019 13:53:35 +0100 Subject: [PATCH 472/769] add metric for invalid hash and other sharejs errors --- .../app/coffee/ShareJsUpdateManager.coffee | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee index 66b8367395..e15815373c 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.coffee @@ -48,6 +48,7 @@ module.exports = ShareJsUpdateManager = error = new Errors.DeleteMismatchError("Delete component does not match") return callback(error) else + metrics.inc "sharejs.other-error" return callback(error) logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update" model.getSnapshot doc_key, (error, data) => @@ -55,7 +56,11 @@ module.exports = ShareJsUpdateManager = # only check hash when present and no other updates have been applied if update.hash? and incomingUpdateVersion == version ourHash = ShareJsUpdateManager._computeHash(data.snapshot) - return callback(new Error("Invalid hash")) if ourHash != update.hash + if ourHash != update.hash + metrics.inc "sharejs.hash-fail" + return callback(new Error("Invalid hash")) + else + metrics.inc "sharejs.hash-pass", 0.001 docLines = data.snapshot.split(/\r\n|\n|\r/) callback(null, docLines, data.v, model.db.appliedOps[doc_key] or []) From d9a737f97ca35f0182f505aeff52a5761dfb61a4 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 13 Jun 2019 14:21:38 +0100 Subject: [PATCH 473/769] return failed and successfully flushed projects when flushing everything --- .../document-updater/app/coffee/ProjectFlusher.coffee | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectFlusher.coffee b/services/document-updater/app/coffee/ProjectFlusher.coffee index fabc334930..c84d759804 100644 --- a/services/document-updater/app/coffee/ProjectFlusher.coffee +++ b/services/document-updater/app/coffee/ProjectFlusher.coffee @@ -56,8 +56,15 @@ ProjectFlusher = jobs = _.map project_ids, (project_id)-> return (cb)-> ProjectManager.flushAndDeleteProjectWithLocks project_id, cb - async.parallelLimit jobs, options.concurrency, (error)-> - return callback(error, project_ids) + async.parallelLimit async.reflectAll(jobs), options.concurrency, (error, results)-> + success = [] + failure = [] + _.each results, (result, i)-> + if result.error? + failure.push(project_ids[i]) + else + success.push(project_ids[i]) + return callback(error, {success:success, failure:failure}) module.exports = ProjectFlusher \ No newline at end of file From 080b482e51965ef376388d7e251a01e1ef3a2daf Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 19 Jun 2019 10:25:54 +0100 Subject: [PATCH 474/769] Update logger, metrics, and redis Also fix acceptance tests, broken by a change in the redis driver behaviour. It now returns promises from most operations, which confuses mocha.
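[Editorial note] The mocha breakage mentioned in the commit message above comes from CoffeeScript's implicit returns. A minimal sketch of the failure mode, borrowing the suite's own helper names (rclient, Keys.docLines) and an illustrative doc id:

  it "should leave the doc in redis", (done) ->
    doc_id = "doc-id-123"  # illustrative only
    rclient.get Keys.docLines({doc_id}), (error, lines) ->
      throw error if error?
      done()
    # CoffeeScript implicitly returns the last expression, so the promise
    # now returned by the updated redis driver escapes to mocha, which then
    # sees both a `done` callback and a returned promise and treats the
    # test's resolution as over-determined.

The many `return null` lines added in the diffs below make the return value explicit, so mocha relies only on `done`.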
--- services/document-updater/Jenkinsfile | 2 +- services/document-updater/Makefile | 6 +- services/document-updater/buildscript.txt | 2 +- .../document-updater/docker-compose.ci.yml | 5 +- services/document-updater/docker-compose.yml | 8 +- services/document-updater/npm-shrinkwrap.json | 406 ++++++++++-------- services/document-updater/package.json | 6 +- .../coffee/ApplyingUpdatesToADocTests.coffee | 22 + ...lyingUpdatesToProjectStructureTests.coffee | 14 + .../coffee/SettingADocumentTests.coffee | 10 + 10 files changed, 288 insertions(+), 193 deletions(-) diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 9abbdc917b..2862de8f47 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -5,7 +5,7 @@ pipeline { environment { GIT_PROJECT = "document-updater" - JENKINS_WORKFLOW = "document-updater-sharelatex-internal" + JENKINS_WORKFLOW = "document-updater-sharelatex" TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT" } diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 07216e7800..ce7210ccf3 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -26,7 +26,9 @@ test: test_unit test_acceptance test_unit: @[ ! -d test/unit ] && echo "document-updater has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit -test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run +test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run + +test_acceptance_run: @[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index aa39cdc02f..ebef72a5cc 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -5,4 +5,4 @@ document-updater --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops --build-target=docker ---script-version=1.1.12 +--script-version=1.1.21 diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 36b52f8f8b..d2bcca9ec6 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 version: "2" @@ -10,6 +10,8 @@ services: image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER user: node command: npm run test:unit:_run + environment: + NODE_ENV: test test_acceptance: @@ -21,6 +23,7 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test depends_on: - mongo - redis diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 8bb7857cb6..02ccd930ba 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,18 +1,19 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 version: "2" services: test_unit: - build: . + image: node:6.9.5 volumes: - .:/app working_dir: /app environment: MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test command: npm run test:unit user: node @@ -27,6 +28,8 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ERROR + NODE_ENV: test user: node depends_on: - mongo @@ -49,3 +52,4 @@ services: mongo: image: mongo:3.4 + diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index f30ce4f581..794836656c 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -3,19 +3,31 @@ "version": "0.1.4", "dependencies": { "@google-cloud/common": { - "version": "0.27.0", - "from": "@google-cloud/common@>=0.27.0 <0.28.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.27.0.tgz" + "version": "0.32.1", + "from": "@google-cloud/common@>=0.32.0 <0.33.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", + "dependencies": { + "extend": { + "version": "3.0.2", + "from": "extend@>=3.0.2 <4.0.0", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + } + } }, "@google-cloud/debug-agent": { - "version": "3.0.1", + "version": "3.2.0", "from": "@google-cloud/debug-agent@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", "dependencies": { "coffeescript": { - "version": "2.3.2", + "version": "2.4.1", "from": "coffeescript@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.3.2.tgz" + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz" + }, + "semver": { + "version": "6.1.1", + "from": "semver@>=6.0.0 <7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz" } } }, @@ -29,43 +41,65 @@ "from": "@google-cloud/common@>=0.26.0 <0.27.0", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz" }, + "@google-cloud/promisify": { + "version": "0.3.1", + "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + }, + "arrify": { + "version": "1.0.1", + "from": "arrify@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + }, + "gcp-metadata": { + "version": "0.9.3", + "from": "gcp-metadata@>=0.9.0 <0.10.0", + "resolved": 
"https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + }, + "google-auth-library": { + "version": "2.0.2", + "from": "google-auth-library@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", + "dependencies": { + "gcp-metadata": { + "version": "0.7.0", + "from": "gcp-metadata@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + } + } + }, "through2": { - "version": "3.0.0", + "version": "3.0.1", "from": "through2@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz" } } }, "@google-cloud/projectify": { - "version": "0.3.2", - "from": "@google-cloud/projectify@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.2.tgz" + "version": "0.3.3", + "from": "@google-cloud/projectify@>=0.3.3 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz" }, "@google-cloud/promisify": { - "version": "0.3.1", - "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + "version": "0.4.0", + "from": "@google-cloud/promisify@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz" }, "@google-cloud/trace-agent": { - "version": "3.5.2", + "version": "3.6.1", "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.5.2.tgz", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", "dependencies": { - "@google-cloud/common": { - "version": "0.30.2", - "from": "@google-cloud/common@>=0.30.0 <0.31.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.30.2.tgz" - }, - "google-auth-library": { - "version": "3.0.1", - "from": "google-auth-library@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.0.1.tgz" - }, "methods": { "version": "1.1.2", "from": "methods@>=1.1.1 <2.0.0", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" }, + "semver": { + "version": "6.1.1", + "from": "semver@^6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz" + }, "uuid": { "version": "3.3.2", "from": "uuid@^3.0.1", @@ -124,14 +158,14 @@ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" }, "@sindresorhus/is": { - "version": "0.13.0", - "from": "@sindresorhus/is@>=0.13.0 <0.14.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.13.0.tgz" + "version": "0.15.0", + "from": "@sindresorhus/is@>=0.15.0 <0.16.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz" }, "@types/caseless": { - "version": "0.12.1", + "version": "0.12.2", "from": "@types/caseless@*", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.1.tgz" + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz" }, "@types/console-log-level": { "version": "1.4.0", @@ -154,9 +188,9 @@ "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz" }, "@types/node": { - "version": "10.12.20", + "version": "12.0.8", "from": "@types/node@*", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.20.tgz" + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz" }, 
"@types/request": { "version": "2.48.1", @@ -173,15 +207,20 @@ "from": "@types/tough-cookie@*", "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz" }, + "abort-controller": { + "version": "3.0.0", + "from": "abort-controller@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" + }, "acorn": { - "version": "5.7.3", - "from": "acorn@>=5.0.3 <6.0.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz" + "version": "6.1.1", + "from": "acorn@>=6.0.0 <7.0.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz" }, "agent-base": { - "version": "4.2.1", + "version": "4.3.0", "from": "agent-base@>=4.1.0 <5.0.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz" + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz" }, "ajv": { "version": "5.5.2", @@ -189,9 +228,9 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz" }, "arrify": { - "version": "1.0.1", - "from": "arrify@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + "version": "2.0.1", + "from": "arrify@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz" }, "asn1": { "version": "0.1.11", @@ -240,9 +279,9 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz" }, "axios": { - "version": "0.18.0", + "version": "0.18.1", "from": "axios@>=0.18.0 <0.19.0", - "resolved": "http://registry.npmjs.org/axios/-/axios-0.18.0.tgz" + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz" }, "balanced-match": { "version": "1.0.0", @@ -266,9 +305,9 @@ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz" }, "bindings": { - "version": "1.4.0", + "version": "1.5.0", "from": "bindings@>=1.2.1 <2.0.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz" }, "bintrees": { "version": "1.0.1", @@ -312,9 +351,9 @@ "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz" }, "builtin-modules": { - "version": "3.0.0", + "version": "3.1.0", "from": "builtin-modules@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz" }, "bunyan": { "version": "0.22.3", @@ -398,9 +437,9 @@ "resolved": "https://registry.npmjs.org/connect/-/connect-2.8.4.tgz" }, "console-log-level": { - "version": "1.4.0", + "version": "1.4.1", "from": "console-log-level@>=1.4.0 <2.0.0", - "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz" }, "continuation-local-storage": { "version": "3.2.1", @@ -469,9 +508,9 @@ } }, "delay": { - "version": "4.1.0", + "version": "4.3.0", "from": "delay@>=4.0.1 <5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.1.0.tgz" + "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz" }, "delayed-stream": { "version": "0.0.5", @@ -479,9 +518,9 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" }, "denque": { - "version": "1.4.0", + "version": "1.4.1", "from": "denque@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz" }, "diff": { "version": "3.5.0", @@ -497,9 
+536,9 @@ "optional": true }, "duplexify": { - "version": "3.6.1", + "version": "3.7.1", "from": "duplexify@>=3.6.0 <4.0.0", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz" + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz" }, "each-series": { "version": "1.0.0", @@ -513,9 +552,9 @@ "optional": true }, "ecdsa-sig-formatter": { - "version": "1.0.10", - "from": "ecdsa-sig-formatter@1.0.10", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.10.tgz" + "version": "1.0.11", + "from": "ecdsa-sig-formatter@1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz" }, "emitter-listener": { "version": "1.1.2", @@ -533,9 +572,9 @@ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz" }, "es6-promise": { - "version": "4.2.5", + "version": "4.2.8", "from": "es6-promise@>=4.0.3 <5.0.0", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.5.tgz" + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz" }, "es6-promisify": { "version": "5.0.0", @@ -548,6 +587,11 @@ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "dev": true }, + "event-target-shim": { + "version": "5.0.1", + "from": "event-target-shim@>=5.0.0 <6.0.0", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" + }, "express": { "version": "3.3.4", "from": "express@3.3.4", @@ -588,15 +632,10 @@ "from": "findit2@>=2.2.3 <3.0.0", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz" }, - "flexbuffer": { - "version": "0.0.6", - "from": "flexbuffer@0.0.6", - "resolved": "https://registry.npmjs.org/flexbuffer/-/flexbuffer-0.0.6.tgz" - }, "follow-redirects": { - "version": "1.6.1", - "from": "follow-redirects@>=1.3.0 <2.0.0", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.1.tgz" + "version": "1.5.10", + "from": "follow-redirects@1.5.10", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz" }, "forever-agent": { "version": "0.5.2", @@ -632,9 +671,9 @@ "dev": true }, "gaxios": { - "version": "1.2.7", - "from": "gaxios@>=1.0.4 <2.0.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.2.7.tgz", + "version": "1.8.4", + "from": "gaxios@>=1.2.1 <2.0.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", "dependencies": { "extend": { "version": "3.0.2", @@ -644,9 +683,9 @@ } }, "gcp-metadata": { - "version": "0.9.3", - "from": "gcp-metadata@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + "version": "1.0.0", + "from": "gcp-metadata@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz" }, "getpass": { "version": "0.1.7", @@ -667,31 +706,24 @@ "optional": true }, "google-auth-library": { - "version": "2.0.2", - "from": "google-auth-library@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", - "dependencies": { - "gcp-metadata": { - "version": "0.7.0", - "from": "gcp-metadata@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" - } - } + "version": "3.1.2", + "from": "google-auth-library@>=3.1.1 <4.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz" }, "google-p12-pem": { - "version": "1.0.3", + "version": "1.0.4", "from": 
"google-p12-pem@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.3.tgz" + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz" }, "gtoken": { - "version": "2.3.2", - "from": "gtoken@>=2.3.0 <3.0.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.2.tgz", + "version": "2.3.3", + "from": "gtoken@>=2.3.2 <3.0.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", "dependencies": { "mime": { - "version": "2.4.0", + "version": "2.4.4", "from": "mime@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz" } } }, @@ -747,9 +779,9 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" }, "ioredis": { - "version": "4.6.0", - "from": "ioredis@4.6.0", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.6.0.tgz" + "version": "4.9.5", + "from": "ioredis@>=4.9.1 <4.10.0", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.9.5.tgz" }, "is": { "version": "3.3.0", @@ -757,9 +789,9 @@ "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz" }, "is-buffer": { - "version": "1.1.6", - "from": "is-buffer@>=1.1.5 <2.0.0", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" + "version": "2.0.3", + "from": "is-buffer@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz" }, "is-typedarray": { "version": "1.0.0", @@ -815,14 +847,14 @@ } }, "jwa": { - "version": "1.2.0", - "from": "jwa@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.2.0.tgz" + "version": "1.4.1", + "from": "jwa@>=1.4.1 <2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz" }, "jws": { - "version": "3.2.1", + "version": "3.2.2", "from": "jws@>=3.1.5 <4.0.0", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.1.tgz" + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz" }, "keypress": { "version": "0.1.0", @@ -850,14 +882,14 @@ "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz" }, "logger-sharelatex": { - "version": "1.6.0", - "from": "logger-sharelatex@1.6.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.6.0.tgz", + "version": "1.7.0", + "from": "logger-sharelatex@1.7.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.7.0.tgz", "dependencies": { "ajv": { - "version": "6.7.0", + "version": "6.10.0", "from": "ajv@>=6.5.5 <7.0.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.7.0.tgz" + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz" }, "assert-plus": { "version": "1.0.0", @@ -870,19 +902,14 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz" }, "bunyan": { - "version": "1.5.1", - "from": "bunyan@1.5.1", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz" - }, - "coffee-script": { - "version": "1.12.4", - "from": "coffee-script@1.12.4", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz" + "version": "1.8.12", + "from": "bunyan@1.8.12", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz" }, "combined-stream": { - "version": "1.0.7", + "version": "1.0.8", "from": "combined-stream@>=1.0.6 <1.1.0", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" }, "delayed-stream": { "version": 
"1.0.0", @@ -890,9 +917,9 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" }, "dtrace-provider": { - "version": "0.6.0", - "from": "dtrace-provider@>=0.6.0 <0.7.0", - "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "version": "0.8.7", + "from": "dtrace-provider@>=0.8.0 <0.9.0", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz", "optional": true }, "extend": { @@ -931,14 +958,14 @@ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" }, "mime-db": { - "version": "1.37.0", - "from": "mime-db@>=1.37.0 <1.38.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz" + "version": "1.40.0", + "from": "mime-db@1.40.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz" }, "mime-types": { - "version": "2.1.21", + "version": "2.1.24", "from": "mime-types@>=2.1.19 <2.2.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz" + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz" }, "oauth-sign": { "version": "0.9.0", @@ -1008,9 +1035,9 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-0.0.1.tgz" }, "metrics-sharelatex": { - "version": "2.1.1", - "from": "metrics-sharelatex@2.1.1", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.1.1.tgz", + "version": "2.2.0", + "from": "metrics-sharelatex@2.2.0", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.2.0.tgz", "dependencies": { "coffee-script": { "version": "1.6.0", @@ -1021,11 +1048,6 @@ "version": "0.1.1", "from": "lynx@>=0.1.1 <0.2.0", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz" - }, - "underscore": { - "version": "1.6.0", - "from": "underscore@>=1.6.0 <1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" } } }, @@ -1108,6 +1130,12 @@ "from": "module-details-from-path@>=1.0.3 <2.0.0", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" }, + "moment": { + "version": "2.24.0", + "from": "moment@>=2.10.6 <3.0.0", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", + "optional": true + }, "mongodb": { "version": "2.2.36", "from": "mongodb@^2.2.31", @@ -1176,14 +1204,14 @@ "optional": true }, "node-fetch": { - "version": "2.3.0", - "from": "node-fetch@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.3.0.tgz" + "version": "2.6.0", + "from": "node-fetch@>=2.3.0 <3.0.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz" }, "node-forge": { - "version": "0.7.6", - "from": "node-forge@>=0.7.4 <0.8.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.6.tgz" + "version": "0.8.4", + "from": "node-forge@>=0.8.0 <0.9.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz" }, "oauth-sign": { "version": "0.3.0", @@ -1196,14 +1224,14 @@ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" }, "p-limit": { - "version": "2.1.0", - "from": "p-limit@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.1.0.tgz" + "version": "2.2.0", + "from": "p-limit@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz" }, "p-try": { - "version": "2.0.0", + "version": "2.2.0", "from": "p-try@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.0.0.tgz" + 
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" }, "parse-duration": { "version": "0.1.1", @@ -1216,9 +1244,9 @@ "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz" }, "parse-ms": { - "version": "2.0.0", + "version": "2.1.0", "from": "parse-ms@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz" }, "path-is-absolute": { "version": "1.0.1", @@ -1256,19 +1284,26 @@ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz" }, "prom-client": { - "version": "11.2.1", + "version": "11.5.1", "from": "prom-client@>=11.1.3 <12.0.0", - "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.2.1.tgz" + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz" }, "protobufjs": { "version": "6.8.8", "from": "protobufjs@>=6.8.6 <6.9.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz" + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", + "dependencies": { + "@types/node": { + "version": "10.14.9", + "from": "@types/node@>=10.1.0 <11.0.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz" + } + } }, "psl": { - "version": "1.1.31", + "version": "1.1.32", "from": "psl@>=1.1.24 <2.0.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz" + "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz" }, "punycode": { "version": "1.4.1", @@ -1291,9 +1326,9 @@ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" }, "raven": { - "version": "1.2.1", - "from": "raven@>=1.1.3 <2.0.0", - "resolved": "https://registry.npmjs.org/raven/-/raven-1.2.1.tgz", + "version": "1.1.3", + "from": "raven@1.1.3", + "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", "dependencies": { "cookie": { "version": "0.3.1", @@ -1308,9 +1343,9 @@ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" }, "redis": { - "version": "0.12.1", - "from": "redis@0.12.1", - "resolved": "https://registry.npmjs.org/redis/-/redis-0.12.1.tgz" + "version": "0.11.0", + "from": "redis@>=0.11.0 <0.12.0", + "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz" }, "redis-commands": { "version": "1.4.0", @@ -1330,19 +1365,12 @@ "redis-sentinel": { "version": "0.1.1", "from": "redis-sentinel@0.1.1", - "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz", - "dependencies": { - "redis": { - "version": "0.11.0", - "from": "redis@>=0.11.0 <0.12.0", - "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz" - } - } + "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz" }, "redis-sharelatex": { - "version": "1.0.5", - "from": "redis-sharelatex@latest", - "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.5.tgz", + "version": "1.0.8", + "from": "redis-sharelatex@1.0.8", + "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.8.tgz", "dependencies": { "coffee-script": { "version": "1.8.0", @@ -1468,9 +1496,21 @@ "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz" }, "require-in-the-middle": { - "version": "3.1.0", - "from": "require-in-the-middle@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-3.1.0.tgz" + "version": "4.0.0", + "from": 
"require-in-the-middle@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.0.tgz", + "dependencies": { + "debug": { + "version": "4.1.1", + "from": "debug@>=4.1.1 <5.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz" + }, + "ms": { + "version": "2.1.2", + "from": "ms@>=2.1.1 <3.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + } + } }, "require-like": { "version": "0.1.2", @@ -1478,9 +1518,9 @@ "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" }, "resolve": { - "version": "1.10.0", - "from": "resolve@>=1.5.0 <2.0.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz" + "version": "1.11.0", + "from": "resolve@>=1.10.0 <2.0.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz" }, "resolve-from": { "version": "2.0.0", @@ -1596,9 +1636,9 @@ "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" }, "standard-as-callback": { - "version": "1.0.1", - "from": "standard-as-callback@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-1.0.1.tgz" + "version": "2.0.1", + "from": "standard-as-callback@>=2.0.1 <3.0.0", + "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz" }, "statsd-parser": { "version": "0.0.4", @@ -1620,11 +1660,6 @@ "from": "stringstream@>=0.0.5 <0.1.0", "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" }, - "symbol-observable": { - "version": "1.2.0", - "from": "symbol-observable@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz" - }, "tdigest": { "version": "0.1.1", "from": "tdigest@>=0.1.1 <0.2.0", @@ -1689,6 +1724,11 @@ "from": "uid2@0.0.2", "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.2.tgz" }, + "underscore": { + "version": "1.6.0", + "from": "underscore@>=1.6.0 <1.7.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" + }, "uri-js": { "version": "4.2.2", "from": "uri-js@>=4.2.2 <5.0.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 386f48e7ba..a08f300865 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -24,11 +24,11 @@ "coffee-script": "~1.7.0", "express": "3.3.4", "lodash": "^4.17.4", - "logger-sharelatex": "^1.6.0", + "logger-sharelatex": "^1.7.0", "lynx": "0.0.11", - "metrics-sharelatex": "^2.1.1", + "metrics-sharelatex": "^2.2.0", "mongojs": "^2.6.0", - "redis-sharelatex": "^1.0.5", + "redis-sharelatex": "^1.0.8", "request": "2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index d0d40ba08e..879d0fe371 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -38,6 +38,7 @@ describe "Applying updates to a doc", -> DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> throw error if error? 
setTimeout done, 200 + return null after -> MockWebApi.getDocument.restore() @@ -51,6 +52,7 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @result done() + return null it "should push the applied updates to the track changes api", (done) -> rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => @@ -60,12 +62,14 @@ describe "Applying updates to a doc", -> throw error if error? result.should.equal 1 done() + return null it "should push the applied updates to the project history changes api", (done) -> rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => throw error if error? JSON.parse(updates[0]).op.should.deep.equal @update.op done() + return null it "should set the first op timestamp", (done) -> rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => @@ -73,6 +77,7 @@ describe "Applying updates to a doc", -> result.should.be.within(@startTime, Date.now()) @firstOpTimestamp = result done() + return null describe "when sending another update", -> before (done) -> @@ -81,12 +86,14 @@ describe "Applying updates to a doc", -> DocUpdaterClient.sendUpdate @project_id, @doc_id, @second_update, (error) -> throw error if error? setTimeout done, 200 + return null it "should not change the first op timestamp", (done) -> rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => throw error if error? result.should.equal @firstOpTimestamp done() + return null describe "when the document is loaded", -> before (done) -> @@ -99,6 +106,7 @@ describe "Applying updates to a doc", -> DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> throw error if error? 
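# [Editorial note] The pattern around this point recurs throughout these
# acceptance tests: updates are pushed to redis and applied asynchronously,
# so each before hook waits ~200ms (`setTimeout done, 200`) for the update
# to be processed before the assertions run.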
setTimeout done, 200 + return null after -> MockWebApi.getDocument.restore() @@ -110,6 +118,7 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @result done() + return null it "should push the applied updates to the track changes api", (done) -> rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => @@ -117,11 +126,13 @@ describe "Applying updates to a doc", -> rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) => result.should.equal 1 done() + return null it "should push the applied updates to the project history changes api", (done) -> rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => JSON.parse(updates[0]).op.should.deep.equal @update.op done() + return null describe "when the document has been deleted", -> @@ -161,6 +172,7 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @my_result done() + return null it "should push the applied updates to the track changes api", (done) -> rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => @@ -171,6 +183,7 @@ describe "Applying updates to a doc", -> rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) => result.should.equal 1 done() + return null it "should store the doc ops in the correct order", (done) -> rclient_du.lrange Keys.docOps({doc_id: @doc_id}), 0, -1, (error, updates) => @@ -178,6 +191,7 @@ describe "Applying updates to a doc", -> for appliedUpdate, i in @updates appliedUpdate.op.should.deep.equal updates[i].op done() + return null describe "when older ops come in after the delete", -> before (done) -> @@ -210,6 +224,7 @@ describe "Applying updates to a doc", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @my_result done() + return null describe "with a broken update", -> before (done) -> @@ -222,11 +237,13 @@ describe "Applying updates to a doc", -> DocUpdaterClient.sendUpdate @project_id, @doc_id, @broken_update, (error) -> throw error if error? setTimeout done, 200 + return null it "should not update the doc", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @lines done() + return null it "should send a message with an error", -> @messageCallback.called.should.equal true @@ -261,6 +278,7 @@ describe "Applying updates to a doc", -> async.series actions, (error) => throw error if error? setTimeout done, 2000 + return null after -> MockTrackChangesApi.flushDoc.restore() @@ -282,11 +300,13 @@ describe "Applying updates to a doc", -> DocUpdaterClient.sendUpdate @project_id, @doc_id, update, (error) -> throw error if error? setTimeout done, 200 + return null it "should update the doc (using version = 0)", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @result done() + return null describe "when the sending duplicate ops", -> before (done) -> @@ -322,11 +342,13 @@ describe "Applying updates to a doc", -> throw error if error? 
setTimeout done, 200 , 200 + return null it "should update the doc", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @result done() + return null it "should return a message about duplicate ops", -> @messageCallback.calledTwice.should.equal true diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee index 5fdf2a9b4d..cbb9fd9ea5 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee @@ -42,6 +42,7 @@ describe "Applying updates to a project's structure", -> update.version.should.equal "#{@version}.0" done() + return null describe "renaming a document", -> before -> @@ -57,6 +58,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> throw error if error? setTimeout done, 200 + return null it "should push the applied doc renames to the project history api", (done) -> rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => @@ -71,6 +73,7 @@ describe "Applying updates to a project's structure", -> update.version.should.equal "#{@version}.0" done() + return null describe "when the document is loaded", -> before (done) -> @@ -82,6 +85,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> throw error if error? setTimeout done, 200 + return null after -> MockWebApi.getDocument.restore() @@ -90,6 +94,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.getDoc @project_id, @docUpdate.id, (error, res, doc) => doc.pathname.should.equal @docUpdate.newPathname done() + return null it "should push the applied doc renames to the project history api", (done) -> rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => @@ -104,6 +109,7 @@ describe "Applying updates to a project's structure", -> update.version.should.equal "#{@version}.0" done() + return null describe "renaming multiple documents and files", -> before -> @@ -132,6 +138,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, @fileUpdates, @version, (error) -> throw error if error? setTimeout done, 200 + return null it "should push the applied doc renames to the project history api", (done) -> rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => @@ -170,6 +177,7 @@ describe "Applying updates to a project's structure", -> update.version.should.equal "#{@version}.3" done() + return null describe "adding a file", -> @@ -183,6 +191,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, @version, (error) -> throw error if error? 
setTimeout done, 200 + return null it "should push the file addition to the project history api", (done) -> rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => @@ -197,6 +206,7 @@ describe "Applying updates to a project's structure", -> update.version.should.equal "#{@version}.0" done() + return null describe "adding a doc", -> before (done) -> @@ -209,6 +219,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> throw error if error? setTimeout done, 200 + return null it "should push the doc addition to the project history api", (done) -> rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => @@ -223,6 +234,7 @@ describe "Applying updates to a project's structure", -> update.version.should.equal "#{@version}.0" done() + return null describe "with enough updates to flush to the history service", -> before (done) -> @@ -247,6 +259,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(250), [], @version1, (error) -> throw error if error? setTimeout done, 2000 + return null after -> MockProjectHistoryApi.flushProject.restore() @@ -278,6 +291,7 @@ describe "Applying updates to a project's structure", -> DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(10), [], @version1, (error) -> throw error if error? setTimeout done, 2000 + return null after -> MockProjectHistoryApi.flushProject.restore() diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index eb71179b3b..f5ec74cbd8 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -51,6 +51,7 @@ describe "Setting a document", -> @statusCode = res.statusCode done() , 200 + return null it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -64,17 +65,20 @@ describe "Setting a document", -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.lines.should.deep.equal @newLines done() + return null it "should bump the version in the doc updater", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => doc.version.should.equal @version + 2 done() + return null it "should leave the document in redis", (done) -> rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) => throw error if error? expect(JSON.parse(lines)).to.deep.equal @newLines done() + return null describe "when the updated doc does not exist in the doc updater", -> before (done) -> @@ -83,6 +87,7 @@ describe "Setting a document", -> DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => @statusCode = res.statusCode setTimeout done, 200 + return null it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -103,6 +108,7 @@ describe "Setting a document", -> throw error if error? 
expect(lines).to.not.exist done() + return null describe "with track changes", -> before -> @@ -131,6 +137,7 @@ describe "Setting a document", -> DocUpdaterClient.setDocLines @project_id, @doc_id, @lines, @source, @user_id, true, (error, res, body) => @statusCode = res.statusCode setTimeout done, 200 + return null it "should undo the tracked changes", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => @@ -138,6 +145,7 @@ describe "Setting a document", -> ranges = data.ranges expect(ranges.changes).to.be.undefined done() + return null describe "without the undo flag", -> before (done) -> @@ -151,6 +159,7 @@ describe "Setting a document", -> DocUpdaterClient.setDocLines @project_id, @doc_id, @lines, @source, @user_id, false, (error, res, body) => @statusCode = res.statusCode setTimeout done, 200 + return null it "should not undo the tracked changes", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => @@ -158,5 +167,6 @@ describe "Setting a document", -> ranges = data.ranges expect(ranges.changes.length).to.equal 1 done() + return null From e8a8c446ed071967d0228f10614674491799c4ed Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 19 Jun 2019 11:34:38 +0100 Subject: [PATCH 475/769] Increase timeout on test? --- .../test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 879d0fe371..51b9cf08a9 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -81,6 +81,7 @@ describe "Applying updates to a doc", -> describe "when sending another update", -> before (done) -> + @timeout = 10000 @second_update = Object.create(@update) @second_update.v = @version + 1 DocUpdaterClient.sendUpdate @project_id, @doc_id, @second_update, (error) -> From f37860599d548ed277609d3594f547aff0a8cbb3 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 25 Jun 2019 14:01:43 +0100 Subject: [PATCH 476/769] skip hash check when non-BMP characters replaced --- services/document-updater/app/coffee/UpdateManager.coffee | 7 +++++-- .../unit/coffee/UpdateManager/UpdateManagerTests.coffee | 5 ++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index e5ede11173..3d0a318b9b 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -138,10 +138,13 @@ module.exports = UpdateManager = # 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). # Something must be going on client side that is screwing up the encoding and splitting the # two 16-bit characters so that \uD835 is standalone. + BAD_CHAR_REGEXP = /[\uD800-\uDFFF]/g for op in update.op or [] - if op.i? + if op.i? 
&& BAD_CHAR_REGEXP.test(op.i) # Replace high and low surrogate characters with 'replacement character' (\uFFFD) - op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") + op.i = op.i.replace(BAD_CHAR_REGEXP, "\uFFFD") + # remove any client-side hash because we have modified the content + delete update.hash return update _addProjectHistoryMetadataToOps: (updates, pathname, projectHistoryId, lines) -> diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index ac8d4c742c..280fad5f33 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -212,7 +212,7 @@ describe "UpdateManager", -> describe "with UTF-16 surrogate pairs in the update", -> beforeEach -> - @update = {op: [{p: 42, i: "\uD835\uDC00"}]} + @update = {op: [{p: 42, i: "\uD835\uDC00"}], hash: "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15"} @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback it "should apply the update but with surrogate pairs removed", -> @@ -223,6 +223,9 @@ describe "UpdateManager", -> # \uFFFD is 'replacement character' @update.op[0].i.should.equal "\uFFFD\uFFFD" + it "should skip the hash check by removing any hash field present", -> + @update.should.not.have.property('hash') + describe "with an error", -> beforeEach -> @error = new Error("something went wrong") From 16fb2970435cfd6de760bb492279b4deec0bc39d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 27 Jun 2019 11:39:45 +0100 Subject: [PATCH 477/769] Revert "skip hash check when non-BMP characters replaced" --- services/document-updater/app/coffee/UpdateManager.coffee | 7 ++----- .../unit/coffee/UpdateManager/UpdateManagerTests.coffee | 5 +---- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.coffee index 3d0a318b9b..e5ede11173 100644 --- a/services/document-updater/app/coffee/UpdateManager.coffee +++ b/services/document-updater/app/coffee/UpdateManager.coffee @@ -138,13 +138,10 @@ module.exports = UpdateManager = # 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). # Something must be going on client side that is screwing up the encoding and splitting the # two 16-bit characters so that \uD835 is standalone. - BAD_CHAR_REGEXP = /[\uD800-\uDFFF]/g for op in update.op or [] - if op.i? && BAD_CHAR_REGEXP.test(op.i) + if op.i? 
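# Annotation (not part of the patch series): JavaScript strings are UTF-16, so a character
# outside the Basic Multilingual Plane, like the one linked in the comment above, occupies
# two code units (a surrogate pair); a client that splits the pair across ops leaves an
# invalid lone surrogate behind. A minimal illustration of the sanitisation, matching the
# unit test below:
#
#   "\uD835\uDC00".length                                  # 2 - one character, two code units
#   "\uD835".replace(/[\uD800-\uDFFF]/g, "\uFFFD")         # "\uFFFD" - lone surrogate replaced
#   "\uD835\uDC00".replace(/[\uD800-\uDFFF]/g, "\uFFFD")   # "\uFFFD\uFFFD" - intact pairs too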
# Replace high and low surrogate characters with 'replacement character' (\uFFFD) - op.i = op.i.replace(BAD_CHAR_REGEXP, "\uFFFD") - # remove any client-side hash because we have modified the content - delete update.hash + op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") return update _addProjectHistoryMetadataToOps: (updates, pathname, projectHistoryId, lines) -> diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee index 280fad5f33..ac8d4c742c 100644 --- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee @@ -212,7 +212,7 @@ describe "UpdateManager", -> describe "with UTF-16 surrogate pairs in the update", -> beforeEach -> - @update = {op: [{p: 42, i: "\uD835\uDC00"}], hash: "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15"} + @update = {op: [{p: 42, i: "\uD835\uDC00"}]} @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback it "should apply the update but with surrogate pairs removed", -> @@ -223,9 +223,6 @@ describe "UpdateManager", -> # \uFFFD is 'replacement character' @update.op[0].i.should.equal "\uFFFD\uFFFD" - it "should skip the hash check by removing any hash field present", -> - @update.should.not.have.property('hash') - describe "with an error", -> beforeEach -> @error = new Error("something went wrong") From eed8e215d7e8704a2d53a29c7f79bc49f60c3201 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 2 Jul 2019 16:58:30 +0100 Subject: [PATCH 478/769] bump redis-sharelatex (and io redis) to 1.0.9 --- services/document-updater/npm-shrinkwrap.json | 30 +++++++++++++------ services/document-updater/package.json | 2 +- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 794836656c..3809c8746c 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -779,9 +779,21 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" }, "ioredis": { - "version": "4.9.5", - "from": "ioredis@>=4.9.1 <4.10.0", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.9.5.tgz" + "version": "4.11.1", + "from": "ioredis@>=4.11.1 <4.12.0", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.1.tgz", + "dependencies": { + "debug": { + "version": "4.1.1", + "from": "debug@>=4.1.1 <5.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz" + }, + "ms": { + "version": "2.1.2", + "from": "ms@>=2.1.1 <3.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + } + } }, "is": { "version": "3.3.0", @@ -1348,9 +1360,9 @@ "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz" }, "redis-commands": { - "version": "1.4.0", - "from": "redis-commands@1.4.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.4.0.tgz" + "version": "1.5.0", + "from": "redis-commands@1.5.0", + "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz" }, "redis-errors": { "version": "1.2.0", @@ -1368,9 +1380,9 @@ "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz" }, "redis-sharelatex": { - "version": "1.0.8", - "from": "redis-sharelatex@1.0.8", - "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.8.tgz", + "version": "1.0.9", + "from": 
"redis-sharelatex@1.0.9", + "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.9.tgz", "dependencies": { "coffee-script": { "version": "1.8.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index a08f300865..635e23ab74 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -28,7 +28,7 @@ "lynx": "0.0.11", "metrics-sharelatex": "^2.2.0", "mongojs": "^2.6.0", - "redis-sharelatex": "^1.0.8", + "redis-sharelatex": "^1.0.9", "request": "2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", From 3b3b2da0f5f64b2f0564475092d20e6b0f922c72 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 3 Jul 2019 10:21:25 +0100 Subject: [PATCH 479/769] add pubsub redis connection and remove real time redis connection --- services/document-updater/app.coffee | 4 ++-- .../app/coffee/DispatchManager.coffee | 2 +- .../app/coffee/RealTimeRedisManager.coffee | 4 ++-- .../document-updater/config/settings.defaults.coffee | 11 +++++------ .../acceptance/coffee/helpers/DocUpdaterClient.coffee | 6 +++--- .../RealTimeRedisManagerTests.coffee | 2 +- 6 files changed, 14 insertions(+), 15 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 66d941b832..829cc029eb 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -73,9 +73,9 @@ app.get '/status', (req, res)-> else res.send('document updater is alive') -webRedisClient = require("redis-sharelatex").createClient(Settings.redis.realtime) +pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub) app.get "/health_check/redis", (req, res, next) -> - webRedisClient.healthCheck (error) -> + pubsubClient.healthCheck (error) -> if error? 
logger.err {err: error}, "failed redis health check" res.send 500 diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.coffee index 419aa17de7..375f3b98dc 100644 --- a/services/document-updater/app/coffee/DispatchManager.coffee +++ b/services/document-updater/app/coffee/DispatchManager.coffee @@ -10,7 +10,7 @@ RateLimitManager = require('./RateLimitManager') module.exports = DispatchManager = createDispatcher: (RateLimiter) -> - client = redis.createClient(Settings.redis.realtime) + client = redis.createClient(Settings.redis.documentupdater) worker = { client: client _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index da6f47f2ff..5644fe82a0 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -1,6 +1,6 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) -Keys = Settings.redis.realtime.key_schema +rclient = require("redis-sharelatex").createClient(Settings.redis.pubsub) +Keys = Settings.redis.documentupdater.key_schema logger = require('logger-sharelatex') os = require "os" crypto = require "crypto" diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index f68077bb8b..f359ab38f0 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -21,14 +21,12 @@ module.exports = redis: - realtime: - port: process.env["REAL_TIME_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["REAL_TIME_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["REAL_TIME_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" + pubsub: + host: process.env['PUBSUB_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost" + port: process.env['PUBSUB_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379" + password: process.env["PUBSUB_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" redisOptions: keepAlive: 100 - key_schema: - pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" history: port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" @@ -81,6 +79,7 @@ module.exports = pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:{#{doc_id}}" lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:{#{doc_id}}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" redisOptions: keepAlive: 100 diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 7f50d64372..82dba20685 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -1,10 +1,10 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.realtime) -keys = Settings.redis.realtime.key_schema +rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +keys = Settings.redis.documentupdater.key_schema request = require("request").defaults(jar: false) async = require 
"async" -rclient_sub = require("redis-sharelatex").createClient(Settings.redis.realtime) +rclient_sub = require("redis-sharelatex").createClient(Settings.redis.pubsub) rclient_sub.subscribe "applied-ops" rclient_sub.setMaxListeners(0) diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index 32ec3d9020..2d23e948a2 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -15,7 +15,7 @@ describe "RealTimeRedisManager", -> "redis-sharelatex": createClient: () => @rclient "settings-sharelatex": redis: - realtime: @settings = + documentupdater: @settings = key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" "logger-sharelatex": { log: () -> } From 97487a077e7423b3c1e99eb2830d5fec6f7c8102 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 10 Jul 2019 09:42:05 +0100 Subject: [PATCH 480/769] fix cluster/sentinel connection with real-time --- .../app/coffee/RealTimeRedisManager.coffee | 5 +++-- .../RealTimeRedisManagerTests.coffee | 9 ++++++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index 5644fe82a0..e949268f62 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -1,5 +1,6 @@ Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.pubsub) +rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub) Keys = Settings.redis.documentupdater.key_schema logger = require('logger-sharelatex') os = require "os" @@ -35,4 +36,4 @@ module.exports = RealTimeRedisManager = # create a unique message id using a counter message_id = "doc:#{HOST}:#{RND}-#{COUNT++}" data?._id = message_id - rclient.publish "applied-ops", JSON.stringify(data) + pubsubClient.publish "applied-ops", JSON.stringify(data) diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index 2d23e948a2..1d97779bfa 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -11,13 +11,17 @@ describe "RealTimeRedisManager", -> auth: () -> exec: sinon.stub() @rclient.multi = () => @rclient + @pubsubClient = + publish: sinon.stub() @RealTimeRedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex": createClient: () => @rclient + "redis-sharelatex": createClient: (config) => if (config.name is 'pubsub') then @pubsubClient else @rclient "settings-sharelatex": redis: documentupdater: @settings = key_schema: pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + pubsub: + name: "pubsub" "logger-sharelatex": { log: () -> } "crypto": @crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) } "os": @os = {hostname: sinon.stub().returns("somehost")} @@ -81,8 +85,7 
@@ describe "RealTimeRedisManager", -> describe "sendData", -> beforeEach -> @message_id = "doc:somehost:01020304-0" - @rclient.publish = sinon.stub() @RealTimeRedisManager.sendData({op: "thisop"}) it "should send the op with a message id", -> - @rclient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true \ No newline at end of file + @pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true \ No newline at end of file From c9ccf62d714912fb3f53de46106a51620cce3e49 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Jul 2019 12:20:06 +0100 Subject: [PATCH 481/769] support per-doc pubsub channels --- .../app/coffee/RealTimeRedisManager.coffee | 7 ++++++- services/document-updater/config/settings.defaults.coffee | 4 +++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index e949268f62..d6a4fc5044 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -36,4 +36,9 @@ module.exports = RealTimeRedisManager = # create a unique message id using a counter message_id = "doc:#{HOST}:#{RND}-#{COUNT++}" data?._id = message_id - pubsubClient.publish "applied-ops", JSON.stringify(data) + # publish on separate channels for individual projects and docs when + # configured (needs realtime to be configured for this too). + if Settings.publishOnIndividualChannels and data.doc_id? + pubsubClient.publish "applied-ops:#{data.doc_id}", JSON.stringify(data) + else + pubsubClient.publish "applied-ops", JSON.stringify(data) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index f359ab38f0..547866599d 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -91,4 +91,6 @@ module.exports = url: "mongodb://#{process.env["MONGO_HOST"] or "localhost"}/sharelatex" sentry: - dsn: process.env.SENTRY_DSN \ No newline at end of file + dsn: process.env.SENTRY_DSN + + publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] or false From 618880f99d804f522ce4944e720c887d8e7bf4b3 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 24 Jul 2019 16:57:43 +0100 Subject: [PATCH 482/769] remove unnecessary check for doc_id --- .../document-updater/app/coffee/RealTimeRedisManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index d6a4fc5044..d04f2304d3 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -38,7 +38,7 @@ module.exports = RealTimeRedisManager = data?._id = message_id # publish on separate channels for individual projects and docs when # configured (needs realtime to be configured for this too). - if Settings.publishOnIndividualChannels and data.doc_id? 
+ if Settings.publishOnIndividualChannels pubsubClient.publish "applied-ops:#{data.doc_id}", JSON.stringify(data) else pubsubClient.publish "applied-ops", JSON.stringify(data) From df9ca8b2723517c1ae0bb8f1958556f02a629ebb Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 31 Jul 2019 15:25:54 +0100 Subject: [PATCH 483/769] Add serializer to print only length of large fields in production --- services/document-updater/app.coffee | 5 +++-- .../app/coffee/LoggerSerializers.coffee | 22 ++++++++----------- 2 files changed, 12 insertions(+), 15 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 829cc029eb..9623a36c2c 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -6,8 +6,9 @@ http = require("http") Settings = require('settings-sharelatex') logger = require('logger-sharelatex') logger.initialize("document-updater") -logger.logger.serializers.docs = require("./app/js/LoggerSerializers").docs -logger.logger.serializers.files = require("./app/js/LoggerSerializers").files + +logger.logger.addSerializers(require("./app/js/LoggerSerializers")) + if Settings.sentry?.dsn? logger.initializeErrorReporting(Settings.sentry.dsn) diff --git a/services/document-updater/app/coffee/LoggerSerializers.coffee b/services/document-updater/app/coffee/LoggerSerializers.coffee index 3d4bfc42c2..5c55bc197a 100644 --- a/services/document-updater/app/coffee/LoggerSerializers.coffee +++ b/services/document-updater/app/coffee/LoggerSerializers.coffee @@ -1,14 +1,10 @@ -module.exports = - docs: (docs) -> - docs.map (doc) -> - { - path: doc.path - id: doc.doc - } +showLength = (thing) -> + "length: #{thing?.length}" - files: (files) -> - files.map (file) -> - { - path: file.path - id: file.file - } +module.exports = + # replace long values with their length + lines: showLength + oldLines: showLength + newLines: showLength + ranges: showLength + update: showLength From aa1c74a979fbc832af498bcea1302964da7b235f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 7 Aug 2019 15:44:57 +0100 Subject: [PATCH 484/769] use MONGO_CONNECTION_STRING in settings.defaults.coffee if set --- services/document-updater/config/settings.defaults.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 547866599d..6801c62a8b 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -88,7 +88,7 @@ module.exports = dispatcherCount: process.env["DISPATCHER_COUNT"] mongo: - url: "mongodb://#{process.env["MONGO_HOST"] or "localhost"}/sharelatex" + url : process.env['MONGO_CONNECTION_STRING'] || "mongodb://#{process.env['MONGO_HOST'] or '127.0.0.1'}/sharelatex" sentry: dsn: process.env.SENTRY_DSN From 40f6494b198585c619a4378ffbde58c29107b844 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Aug 2019 16:25:23 +0100 Subject: [PATCH 485/769] add a combined health check for mongo and redis --- services/document-updater/app.coffee | 25 +++++++++++++++++++ .../app/coffee/mongojs.coffee | 7 +++++- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 9623a36c2c..6c1d8d1136 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -16,6 +16,8 @@ RedisManager = require('./app/js/RedisManager') 
DispatchManager = require('./app/js/DispatchManager') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" +mongojs = require "./app/js/mongojs" +async = require "async" Path = require "path" @@ -92,6 +94,29 @@ app.get "/health_check/redis_cluster", (req, res, next) -> else res.send 200 +app.get "/health_check", (req, res, next) -> + async.series [ + (cb) -> + pubsubClient.healthCheck (error) -> + if error? + logger.err {err: error}, "failed redis health check" + cb(error) + (cb) -> + docUpdaterRedisClient.healthCheck (error) -> + if error? + logger.err {err: error}, "failed redis cluster health check" + cb(error) + (cb) -> + mongojs.healthCheck (error) -> + if error? + logger.err {err: error}, "failed mongo health check" + cb(error) + ] , (error) -> + if error? + res.send 500 + else + res.send 200 + app.use (error, req, res, next) -> if error instanceof Errors.NotFoundError res.send 404 diff --git a/services/document-updater/app/coffee/mongojs.coffee b/services/document-updater/app/coffee/mongojs.coffee index 8f8f1a9ab9..dfeebb788f 100644 --- a/services/document-updater/app/coffee/mongojs.coffee +++ b/services/document-updater/app/coffee/mongojs.coffee @@ -1,7 +1,12 @@ Settings = require "settings-sharelatex" mongojs = require "mongojs" db = mongojs(Settings.mongo.url, ["docSnapshots"]) + module.exports = db: db ObjectId: mongojs.ObjectId - + healthCheck: (callback) -> + db.runCommand {ping: 1}, (err, res) -> + return callback(err) if err? + return callback(new Error("failed mongo ping")) if !res.ok + callback() From 06444d2cc4f56f93c7eab89c4e18a9fd0f4d70b2 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 8 Aug 2019 14:10:54 +0100 Subject: [PATCH 486/769] Improve/fix serializers for update logging (#80) * Improve/fix serializers for update logging --- .../app/coffee/LoggerSerializers.coffee | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/LoggerSerializers.coffee b/services/document-updater/app/coffee/LoggerSerializers.coffee index 5c55bc197a..437f49e074 100644 --- a/services/document-updater/app/coffee/LoggerSerializers.coffee +++ b/services/document-updater/app/coffee/LoggerSerializers.coffee @@ -1,10 +1,25 @@ +_ = require('lodash') + showLength = (thing) -> - "length: #{thing?.length}" + if thing?.length then thing.length else thing + +showUpdateLength = (update) -> + if update?.op instanceof Array + copy = _.cloneDeep(update) + copy.op.forEach (element, index) -> + copy.op[index].i = element.i.length if element?.i?.length? + copy.op[index].d = element.d.length if element?.d?.length? + copy.op[index].c = element.c.length if element?.c?.length? 
+ copy + else + update module.exports = # replace long values with their length lines: showLength oldLines: showLength newLines: showLength + docLines: showLength + newDocLines: showLength ranges: showLength - update: showLength + update: showUpdateLength From a76e0dca88ce0b46e4e21effe612f66a9dd8516f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 15 Aug 2019 09:51:16 +0100 Subject: [PATCH 487/769] skip history flush when project is cleared by realtime shutdown history is flushed by a background cron job anyway --- services/document-updater/app/coffee/HistoryManager.coffee | 3 +++ services/document-updater/app/coffee/HttpController.coffee | 1 + 2 files changed, 4 insertions(+) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 0c3ab9cea1..7cfafa9ba0 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -28,6 +28,9 @@ module.exports = HistoryManager = # flush changes and callback (for when we need to know the queue is flushed) flushProjectChanges: (project_id, options, callback = (error) ->) -> return callback() if !Settings.apis?.project_history?.enabled + if options.skip_history_flush + logger.log {project_id}, "skipping flush of project history from realtime shutdown" + return callback() url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush" qs = {} qs.background = true if options.background # pass on the background flush option if present diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index b35943f5cd..d2ef5043d0 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -133,6 +133,7 @@ module.exports = HttpController = timer = new Metrics.Timer("http.deleteProject") options = {} options.background = true if req.query?.background # allow non-urgent flushes to be queued + options.skip_history_flush = true if req.query?.shutdown # don't flush history when realtime shuts down ProjectManager.flushAndDeleteProjectWithLocks project_id, options, (error) -> timer.done() return next(error) if error? 
From 7493462154bd249a78c5de9f9d81ba7a7293b94d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 15 Aug 2019 10:38:07 +0100 Subject: [PATCH 488/769] add acceptance test for realtime shutdown --- .../coffee/DeletingAProjectTests.coffee | 49 +++++++++++++++++-- .../coffee/helpers/DocUpdaterClient.coffee | 3 ++ 2 files changed, 47 insertions(+), 5 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 7a5eed5691..cb1d3495d8 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -41,17 +41,15 @@ describe "Deleting a project", -> version: doc.update.v } - sinon.spy MockTrackChangesApi, "flushDoc" - sinon.spy MockProjectHistoryApi, "flushProject" DocUpdaterApp.ensureRunning(done) - after -> - MockTrackChangesApi.flushDoc.restore() - MockProjectHistoryApi.flushProject.restore() describe "with documents which have been updated", -> before (done) -> sinon.spy MockWebApi, "setDocument" + sinon.spy MockTrackChangesApi, "flushDoc" + sinon.spy MockProjectHistoryApi, "flushProject" + async.series @docs.map((doc) => (callback) => DocUpdaterClient.preloadDoc @project_id, doc.id, (error) => @@ -68,6 +66,8 @@ describe "Deleting a project", -> after -> MockWebApi.setDocument.restore() + MockTrackChangesApi.flushDoc.restore() + MockProjectHistoryApi.flushProject.restore() it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -96,3 +96,42 @@ describe "Deleting a project", -> it "should flush each doc in project history", -> MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true + + describe "with the shutdown=true parameter from realtime", -> + before (done) -> + sinon.spy MockWebApi, "setDocument" + sinon.spy MockTrackChangesApi, "flushDoc" + sinon.spy MockProjectHistoryApi, "flushProject" + + async.series @docs.map((doc) => + (callback) => + DocUpdaterClient.preloadDoc @project_id, doc.id, callback + ), (error) => + throw error if error? 
+ setTimeout () => + DocUpdaterClient.deleteProjectOnShutdown @project_id, (error, res, body) => + @statusCode = res.statusCode + done() + , 200 + + after -> + MockWebApi.setDocument.restore() + MockTrackChangesApi.flushDoc.restore() + MockProjectHistoryApi.flushProject.restore() + + it "should return a 204 status code", -> + @statusCode.should.equal 204 + + it "should send each document to the web api", -> + for doc in @docs + MockWebApi.setDocument + .calledWith(@project_id, doc.id, doc.updatedLines) + .should.equal true + + it "should flush each doc in track changes", -> + for doc in @docs + MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal true + + it "should not flush to project history", -> + MockProjectHistoryApi.flushProject.called.should.equal false + diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 82dba20685..9525cc27e9 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -75,6 +75,9 @@ module.exports = DocUpdaterClient = deleteProject: (project_id, callback = () ->) -> request.del "http://localhost:3003/project/#{project_id}", callback + deleteProjectOnShutdown: (project_id, callback = () ->) -> + request.del "http://localhost:3003/project/#{project_id}?background=true&shutdown=true", callback + acceptChange: (project_id, doc_id, change_id, callback = () ->) -> request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/change/#{change_id}/accept", callback From e75f2cc3253a2be2b3499fbc699d349d5c6bd359 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 15 Aug 2019 10:54:12 +0100 Subject: [PATCH 489/769] add unit tests for skipping history flush --- .../HistoryManager/HistoryManagerTests.coffee | 22 +++++++++++++++++++ .../HttpController/HttpControllerTests.coffee | 11 ++++++++++ 2 files changed, 33 insertions(+) diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 1198bf7c7b..07c3577a91 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -46,6 +46,28 @@ describe "HistoryManager", -> .calledWith({url: "#{@Settings.apis.project_history.url}/project/#{@project_id}/flush", qs:{background:true}}) .should.equal true + describe "flushProjectChanges", -> + + describe "in the normal case", -> + beforeEach -> + @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) + @HistoryManager.flushProjectChanges @project_id, {background:true} + + it "should send a request to the project history api", -> + @request.post + .calledWith({url: "#{@Settings.apis.project_history.url}/project/#{@project_id}/flush", qs:{background:true}}) + .should.equal true + + describe "with the skip_history_flush option", -> + beforeEach -> + @request.post = sinon.stub() + @HistoryManager.flushProjectChanges @project_id, {skip_history_flush:true} + + it "should not send a request to the project history api", -> + @request.post + .called + .should.equal false + describe "recordAndFlushHistoryOps", -> beforeEach -> @ops = [ 'mock-ops' ] diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee 
b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 6429a4031d..c1f5c5eca8 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -343,6 +343,17 @@ describe "HttpController", -> it "should time the request", -> @Metrics.Timer::done.called.should.equal true + describe "with the shutdown=true option from realtime", -> + beforeEach -> + @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2) + @req.query = {background:true, shutdown:true} + @HttpController.deleteProject(@req, @res, @next) + + it "should pass the skip_history_flush option when flushing the project", -> + @ProjectManager.flushAndDeleteProjectWithLocks + .calledWith(@project_id, {background:true, skip_history_flush:true}) + .should.equal true + describe "when an errors occurs", -> beforeEach -> @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2, new Error("oops")) From 0ae838dd2d6a070660e774d21e65910c1236a8a3 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 29 Aug 2019 20:36:00 +0100 Subject: [PATCH 490/769] add logger into project flusher --- services/document-updater/app/coffee/ProjectFlusher.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/app/coffee/ProjectFlusher.coffee b/services/document-updater/app/coffee/ProjectFlusher.coffee index cd23cc6f74..ce93b0e2f1 100644 --- a/services/document-updater/app/coffee/ProjectFlusher.coffee +++ b/services/document-updater/app/coffee/ProjectFlusher.coffee @@ -6,6 +6,7 @@ docUpdaterKeys = Settings.redis.documentupdater.key_schema async = require("async") ProjectManager = require("./ProjectManager") _ = require("lodash") +logger = require("logger-sharelatex") ProjectFlusher = From aa15a76059148f160c9ad07393b9964ae88c6dd0 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 30 Aug 2019 07:38:53 +0100 Subject: [PATCH 491/769] added log lines for all project flushing --- services/document-updater/app/coffee/ProjectFlusher.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/document-updater/app/coffee/ProjectFlusher.coffee b/services/document-updater/app/coffee/ProjectFlusher.coffee index ce93b0e2f1..e1ead3759c 100644 --- a/services/document-updater/app/coffee/ProjectFlusher.coffee +++ b/services/document-updater/app/coffee/ProjectFlusher.coffee @@ -47,6 +47,7 @@ ProjectFlusher = return ids flushAllProjects: (options, callback)-> + logger.log options:options, "flushing all projects" ProjectFlusher._getKeys docUpdaterKeys.docsInProject({project_id:"*"}), options.limit, (error, project_keys) -> if error? logger.err err:error, "error getting keys for flushing" @@ -65,6 +66,7 @@ ProjectFlusher = failure.push(project_ids[i]) else success.push(project_ids[i]) + logger.log success:success, failure:failure, "finished flushing all projects" return callback(error, {success:success, failure:failure}) From 912a3a7753f1350584fa60a96552fae02314d0fd Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 9 Sep 2019 15:27:58 +0100 Subject: [PATCH 492/769] remove redis server-side hashing for performance we still compute the document hash in node, and check it on retrieval but we don't check the hash at the point of writing it in redis which was previously done with a redis Lua script. 
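A minimal sketch (annotation, not part of the patch series) of the hashing that remains after this change: the doc lines are serialized and SHA-1 hashed in node before each write, and the digest is recomputed and checked on retrieval, so only the write-time `redis.sha1hex` verification goes away. The function name is hypothetical; the crypto usage mirrors the unit tests below.

  crypto = require "crypto"

  # Compute the content hash stored next to the doc lines in redis.
  computeDocHash = (lines) ->
    crypto.createHash('sha1').update(JSON.stringify(lines), 'utf8').digest('hex')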
--- .../app/coffee/RedisManager.coffee | 20 +--------- .../RedisManager/RedisManagerTests.coffee | 37 +++---------------- 2 files changed, 8 insertions(+), 49 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 85918f4608..82b6caccd7 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -15,16 +15,8 @@ MAX_REDIS_REQUEST_LENGTH = 5000 # 5 seconds # Make times easy to read minutes = 60 # seconds for Redis expire -# LUA script to write document and return hash -# arguments: docLinesKey docLines -setScript = """ - redis.call('set', KEYS[1], ARGV[1]) - return redis.sha1hex(ARGV[1]) -""" - logHashErrors = Settings.documentupdater?.logHashErrors logHashReadErrors = logHashErrors?.read -logHashWriteErrors = logHashErrors?.write MEGABYTES = 1024 * 1024 MAX_RANGES_SIZE = 3 * MEGABYTES @@ -52,7 +44,7 @@ module.exports = RedisManager = logger.error {err: error, doc_id, project_id}, error.message return callback(error) multi = rclient.multi() - multi.eval setScript, 1, keys.docLines(doc_id:doc_id), docLines + multi.set keys.docLines(doc_id:doc_id), docLines multi.set keys.projectKey({doc_id:doc_id}), project_id multi.set keys.docVersion(doc_id:doc_id), version multi.set keys.docHash(doc_id:doc_id), docHash @@ -64,10 +56,6 @@ module.exports = RedisManager = multi.set keys.projectHistoryId(doc_id:doc_id), projectHistoryId multi.exec (error, result) -> return callback(error) if error? - # check the hash computed on the redis server - writeHash = result?[0] - if logHashWriteErrors and writeHash? and writeHash isnt docHash - logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, docLines:docLines, "hash mismatch on putDocInMemory" # update docsInProject set rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback @@ -247,7 +235,7 @@ module.exports = RedisManager = logger.error err: error, doc_id: doc_id, ranges: ranges, error.message return callback(error) multi = rclient.multi() - multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines # index 0 + multi.set keys.docLines(doc_id:doc_id), newDocLines # index 0 multi.set keys.docVersion(doc_id:doc_id), newVersion # index 1 multi.set keys.docHash(doc_id:doc_id), newHash # index 2 multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 # index 3 @@ -272,10 +260,6 @@ module.exports = RedisManager = multi.del keys.lastUpdatedBy(doc_id: doc_id) # index 9 multi.exec (error, result) -> return callback(error) if error? - # check the hash computed on the redis server - writeHash = result?[0] - if logHashWriteErrors and writeHash? 
and writeHash isnt newHash - logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, docLines:newDocLines, "hash mismatch on updateDocument" # length of uncompressedHistoryOps queue (index 7) docUpdateCount = result[7] diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index cdfdc45ac2..5491922efb 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -351,7 +351,6 @@ describe "RedisManager", -> @multi.expire = sinon.stub() @multi.ltrim = sinon.stub() @multi.del = sinon.stub() - @multi.eval = sinon.stub() @multi.exec = sinon.stub().callsArgWith(0, null, [@hash, null, null, null, null, null, null, @doc_update_list_length, null, null] ) @@ -374,8 +373,8 @@ describe "RedisManager", -> .should.equal true it "should set the doclines", -> - @multi.eval - .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) + @multi.set + .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true it "should set the version", -> @@ -486,8 +485,8 @@ describe "RedisManager", -> .should.equal false it "should still set the doclines", -> - @multi.eval - .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) + @multi.set + .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true describe "with empty ranges", -> @@ -505,20 +504,6 @@ describe "RedisManager", -> .calledWith("Ranges:#{@doc_id}") .should.equal true - describe "with a corrupted write", -> - beforeEach -> - @badHash = "INVALID-HASH-VALUE" - @multi.exec = sinon.stub().callsArgWith(0, null, [@badHash]) - @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback - - it 'should log a hash error', -> - @logger.error.calledWith() - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - describe "with null bytes in the serialized doc lines", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @@ -567,7 +552,6 @@ describe "RedisManager", -> @multi.set = sinon.stub() @rclient.sadd = sinon.stub().yields() @multi.del = sinon.stub() - @multi.eval = sinon.stub() @lines = ["one", "two", "three", "これは"] @version = 42 @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') @@ -580,8 +564,8 @@ describe "RedisManager", -> @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done it "should set the lines", -> - @multi.eval - .calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines)) + @multi.set + .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) .should.equal true it "should set the version", -> @@ -637,15 +621,6 @@ describe "RedisManager", -> .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) .should.equal false - describe "with a corrupted write", -> - beforeEach (done) -> - @multi.exec = sinon.stub().callsArgWith(0, null, ["INVALID-HASH-VALUE"]) - @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done - - it 'should log a hash error', -> - @logger.error.calledWith() - .should.equal true - describe 
"with null bytes in the serialized doc lines", -> beforeEach -> @_stringify = JSON.stringify From c32101f0fbdffe3537208599ad3e0f28de02604e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 19 Sep 2019 16:22:43 +0100 Subject: [PATCH 493/769] upgrade redis-sharelatex to 1.0.10 --- services/document-updater/npm-shrinkwrap.json | 10 +++++----- services/document-updater/package.json | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 3809c8746c..32ad981a08 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -779,9 +779,9 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" }, "ioredis": { - "version": "4.11.1", + "version": "4.11.2", "from": "ioredis@>=4.11.1 <4.12.0", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.1.tgz", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.2.tgz", "dependencies": { "debug": { "version": "4.1.1", @@ -1380,9 +1380,9 @@ "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz" }, "redis-sharelatex": { - "version": "1.0.9", - "from": "redis-sharelatex@1.0.9", - "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.9.tgz", + "version": "1.0.10", + "from": "redis-sharelatex@1.0.10", + "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.10.tgz", "dependencies": { "coffee-script": { "version": "1.8.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 635e23ab74..040febca1d 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -28,7 +28,7 @@ "lynx": "0.0.11", "metrics-sharelatex": "^2.2.0", "mongojs": "^2.6.0", - "redis-sharelatex": "^1.0.9", + "redis-sharelatex": "^1.0.10", "request": "2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", From f6b2ac73609faa5f7b031dd73af3279c1f8f7f6f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 25 Sep 2019 16:42:49 +0100 Subject: [PATCH 494/769] queue deletes for deferred processing --- services/document-updater/app.coffee | 2 + .../app/coffee/DeleteQueueManager.coffee | 45 +++++++++++++++++++ .../app/coffee/HttpController.coffee | 41 ++++++++++++++--- .../app/coffee/ProjectManager.coffee | 16 +++++++ .../app/coffee/RedisManager.coffee | 24 ++++++++++ .../config/settings.defaults.coffee | 1 + .../coffee/DeletingAProjectTests.coffee | 8 +++- .../coffee/helpers/DocUpdaterClient.coffee | 3 ++ .../HttpController/HttpControllerTests.coffee | 11 ++--- 9 files changed, 138 insertions(+), 13 deletions(-) create mode 100644 services/document-updater/app/coffee/DeleteQueueManager.coffee diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 6c1d8d1136..2962860027 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -55,6 +55,7 @@ app.post '/project/:project_id/doc/:doc_id', HttpCont app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc app.delete '/project/:project_id', HttpController.deleteProject +app.delete '/project', HttpController.deleteMultipleProjects app.post '/project/:project_id', HttpController.updateProject app.post '/project/:project_id/history/resync', HttpController.resyncProjectHistory app.post 
'/project/:project_id/flush', HttpController.flushProject @@ -63,6 +64,7 @@ app.post '/project/:project_id/doc/:doc_id/change/accept', HttpCont app.del '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment app.get '/flush_all_projects', HttpController.flushAllProjects +app.get '/flush_queued_projects', HttpController.flushQueuedProjects app.get '/total', (req, res)-> timer = new Metrics.Timer("http.allDocList") diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee new file mode 100644 index 0000000000..0a122369c3 --- /dev/null +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -0,0 +1,45 @@ +RedisManager = require "./RedisManager" +ProjectManager = require "./ProjectManager" +logger = require "logger-sharelatex" +metrics = require "./Metrics" +async = require "async" + +module.exports = DeleteQueueManager = + flushAndDeleteOldProjects: (options, callback) -> + startTime = Date.now() + count = 0 + + flushProjectIfNotModified = (project_id, flushTimestamp, cb) -> + ProjectManager.getProjectDocsTimestamps project_id, (err, timestamps) -> + return callback(err) if err? + if !timestamps? + logger.log {project_id}, "skipping flush of queued project - no timestamps" + return cb() + # are any of the timestamps newer than the time the project was flushed? + for timestamp in timestamps or [] when timestamp > flushTimestamp + metrics.inc "queued-delete-skipped" + logger.debug {project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete" + return cb() + logger.log {project_id, flushTimestamp}, "flushing queued project" + ProjectManager.flushAndDeleteProjectWithLocks project_id, {skip_history_flush: true}, (err) -> + logger.err {project_id, err}, "error flushing queued project" + metrics.inc "queued-delete-completed" + return cb(null, true) + + flushNextProject = () -> + now = Date.now() + if now - startTime > options.timeout + logger.log "hit time limit on flushing old projects" + return callback() + if count > options.limit + logger.log "hit count limit on flushing old projects" + return callback() + cutoffTime = now - options.min_delete_age + RedisManager.getNextProjectToFlushAndDelete cutoffTime, (err, project_id, flushTimestamp) -> + return callback(err) if err? + return callback() if !project_id? 
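# Annotation (not part of the patch series): flushProjectIfNotModified above completes a
# queued delete only when no doc in the project has a lastUpdatedAt timestamp newer than
# the zset score recorded when the delete was queued; any newer edit means the project is
# live again and the queued entry is simply dropped. A tiny sketch of that comparison,
# with hypothetical values:
#
#   flushTimestamp = 1569400000000           # score recorded at queue time
#   timestamps = [1569399990000, null]       # lastUpdatedAt per doc (null = never updated)
#   safeToFlush = timestamps.every (t) -> !t? or t <= flushTimestamp   # true here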
+ flushProjectIfNotModified project_id, flushTimestamp, (err, flushed) -> + count++ if flushed + flushNextProject() + + flushNextProject() \ No newline at end of file diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index d2ef5043d0..96595c3449 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -5,7 +5,8 @@ Errors = require "./Errors" logger = require "logger-sharelatex" Metrics = require "./Metrics" ProjectFlusher = require("./ProjectFlusher") - +DeleteQueueManager = require("./DeleteQueueManager") +async = require "async" TWO_MEGABYTES = 2 * 1024 * 1024 @@ -130,14 +131,30 @@ module.exports = HttpController = deleteProject: (req, res, next = (error) ->) -> project_id = req.params.project_id logger.log project_id: project_id, "deleting project via http" - timer = new Metrics.Timer("http.deleteProject") options = {} options.background = true if req.query?.background # allow non-urgent flushes to be queued options.skip_history_flush = true if req.query?.shutdown # don't flush history when realtime shuts down - ProjectManager.flushAndDeleteProjectWithLocks project_id, options, (error) -> - timer.done() + if req.query?.background + ProjectManager.queueFlushAndDeleteProject project_id, (error) -> + return next(error) if error? + logger.log project_id: project_id, "queue delete of project via http" + res.send 204 # No Content + else + timer = new Metrics.Timer("http.deleteProject") + ProjectManager.flushAndDeleteProjectWithLocks project_id, options, (error) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, "deleted project via http" + res.send 204 # No Content + + deleteMultipleProjects: (req, res, next = (error) ->) -> + project_ids = req.body?.project_ids || [] + logger.log project_ids: project_ids, "deleting multiple projects via http" + async.eachSeries project_ids, (project_id, cb) -> + logger.log project_id: project_id, "queue delete of project via http" + ProjectManager.queueFlushAndDeleteProject project_id, cb + , (error) -> return next(error) if error? - logger.log project_id: project_id, "deleted project via http" res.send 204 # No Content acceptChanges: (req, res, next = (error) ->) -> @@ -198,4 +215,16 @@ module.exports = HttpController = else res.send project_ids - + flushQueuedProjects: (req, res, next = (error) ->) -> + res.setTimeout(5 * 60 * 1000) + options = + limit : req.query.limit || 1000 + timeout: 5 * 60 * 1000 + dryRun : req.query.dryRun || false + min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 + DeleteQueueManager.flushAndDeleteOldProjects options, (err, project_ids)-> + if err? + logger.err err:err, "error flushing old projects" + res.send 500 + else + res.send project_ids diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 4271186b7a..ca077ff60b 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -72,6 +72,22 @@ module.exports = ProjectManager = else callback(null) + queueFlushAndDeleteProject: (project_id, callback = (error) ->) -> + RedisManager.queueFlushAndDeleteProject project_id, (error) -> + if error? 
+ logger.error {project_id: project_id, error:error}, "error adding project to flush and delete queue" + return callback(error) + Metrics.inc "queued-delete" + callback() + + getProjectDocsTimestamps: (project_id, callback = (error) ->) -> + RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> + return callback(error) if error? + return callback() if !doc_ids?.length + RedisManager.getDocTimestamps doc_ids, (error, timestamps) -> + return callback(error) if error? + callback(null, timestamps) + getProjectDocsAndFlushIfOld: (project_id, projectStateHash, excludeVersions = {}, _callback = (error, docs) ->) -> timer = new Metrics.Timer("projectManager.getProjectDocsAndFlushIfOld") callback = (args...) -> diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 82b6caccd7..799d5a7fc1 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -287,6 +287,30 @@ module.exports = RedisManager = getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> rclient.smembers keys.docsInProject(project_id: project_id), callback + getDocTimestamps: (doc_ids, callback = (error, result) ->) -> + # get lastupdatedat timestamps for an array of doc_ids + multi = rclient.multi() + for doc_id in doc_ids + multi.get keys.lastUpdatedAt(doc_id: doc_id) + multi.exec callback + + queueFlushAndDeleteProject: (project_id, callback) -> + rclient.zadd keys.flushAndDeleteQueue(), Date.now(), project_id, callback + + getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp)->) -> + # find the oldest queued flsus + rclient.zrangebyscore keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, (err, reply) -> + return callback(err) if err? + return callback() if !reply?.length + multi = rclient.multi() + multi.zrange keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES" + multi.zremrangebyrank keys.flushAndDeleteQueue(), 0, 0 + multi.exec (err, reply) -> + return callback(err) if err? + return callback() if !reply?.length + [key, timestamp] = reply[0] + callback(null, key, timestamp) + _serializeRanges: (ranges, callback = (error, serializedRanges) ->) -> jsonRanges = JSON.stringify(ranges) if jsonRanges? 
and jsonRanges.length > MAX_RANGES_SIZE diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 6801c62a8b..3a376e3e66 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -80,6 +80,7 @@ module.exports = lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:{#{doc_id}}" lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:{#{doc_id}}" pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" + flushAndDeleteQueue: () -> "DocUpdaterFlushAndDeleteQueue" redisOptions: keepAlive: 100 diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index cb1d3495d8..7012f22fdb 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -97,7 +97,7 @@ describe "Deleting a project", -> it "should flush each doc in project history", -> MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true - describe "with the shutdown=true parameter from realtime", -> + describe "with the background=true parameter from realtime", -> before (done) -> sinon.spy MockWebApi, "setDocument" sinon.spy MockTrackChangesApi, "flushDoc" @@ -111,7 +111,11 @@ describe "Deleting a project", -> setTimeout () => DocUpdaterClient.deleteProjectOnShutdown @project_id, (error, res, body) => @statusCode = res.statusCode - done() + # after deleting the project and putting it in the queue, flush the queue + setTimeout () -> + DocUpdaterClient.flushOldProjects (error, res, body) => + done() + , 100 , 200 after -> diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 9525cc27e9..17067b5bf4 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -78,6 +78,9 @@ module.exports = DocUpdaterClient = deleteProjectOnShutdown: (project_id, callback = () ->) -> request.del "http://localhost:3003/project/#{project_id}?background=true&shutdown=true", callback + flushOldProjects: (callback = () ->) -> + request.get "http://localhost:3003/flush_queued_projects?min_delete_age=1", callback + acceptChange: (project_id, doc_id, change_id, callback = () ->) -> request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/change/#{change_id}/accept", callback diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index c1f5c5eca8..b8ace494f5 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -14,6 +14,7 @@ describe "HttpController", -> "./ProjectManager": @ProjectManager = {} "logger-sharelatex" : @logger = { log: sinon.stub() } "./ProjectFlusher": {flushAllProjects:->} + "./DeleteQueueManager": @DeleteQueueManager = {} "./Metrics": @Metrics = {} "./Errors" : Errors @Metrics.Timer = class Timer @@ -343,15 +344,15 @@ describe "HttpController", -> it "should time the request", -> @Metrics.Timer::done.called.should.equal true - describe "with 
the shutdown=true option from realtime", -> + describe "with the background=true option from realtime", -> beforeEach -> - @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2) + @ProjectManager.queueFlushAndDeleteProject = sinon.stub().callsArgWith(1) @req.query = {background:true, shutdown:true} @HttpController.deleteProject(@req, @res, @next) - it "should pass the skip_history_flush option when flushing the project", -> - @ProjectManager.flushAndDeleteProjectWithLocks - .calledWith(@project_id, {background:true, skip_history_flush:true}) + it "should queue the flush and delete", -> + @ProjectManager.queueFlushAndDeleteProject + .calledWith(@project_id) .should.equal true describe "when an error occurs", -> From 83dd43b809c34b48fed8d37c5bb82bdfdd2211a0 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 25 Sep 2019 17:04:36 +0100 Subject: [PATCH 495/769] add metric for queue length --- .../document-updater/app/coffee/DeleteQueueManager.coffee | 4 +++- services/document-updater/app/coffee/RedisManager.coffee | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index 0a122369c3..cad65ae2f5 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -35,9 +35,11 @@ module.exports = DeleteQueueManager = logger.log "hit count limit on flushing old projects" return callback() cutoffTime = now - options.min_delete_age - RedisManager.getNextProjectToFlushAndDelete cutoffTime, (err, project_id, flushTimestamp) -> + RedisManager.getNextProjectToFlushAndDelete cutoffTime, (err, project_id, flushTimestamp, queueLength) -> return callback(err) if err? return callback() if !project_id? + logger.log {project_id, queueLength: queueLength}, "flushing queued project" + metrics.globalGauge "queued-flush-backlog", queueLength flushProjectIfNotModified project_id, flushTimestamp, (err, flushed) -> count++ if flushed flushNextProject() diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 799d5a7fc1..f5530b99fd 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -298,18 +298,20 @@ module.exports = RedisManager = rclient.zadd keys.flushAndDeleteQueue(), Date.now(), project_id, callback getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp)->) -> - # find the oldest queued flsus + # find the oldest queued flush rclient.zrangebyscore keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, (err, reply) -> return callback(err) if err? return callback() if !reply?.length multi = rclient.multi() multi.zrange keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES" multi.zremrangebyrank keys.flushAndDeleteQueue(), 0, 0 + multi.zcard keys.flushAndDeleteQueue() multi.exec (err, reply) -> return callback(err) if err?
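+ # defensive guard: bail out if the transaction returned no results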
return callback() if !reply?.length [key, timestamp] = reply[0] - callback(null, key, timestamp) + queueLength = reply[2] + callback(null, key, timestamp, queueLength) _serializeRanges: (ranges, callback = (error, serializedRanges) ->) -> jsonRanges = JSON.stringify(ranges) From b49621b3e9da275018fadabe929c9729d382acf3 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 10:14:49 +0100 Subject: [PATCH 496/769] add comments --- .../app/coffee/DeleteQueueManager.coffee | 15 +++++++++++++++ .../app/coffee/RedisManager.coffee | 8 +++++--- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index cad65ae2f5..835f718615 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -4,6 +4,21 @@ logger = require "logger-sharelatex" metrics = require "./Metrics" async = require "async" +# Maintain a sorted set of project flushAndDelete requests, ordered by timestamp +# (ZADD), and process them from oldest to newest. A flushAndDelete request comes +# from real-time and is triggered when a user leaves a project. +# +# The aim is to remove the project from redis 5 minutes after the last request +# if there has been no activity (document updates) in that time. If there is +# activity we can expect a further flushAndDelete request when the editing user +# leaves the project. +# +# If a new flushAndDelete request comes in while an existing request is already +# in the queue we update the timestamp as we can postpone flushing further. +# +# Documents are processed by checking the queue, seeing if the first entry is +# older than 5 minutes, and popping it from the queue in that case. + module.exports = DeleteQueueManager = flushAndDeleteOldProjects: (options, callback) -> startTime = Date.now() diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index f5530b99fd..7f62fe1e7e 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -295,17 +295,19 @@ module.exports = RedisManager = multi.exec callback queueFlushAndDeleteProject: (project_id, callback) -> + # store the project id in a sorted set ordered by time rclient.zadd keys.flushAndDeleteQueue(), Date.now(), project_id, callback getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp)->) -> - # find the oldest queued flush + # find the oldest queued flush that is before the cutoff time rclient.zrangebyscore keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, (err, reply) -> return callback(err) if err? - return callback() if !reply?.length + return callback() if !reply?.length # return if no projects ready to be processed + # pop the oldest entry (get and remove in a multi) multi = rclient.multi() multi.zrange keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES" multi.zremrangebyrank keys.flushAndDeleteQueue(), 0, 0 - multi.zcard keys.flushAndDeleteQueue() + multi.zcard keys.flushAndDeleteQueue() # the total length of the queue (for metrics) multi.exec (err, reply) -> return callback(err) if err? 
return callback() if !reply?.length From 9f358ead9f304b066438360e88833130b3123c60 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 10:55:05 +0100 Subject: [PATCH 497/769] add an acceptance test for flush with queue processing --- .../coffee/DeletingAProjectTests.coffee | 36 ++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 7012f22fdb..45aab847e3 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -97,7 +97,41 @@ describe "Deleting a project", -> it "should flush each doc in project history", -> MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true - describe "with the background=true parameter from realtime", -> + describe "with the background=true parameter from realtime and no request to flush the queue", -> + before (done) -> + sinon.spy MockWebApi, "setDocument" + sinon.spy MockTrackChangesApi, "flushDoc" + sinon.spy MockProjectHistoryApi, "flushProject" + + async.series @docs.map((doc) => + (callback) => + DocUpdaterClient.preloadDoc @project_id, doc.id, callback + ), (error) => + throw error if error? + setTimeout () => + DocUpdaterClient.deleteProjectOnShutdown @project_id, (error, res, body) => + @statusCode = res.statusCode + done() + , 200 + + after -> + MockWebApi.setDocument.restore() + MockTrackChangesApi.flushDoc.restore() + MockProjectHistoryApi.flushProject.restore() + + it "should return a 204 status code", -> + @statusCode.should.equal 204 + + it "should not send any documents to the web api", -> + MockWebApi.setDocument.called.should.equal false + + it "should not flush any docs in track changes", -> + MockTrackChangesApi.flushDoc.called.should.equal false + + it "should not flush to project history", -> + MockProjectHistoryApi.flushProject.called.should.equal false + + describe "with the background=true parameter from realtime and a request to flush the queue", -> before (done) -> sinon.spy MockWebApi, "setDocument" sinon.spy MockTrackChangesApi, "flushDoc" From eae4b352ca6d526b220084faa8e9d704c39020b7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 14:59:03 +0100 Subject: [PATCH 498/769] remove unnecessary check --- services/document-updater/app/coffee/DeleteQueueManager.coffee | 2 +- services/document-updater/app/coffee/ProjectManager.coffee | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index 835f718615..3c8af6cd84 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -31,7 +31,7 @@ module.exports = DeleteQueueManager = logger.log {project_id}, "skipping flush of queued project - no timestamps" return cb() # are any of the timestamps newer than the time the project was flushed? 
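+ # e.g. flushTimestamp = 1000 and timestamps = [800, 1200]: 1200 is newer, so the delete is skipped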
- for timestamp in timestamps or [] when timestamp > flushTimestamp + for timestamp in timestamps when timestamp > flushTimestamp metrics.inc "queued-delete-skipped" logger.debug {project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete" return cb() diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index ca077ff60b..0d57687668 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -83,7 +83,7 @@ module.exports = ProjectManager = getProjectDocsTimestamps: (project_id, callback = (error) ->) -> RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> return callback(error) if error? - return callback() if !doc_ids?.length + return callback(null, []) if !doc_ids?.length RedisManager.getDocTimestamps doc_ids, (error, timestamps) -> return callback(error) if error? callback(null, timestamps) From a709a0adaa954e2a634ac0ad571a4cddf196f4d0 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 15:05:38 +0100 Subject: [PATCH 499/769] for simplicity keep the cutoff time the same while flushing the queue --- services/document-updater/app/coffee/DeleteQueueManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index 3c8af6cd84..db69fa4b2f 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -22,6 +22,7 @@ async = require "async" module.exports = DeleteQueueManager = flushAndDeleteOldProjects: (options, callback) -> startTime = Date.now() + cutoffTime = startTime - options.min_delete_age count = 0 flushProjectIfNotModified = (project_id, flushTimestamp, cb) -> @@ -49,7 +50,6 @@ module.exports = DeleteQueueManager = if count > options.limit logger.log "hit count limit on flushing old projects" return callback() - cutoffTime = now - options.min_delete_age RedisManager.getNextProjectToFlushAndDelete cutoffTime, (err, project_id, flushTimestamp, queueLength) -> return callback(err) if err? return callback() if !project_id? From ba35c73cb65491e2d579492626b987d7c84a88d0 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 15:18:10 +0100 Subject: [PATCH 500/769] add comment about ZPOPMIN --- services/document-updater/app/coffee/RedisManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 7f62fe1e7e..1490ac87f4 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -305,6 +305,7 @@ module.exports = RedisManager = return callback() if !reply?.length # return if no projects ready to be processed # pop the oldest entry (get and remove in a multi) multi = rclient.multi() + # Poor man's version of ZPOPMIN, which is only available in Redis 5. 
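+ # (A sketch, assuming a Redis >= 5 server: the get-and-remove below could
+ # then be a single atomic call, e.g.
+ #   rclient.zpopmin keys.flushAndDeleteQueue(), (err, [member, score]) ->
+ # which returns and removes the lowest-scored entry in one step.)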
multi.zrange keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES" multi.zremrangebyrank keys.flushAndDeleteQueue(), 0, 0 multi.zcard keys.flushAndDeleteQueue() # the total length of the queue (for metrics) From fc62abfcfafb0f16d8250708457cdb02e4126f18 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 15:46:14 +0100 Subject: [PATCH 501/769] run flush of queued projects in the background --- .../app/coffee/DeleteQueueManager.coffee | 6 +++--- .../document-updater/app/coffee/HttpController.coffee | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index db69fa4b2f..568d71bcdc 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -46,13 +46,13 @@ module.exports = DeleteQueueManager = now = Date.now() if now - startTime > options.timeout logger.log "hit time limit on flushing old projects" - return callback() + return callback(null, count) if count > options.limit logger.log "hit count limit on flushing old projects" - return callback() + return callback(null, count) RedisManager.getNextProjectToFlushAndDelete cutoffTime, (err, project_id, flushTimestamp, queueLength) -> return callback(err) if err? - return callback() if !project_id? + return callback(null, count) if !project_id? logger.log {project_id, queueLength: queueLength}, "flushing queued project" metrics.globalGauge "queued-flush-backlog", queueLength flushProjectIfNotModified project_id, flushTimestamp, (err, flushed) -> diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 96595c3449..537dbd4903 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -216,15 +216,15 @@ module.exports = HttpController = res.send project_ids flushQueuedProjects: (req, res, next = (error) ->) -> - res.setTimeout(5 * 60 * 1000) options = limit : req.query.limit || 1000 timeout: 5 * 60 * 1000 dryRun : req.query.dryRun || false min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 - DeleteQueueManager.flushAndDeleteOldProjects options, (err, project_ids)-> + res.send 204 + # run the flush in the background + DeleteQueueManager.flushAndDeleteOldProjects options, (err, flushed)-> if err? 
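+ # the 204 above has already been sent, so there is nothing to return here; just log the outcome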
logger.err err:err, "error flushing old projects" - res.send 500 - else - res.send project_ids + else + logger.log {flushed: flushed}, "flush of queued projects completed" From 8cdc8c410aa63f47eb49c5f58b8539214e34a85a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 15:46:45 +0100 Subject: [PATCH 502/769] fix error logging --- services/document-updater/app/coffee/DeleteQueueManager.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index 568d71bcdc..e9f2346909 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -38,7 +38,8 @@ module.exports = DeleteQueueManager = return cb() logger.log {project_id, flushTimestamp}, "flushing queued project" ProjectManager.flushAndDeleteProjectWithLocks project_id, {skip_history_flush: true}, (err) -> - logger.err {project_id, err}, "error flushing queued project" + if err? + logger.err {project_id, err}, "error flushing queued project" metrics.inc "queued-delete-completed" return cb(null, true) From 3bc176259b450449ea7bb7322a4ca1b6bf8f03e9 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 15:46:54 +0100 Subject: [PATCH 503/769] fix log line --- services/document-updater/app/coffee/HistoryManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 7cfafa9ba0..6b68b4b676 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -29,7 +29,7 @@ module.exports = HistoryManager = flushProjectChanges: (project_id, options, callback = (error) ->) -> return callback() if !Settings.apis?.project_history?.enabled if options.skip_history_flush - logger.log {project_id}, "skipping flush of project history from realtime shutdown" + logger.log {project_id}, "skipping flush of project history" return callback() url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush" qs = {} From b7f3b848afee67b5dfc0c46348abb6f5c7b635ac Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 15:50:55 +0100 Subject: [PATCH 504/769] remove unused dryRun option Co-Authored-By: Jakob Ackermann --- services/document-updater/app/coffee/HttpController.coffee | 1 - 1 file changed, 1 deletion(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 537dbd4903..cf1d5f9aca 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -219,7 +219,6 @@ module.exports = HttpController = options = limit : req.query.limit || 1000 timeout: 5 * 60 * 1000 - dryRun : req.query.dryRun || false min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 res.send 204 # run the flush in the background From 0f0682df43aba15171601608fb76214c012f1e04 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 26 Sep 2019 15:58:22 +0100 Subject: [PATCH 505/769] allow flush to complete in acceptance test --- .../test/acceptance/coffee/DeletingAProjectTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee 
b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 45aab847e3..91e4378dc2 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -148,7 +148,7 @@ describe "Deleting a project", -> # after deleting the project and putting it in the queue, flush the queue setTimeout () -> DocUpdaterClient.flushOldProjects (error, res, body) => - done() + setTimeout done, 1000 # allow time for the flush to complete , 100 , 200 From 7561e05660d5c6ec509211e3862ed6e0f405e994 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 27 Sep 2019 10:39:56 +0100 Subject: [PATCH 506/769] check timestamps array length --- services/document-updater/app/coffee/DeleteQueueManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index e9f2346909..f1af0ba244 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -28,7 +28,7 @@ module.exports = DeleteQueueManager = flushProjectIfNotModified = (project_id, flushTimestamp, cb) -> ProjectManager.getProjectDocsTimestamps project_id, (err, timestamps) -> return callback(err) if err? - if !timestamps? + if timestamps.length == 0 logger.log {project_id}, "skipping flush of queued project - no timestamps" return cb() # are any of the timestamps newer than the time the project was flushed? From 260923f291ac65d31ae64cefaf0da84eb6dc8237 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 27 Sep 2019 10:46:24 +0100 Subject: [PATCH 507/769] keep flushQueuedProjects in the foreground --- services/document-updater/app/coffee/HttpController.coffee | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index cf1d5f9aca..e2e2e712bc 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -216,14 +216,15 @@ module.exports = HttpController = res.send project_ids flushQueuedProjects: (req, res, next = (error) ->) -> + res.setTimeout(10 * 60 * 1000) options = limit : req.query.limit || 1000 timeout: 5 * 60 * 1000 min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 - res.send 204 - # run the flush in the background DeleteQueueManager.flushAndDeleteOldProjects options, (err, flushed)-> if err? 
logger.err err:err, "error flushing old projects" - else + res.send 500 + else logger.log {flushed: flushed}, "flush of queued projects completed" + res.send {flushed: flushed} From c5a9105c33f9008ed00461a668aad880c7a1fe27 Mon Sep 17 00:00:00 2001 From: John Lees-Miller Date: Sat, 28 Sep 2019 11:07:33 +0100 Subject: [PATCH 508/769] Update config --- services/document-updater/config/settings.defaults.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 6801c62a8b..a775dac5d3 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -11,8 +11,8 @@ module.exports = apis: web: url: "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}" - user: "sharelatex" - pass: "password" + user: process.env['WEB_API_USER'] or "sharelatex" + pass: process.env['WEB_API_PASSWORD'] or "password" trackchanges: url: "http://#{process.env["TRACK_CHANGES_HOST"] or "localhost"}:3015" project_history: From 33fadf51c1a8aa103d711db41c6931d4e0283244 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 30 Sep 2019 13:41:47 +0100 Subject: [PATCH 509/769] fix getDocTimestamps for multiple docs --- services/document-updater/app/coffee/RedisManager.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 1490ac87f4..842f5d545b 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -4,6 +4,7 @@ logger = require('logger-sharelatex') metrics = require('./Metrics') Errors = require "./Errors" crypto = require "crypto" +async = require "async" ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" # Sometimes Redis calls take an unexpectedly long time. We have to be @@ -289,10 +290,9 @@ module.exports = RedisManager = getDocTimestamps: (doc_ids, callback = (error, result) ->) -> # get lastupdatedat timestamps for an array of doc_ids - multi = rclient.multi() - for doc_id in doc_ids - multi.get keys.lastUpdatedAt(doc_id: doc_id) - multi.exec callback + async.mapSeries doc_ids, (doc_id, cb) -> + rclient.get keys.lastUpdatedAt(doc_id: doc_id), cb + , callback queueFlushAndDeleteProject: (project_id, callback) -> # store the project id in a sorted set ordered by time From 73b4262186e51b5117d6d110831ddf54bd245409 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 30 Sep 2019 15:35:05 +0100 Subject: [PATCH 510/769] add continuous background flush --- services/document-updater/app.coffee | 5 +++++ .../app/coffee/DeleteQueueManager.coffee | 12 +++++++++++- .../document-updater/config/settings.defaults.coffee | 2 ++ 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 2962860027..5eb47d9c9e 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -14,6 +14,7 @@ if Settings.sentry?.dsn? 
RedisManager = require('./app/js/RedisManager') DispatchManager = require('./app/js/DispatchManager') +DeleteQueueManager = require('./app/js/DeleteQueueManager') Errors = require "./app/js/Errors" HttpController = require "./app/js/HttpController" mongojs = require "./app/js/mongojs" @@ -146,3 +147,7 @@ module.exports = app for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT'] process.on signal, shutdownCleanly(signal) + +if Settings.continuousBackgroundFlush + logger.info "Starting continuous background flush" + DeleteQueueManager.startBackgroundFlush() \ No newline at end of file diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index f1af0ba244..3fc6b3f644 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -1,3 +1,4 @@ +Settings = require('settings-sharelatex') RedisManager = require "./RedisManager" ProjectManager = require "./ProjectManager" logger = require "logger-sharelatex" @@ -60,4 +61,13 @@ module.exports = DeleteQueueManager = count++ if flushed flushNextProject() - flushNextProject() \ No newline at end of file + flushNextProject() + + startBackgroundFlush: () -> + doFlush = () -> + if Settings.shuttingDown + logger.warn "discontinuing background flush due to shutdown" + return + DeleteQueueManager.flushAndDeleteOldProjects {timeout:1000,min_delete_age:3*60*1000,limit:1000}, () -> + setTimeout doFlush, 10 + doFlush() diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index bc83433484..327bf98ee4 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -95,3 +95,5 @@ module.exports = dsn: process.env.SENTRY_DSN publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] or false + + continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false From a32495d2b4184b7ec7256cb0258bf9bd323e02f2 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Oct 2019 14:09:41 +0100 Subject: [PATCH 511/769] make background flush more adaptive --- .../document-updater/app/coffee/DeleteQueueManager.coffee | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index 3fc6b3f644..731cdf77b7 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -64,10 +64,12 @@ module.exports = DeleteQueueManager = flushNextProject() startBackgroundFlush: () -> + SHORT_DELAY = 10 + LONG_DELAY = 1000 doFlush = () -> if Settings.shuttingDown logger.warn "discontinuing background flush due to shutdown" return - DeleteQueueManager.flushAndDeleteOldProjects {timeout:1000,min_delete_age:3*60*1000,limit:1000}, () -> - setTimeout doFlush, 10 + DeleteQueueManager.flushAndDeleteOldProjects {timeout:1000,min_delete_age:3*60*1000,limit:1000}, (err, flushed) -> + setTimeout doFlush, (if flushed > 10 then SHORT_DELAY else LONG_DELAY) doFlush() From 2c22a60052b04b47756f5c461164bb49c9f2e4b4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Oct 2019 15:01:20 +0100 Subject: [PATCH 512/769] add random jitter to cutoff time --- 
services/document-updater/app/coffee/DeleteQueueManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index 731cdf77b7..8cd4a66c21 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -23,7 +23,7 @@ async = require "async" module.exports = DeleteQueueManager = flushAndDeleteOldProjects: (options, callback) -> startTime = Date.now() - cutoffTime = startTime - options.min_delete_age + cutoffTime = startTime - options.min_delete_age + 100 * (Math.random() - 0.5) count = 0 flushProjectIfNotModified = (project_id, flushTimestamp, cb) -> From 2845b23b7030c227b226511ca88ade019bb161a7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Oct 2019 15:01:53 +0100 Subject: [PATCH 513/769] add smoothing of delete spikes --- services/document-updater/app/coffee/RedisManager.coffee | 5 +++-- services/document-updater/config/settings.defaults.coffee | 2 ++ .../test/acceptance/coffee/DeletingAProjectTests.coffee | 5 ++--- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 1490ac87f4..63086320d7 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -295,8 +295,9 @@ module.exports = RedisManager = multi.exec callback queueFlushAndDeleteProject: (project_id, callback) -> - # store the project id in a sorted set ordered by time - rclient.zadd keys.flushAndDeleteQueue(), Date.now(), project_id, callback + # store the project id in a sorted set ordered by time with a random offset to smooth out spikes + SMOOTHING_OFFSET = if Settings.smoothingOffset > 0 then Math.round(Settings.smoothingOffset * Math.random()) else 0 + rclient.zadd keys.flushAndDeleteQueue(), Date.now() + SMOOTHING_OFFSET, project_id, callback getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp)->) -> # find the oldest queued flush that is before the cutoff time diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 327bf98ee4..2b070e562c 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -97,3 +97,5 @@ module.exports = publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] or false continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false + + smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 91e4378dc2..6c1f7fe8ed 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -147,9 +147,8 @@ describe "Deleting a project", -> @statusCode = res.statusCode # after deleting the project and putting it in the queue, flush the queue setTimeout () -> - DocUpdaterClient.flushOldProjects (error, res, body) => - setTimeout done, 1000 # allow time for the flush to complete - , 100 + DocUpdaterClient.flushOldProjects done + , 
2000 , 200 after -> From ae3ebf2db6411b9ba5121b4bbb3d388151c24a0b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Oct 2019 15:02:27 +0100 Subject: [PATCH 514/769] start background flush after http server has started --- services/document-updater/app.coffee | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 5eb47d9c9e..82598bd93f 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -143,11 +143,12 @@ host = Settings.internal.documentupdater.host or "localhost" if !module.parent # Called directly app.listen port, host, -> logger.info "Document-updater starting up, listening on #{host}:#{port}" + if Settings.continuousBackgroundFlush + logger.info "Starting continuous background flush" + DeleteQueueManager.startBackgroundFlush() + module.exports = app for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT'] process.on signal, shutdownCleanly(signal) -if Settings.continuousBackgroundFlush - logger.info "Starting continuous background flush" - DeleteQueueManager.startBackgroundFlush() \ No newline at end of file From 0c14b7d2f8b98ceeb7cf4585bf7b714b06cf9249 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 1 Oct 2019 15:06:01 +0100 Subject: [PATCH 515/769] add comment about background flush limit --- .../document-updater/app/coffee/DeleteQueueManager.coffee | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index 8cd4a66c21..985222df69 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -70,6 +70,10 @@ module.exports = DeleteQueueManager = if Settings.shuttingDown logger.warn "discontinuing background flush due to shutdown" return - DeleteQueueManager.flushAndDeleteOldProjects {timeout:1000,min_delete_age:3*60*1000,limit:1000}, (err, flushed) -> + DeleteQueueManager.flushAndDeleteOldProjects { + timeout:1000, + min_delete_age:3*60*1000, + limit:1000 # high value, to ensure we always flush enough projects + }, (err, flushed) -> setTimeout doFlush, (if flushed > 10 then SHORT_DELAY else LONG_DELAY) doFlush() From d82b180b7624e97ea27dc59e9a8700e3501674f7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 3 Oct 2019 04:01:56 +0100 Subject: [PATCH 516/769] avoid project history queues building up with deferred flush --- .../document-updater/app/coffee/DeleteQueueManager.coffee | 2 +- .../test/acceptance/coffee/DeletingAProjectTests.coffee | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.coffee index 985222df69..9e3f1c176e 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.coffee +++ b/services/document-updater/app/coffee/DeleteQueueManager.coffee @@ -38,7 +38,7 @@ module.exports = DeleteQueueManager = logger.debug {project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete" return cb() logger.log {project_id, flushTimestamp}, "flushing queued project" - ProjectManager.flushAndDeleteProjectWithLocks project_id, {skip_history_flush: true}, (err) -> + ProjectManager.flushAndDeleteProjectWithLocks project_id, {skip_history_flush: false}, (err) -> if err? 
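+ # a failed flush is logged but does not abort processing of the rest of the queue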
logger.err {project_id, err}, "error flushing queued project" metrics.inc "queued-delete-completed" diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee index 6c1f7fe8ed..cddc008bc0 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee @@ -169,6 +169,6 @@ describe "Deleting a project", -> for doc in @docs MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal true - it "should not flush to project history", -> - MockProjectHistoryApi.flushProject.called.should.equal false + it "should flush to project history", -> + MockProjectHistoryApi.flushProject.called.should.equal true From e9efd6b93b01c51d0342aee58b7ab2bbab9ef7c4 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 3 Oct 2019 11:00:24 +0100 Subject: [PATCH 517/769] logs out when a redis event occurs --- services/document-updater/app.coffee | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 82598bd93f..645eb8cc63 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -138,6 +138,15 @@ shutdownCleanly = (signal) -> process.exit() , 10000 +watchForEvent = (eventName)-> + docUpdaterRedisClient.on eventName, (e)-> + console.log "redis event: #{eventName} #{e}" + +events = ["connect", "ready", "error", "close", "reconnecting", "end"] +for eventName in events + watchForEvent(eventName) + + port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 3003 host = Settings.internal.documentupdater.host or "localhost" if !module.parent # Called directly From 18ccd112241127d3090aa1975471e1ac74086ae9 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 4 Oct 2019 11:32:08 +0100 Subject: [PATCH 518/769] update to ioredis 4.14.1 --- services/document-updater/npm-shrinkwrap.json | 20 ++++++++++++------- services/document-updater/package.json | 4 ++-- 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 32ad981a08..604dbdeb1c 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -404,7 +404,8 @@ "cluster-key-slot": { "version": "1.0.8", "from": "cluster-key-slot@>=1.0.6 <2.0.0", - "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.8.tgz" + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.8.tgz", + "dev": true }, "co": { "version": "4.6.0", @@ -779,10 +780,15 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" }, "ioredis": { - "version": "4.11.2", - "from": "ioredis@>=4.11.1 <4.12.0", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.2.tgz", + "version": "4.14.1", + "from": "ioredis@>=4.14.1 <4.15.0", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.14.1.tgz", "dependencies": { + "cluster-key-slot": { + "version": "1.1.0", + "from": "cluster-key-slot@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz" + }, "debug": { "version": "4.1.1", "from": "debug@>=4.1.1 <5.0.0", @@ -1380,9 +1386,9 @@ "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz" }, "redis-sharelatex": { - "version": "1.0.10", - 
"from": "redis-sharelatex@1.0.10", - "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.10.tgz", + "version": "1.0.11", + "from": "redis-sharelatex@1.0.11", + "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.11.tgz", "dependencies": { "coffee-script": { "version": "1.8.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 040febca1d..db50ca9c04 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -28,7 +28,7 @@ "lynx": "0.0.11", "metrics-sharelatex": "^2.2.0", "mongojs": "^2.6.0", - "redis-sharelatex": "^1.0.10", + "redis-sharelatex": "^1.0.11", "request": "2.25.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", @@ -43,4 +43,4 @@ "mocha": "^5.0.1", "timekeeper": "^2.0.0" } -} +} \ No newline at end of file From 51a821c03c3c629f44d12fad9055ac9854b40767 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 4 Oct 2019 11:51:37 +0100 Subject: [PATCH 519/769] remove old unused ioredis keepalive option --- .../document-updater/config/settings.defaults.coffee | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 2b070e562c..ca7daae82a 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -25,8 +25,6 @@ module.exports = host: process.env['PUBSUB_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost" port: process.env['PUBSUB_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379" password: process.env["PUBSUB_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - redisOptions: - keepAlive: 100 history: port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" @@ -35,8 +33,6 @@ module.exports = key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" - redisOptions: - keepAlive: 100 project_history: port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" @@ -45,8 +41,6 @@ module.exports = key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" - redisOptions: - keepAlive: 100 lock: port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" @@ -54,15 +48,11 @@ module.exports = password: process.env["LOCK_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" key_schema: blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - redisOptions: - keepAlive: 100 documentupdater: port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - redisOptions: - keepAlive: 100 key_schema: blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" docLines: ({doc_id}) -> "doclines:{#{doc_id}}" @@ -81,8 +71,6 @@ module.exports = lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:{#{doc_id}}" pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" flushAndDeleteQueue: () -> "DocUpdaterFlushAndDeleteQueue" - redisOptions: - keepAlive: 100 max_doc_length: 2 * 1024 * 1024 # 2mb From 5a62632cfdfe60c8acc140ab1c5024b75405a25b Mon Sep 17 00:00:00 2001 From: Brian Gough 
Date: Fri, 4 Oct 2019 11:53:14 +0100 Subject: [PATCH 520/769] add ioredis option maxRetriesPerRequest: 0 see https://github.com/luin/ioredis/issues/965 --- services/document-updater/config/settings.defaults.coffee | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index ca7daae82a..eb647dab7c 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -25,11 +25,13 @@ module.exports = host: process.env['PUBSUB_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost" port: process.env['PUBSUB_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379" password: process.env["PUBSUB_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - + maxRetriesPerRequest: 0 + history: port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" + maxRetriesPerRequest: 0 key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" @@ -38,6 +40,7 @@ module.exports = port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" + maxRetriesPerRequest: 0 key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" @@ -46,6 +49,7 @@ module.exports = port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["LOCK_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["LOCK_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" + maxRetriesPerRequest: 0 key_schema: blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" @@ -53,6 +57,7 @@ module.exports = port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" + maxRetriesPerRequest: 0 key_schema: blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" docLines: ({doc_id}) -> "doclines:{#{doc_id}}" From 21a912684798dccddcdcff56aee2f02fe82f0f78 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 16 Oct 2019 08:44:08 +0100 Subject: [PATCH 521/769] set maxRetriesPerRequest from environment variable --- .../document-updater/config/settings.defaults.coffee | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index eb647dab7c..6711b3c3bf 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -25,13 +25,13 @@ module.exports = host: process.env['PUBSUB_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost" port: process.env['PUBSUB_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379" password: process.env["PUBSUB_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: 0 + 
maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") history: port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: 0 + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") key_schema: uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" @@ -40,7 +40,7 @@ module.exports = port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: 0 + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" @@ -49,7 +49,7 @@ module.exports = port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["LOCK_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["LOCK_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: 0 + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") key_schema: blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" @@ -57,7 +57,7 @@ module.exports = port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: 0 + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") key_schema: blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" docLines: ({doc_id}) -> "doclines:{#{doc_id}}" From 1620956e2e4ddb7a6188c73b7ce04f593f3a5f7d Mon Sep 17 00:00:00 2001 From: Nate Stemen Date: Fri, 25 Oct 2019 12:50:33 -0400 Subject: [PATCH 522/769] replace private link with public one --- services/document-updater/.github/PULL_REQUEST_TEMPLATE.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md b/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md index ed25ee83c1..12bb2eeb3f 100644 --- a/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md +++ b/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md @@ -1,4 +1,7 @@ - + + + + ### Description From 1d3a1d22be4f5865be6bbe5843d7d12b920f96cc Mon Sep 17 00:00:00 2001 From: Nate Stemen Date: Fri, 25 Oct 2019 12:50:45 -0400 Subject: [PATCH 523/769] bump build script to 1.1.24 --- services/document-updater/Makefile | 4 ++-- services/document-updater/buildscript.txt | 4 +++- services/document-updater/docker-compose.ci.yml | 2 +- services/document-updater/docker-compose.yml | 2 +- services/document-updater/package.json | 2 +- 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index ce7210ccf3..73f63edba8 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, 
do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -35,7 +35,7 @@ test_clean: $(DOCKER_COMPOSE) down -v -t 0 test_acceptance_pre_run: - @[ ! -f test/acceptance/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run + @[ ! -f test/acceptance/js/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index ebef72a5cc..a9a1b603d3 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -5,4 +5,6 @@ document-updater --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops --build-target=docker ---script-version=1.1.21 +--script-version=1.1.24 +--env-pass-through= +--public-repo=True diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index d2bcca9ec6..c78d90e8ed 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 version: "2" diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 02ccd930ba..6dc90009ca 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 version: "2" diff --git a/services/document-updater/package.json b/services/document-updater/package.json index db50ca9c04..886fa3515c 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -43,4 +43,4 @@ "mocha": "^5.0.1", "timekeeper": "^2.0.0" } -} \ No newline at end of file +} From ccc072e9da55faefb6ceaa213d55ca2a39c3b0f1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2019 13:38:50 +0000 Subject: [PATCH 524/769] Bump express from 3.3.4 to 3.11.0 Bumps [express](https://github.com/expressjs/express) from 3.3.4 to 3.11.0. 
- [Release notes](https://github.com/expressjs/express/releases) - [Changelog](https://github.com/expressjs/express/blob/master/History.md) - [Commits](https://github.com/expressjs/express/compare/3.3.4...3.11.0) Signed-off-by: dependabot[bot] --- services/document-updater/npm-shrinkwrap.json | 2583 ++++++++++++----- services/document-updater/package.json | 2 +- 2 files changed, 1905 insertions(+), 680 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index 604dbdeb1c..b508969560 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -1,1805 +1,3030 @@ { "name": "document-updater-sharelatex", "version": "0.1.4", + "lockfileVersion": 1, + "requires": true, "dependencies": { "@google-cloud/common": { "version": "0.32.1", - "from": "@google-cloud/common@>=0.32.0 <0.33.0", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", + "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", + "requires": { + "@google-cloud/projectify": "^0.3.3", + "@google-cloud/promisify": "^0.4.0", + "@types/request": "^2.48.1", + "arrify": "^2.0.0", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^3.1.1", + "pify": "^4.0.1", + "retry-request": "^4.0.0", + "teeny-request": "^3.11.3" + }, "dependencies": { "extend": { "version": "3.0.2", - "from": "extend@>=3.0.2 <4.0.0", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" } } }, "@google-cloud/debug-agent": { "version": "3.2.0", - "from": "@google-cloud/debug-agent@>=3.0.0 <4.0.0", "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", + "integrity": "sha512-fP87kYbS6aeDna08BivwQ1J260mwJGchRi99XdWCgqbRwuFac8ul0OT5i2wEeDSc5QaDX8ZuWQQ0igZvh1rTyQ==", + "requires": { + "@google-cloud/common": "^0.32.0", + "@sindresorhus/is": "^0.15.0", + "acorn": "^6.0.0", + "coffeescript": "^2.0.0", + "console-log-level": "^1.4.0", + "extend": "^3.0.1", + "findit2": "^2.2.3", + "gcp-metadata": "^1.0.0", + "lodash.pickby": "^4.6.0", + "p-limit": "^2.2.0", + "pify": "^4.0.1", + "semver": "^6.0.0", + "source-map": "^0.6.1", + "split": "^1.0.0" + }, "dependencies": { "coffeescript": { "version": "2.4.1", - "from": "coffeescript@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz" + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz", + "integrity": "sha512-34GV1aHrsMpTaO3KfMJL40ZNuvKDR/g98THHnE9bQj8HjMaZvSrLik99WWqyMhRtbe8V5hpx5iLgdcSvM/S2wg==" }, "semver": { "version": "6.1.1", - "from": "semver@>=6.0.0 <7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz" + "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz", + "integrity": "sha512-rWYq2e5iYW+fFe/oPPtYJxYgjBm8sC4rmoGdUOgBB7VnwKt6HrL793l2voH1UlsyYZpJ4g0wfjnTEO1s1NP2eQ==" } } }, "@google-cloud/profiler": { "version": "0.2.3", - "from": "@google-cloud/profiler@>=0.2.3 <0.3.0", "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", + "integrity": "sha512-rNvtrFtIebIxZEJ/O0t8n7HciZGIXBo8DvHxWqAmsCaeLvkTtsaL6HmPkwxrNQ1IhbYWAxF+E/DwCiHyhKmgTg==", + "requires": { + "@google-cloud/common": "^0.26.0", + 
"@types/console-log-level": "^1.4.0", + "@types/semver": "^5.5.0", + "bindings": "^1.2.1", + "console-log-level": "^1.4.0", + "delay": "^4.0.1", + "extend": "^3.0.1", + "gcp-metadata": "^0.9.0", + "nan": "^2.11.1", + "parse-duration": "^0.1.1", + "pify": "^4.0.0", + "pretty-ms": "^4.0.0", + "protobufjs": "~6.8.6", + "semver": "^5.5.0", + "teeny-request": "^3.3.0" + }, "dependencies": { "@google-cloud/common": { "version": "0.26.2", - "from": "@google-cloud/common@>=0.26.0 <0.27.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz", + "integrity": "sha512-xJ2M/q3MrUbnYZuFlpF01caAlEhAUoRn0NXp93Hn3pkFpfSOG8YfbKbpBAHvcKVbBOAKVIwPsleNtuyuabUwLQ==", + "requires": { + "@google-cloud/projectify": "^0.3.2", + "@google-cloud/promisify": "^0.3.0", + "@types/duplexify": "^3.5.0", + "@types/request": "^2.47.0", + "arrify": "^1.0.1", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.1", + "google-auth-library": "^2.0.0", + "pify": "^4.0.0", + "retry-request": "^4.0.0", + "through2": "^3.0.0" + } }, "@google-cloud/promisify": { "version": "0.3.1", - "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", + "integrity": "sha512-QzB0/IMvB0eFxFK7Eqh+bfC8NLv3E9ScjWQrPOk6GgfNroxcVITdTlT8NRsRrcp5+QQJVPLkRqKG0PUdaWXmHw==" }, "arrify": { "version": "1.0.1", - "from": "arrify@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" }, "gcp-metadata": { "version": "0.9.3", - "from": "gcp-metadata@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", + "integrity": "sha512-caV4S84xAjENtpezLCT/GILEAF5h/bC4cNqZFmt/tjTn8t+JBtTkQrgBrJu3857YdsnlM8rxX/PMcKGtE8hUlw==", + "requires": { + "gaxios": "^1.0.2", + "json-bigint": "^0.3.0" + } }, "google-auth-library": { "version": "2.0.2", - "from": "google-auth-library@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", + "integrity": "sha512-FURxmo1hBVmcfLauuMRKOPYAPKht3dGuI2wjeJFalDUThO0HoYVjr4yxt5cgYSFm1dgUpmN9G/poa7ceTFAIiA==", + "requires": { + "axios": "^0.18.0", + "gcp-metadata": "^0.7.0", + "gtoken": "^2.3.0", + "https-proxy-agent": "^2.2.1", + "jws": "^3.1.5", + "lru-cache": "^5.0.0", + "semver": "^5.5.0" + }, "dependencies": { "gcp-metadata": { "version": "0.7.0", - "from": "gcp-metadata@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz", + "integrity": "sha512-ffjC09amcDWjh3VZdkDngIo7WoluyC5Ag9PAYxZbmQLOLNI8lvPtoKTSCyU54j2gwy5roZh6sSMTfkY2ct7K3g==", + "requires": { + "axios": "^0.18.0", + "extend": "^3.0.1", + "retry-axios": "0.3.2" + } } } }, "through2": { "version": "3.0.1", - "from": "through2@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz" + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", + "integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==", + "requires": { + "readable-stream": "2 || 3" 
+ } } } }, "@google-cloud/projectify": { "version": "0.3.3", - "from": "@google-cloud/projectify@>=0.3.3 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", + "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" }, "@google-cloud/promisify": { "version": "0.4.0", - "from": "@google-cloud/promisify@>=0.4.0 <0.5.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", + "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" }, "@google-cloud/trace-agent": { "version": "3.6.1", - "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0", "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", + "integrity": "sha512-KDo85aPN4gSxJ7oEIOlKd7aGENZFXAM1kbIn1Ds+61gh/K1CQWSyepgJo3nUpAwH6D1ezDWV7Iaf8ueoITc8Uw==", + "requires": { + "@google-cloud/common": "^0.32.1", + "builtin-modules": "^3.0.0", + "console-log-level": "^1.4.0", + "continuation-local-storage": "^3.2.1", + "extend": "^3.0.0", + "gcp-metadata": "^1.0.0", + "hex2dec": "^1.0.1", + "is": "^3.2.0", + "methods": "^1.1.1", + "require-in-the-middle": "^4.0.0", + "semver": "^6.0.0", + "shimmer": "^1.2.0", + "uuid": "^3.0.1" + }, "dependencies": { "methods": { "version": "1.1.2", - "from": "methods@>=1.1.1 <2.0.0", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, "semver": { "version": "6.1.1", - "from": "semver@^6.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz" + "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz", + "integrity": "sha512-rWYq2e5iYW+fFe/oPPtYJxYgjBm8sC4rmoGdUOgBB7VnwKt6HrL793l2voH1UlsyYZpJ4g0wfjnTEO1s1NP2eQ==" }, "uuid": { "version": "3.3.2", - "from": "uuid@^3.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, "@protobufjs/aspromise": { "version": "1.1.2", - "from": "@protobufjs/aspromise@>=1.1.2 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" }, "@protobufjs/base64": { "version": "1.1.2", - "from": "@protobufjs/base64@>=1.1.2 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" }, "@protobufjs/codegen": { "version": "2.0.4", - "from": "@protobufjs/codegen@>=2.0.4 <3.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" }, "@protobufjs/eventemitter": { "version": "1.1.0", - "from": 
"@protobufjs/eventemitter@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" }, "@protobufjs/fetch": { "version": "1.1.0", - "from": "@protobufjs/fetch@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "requires": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } }, "@protobufjs/float": { "version": "1.0.2", - "from": "@protobufjs/float@>=1.0.2 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" }, "@protobufjs/inquire": { "version": "1.1.0", - "from": "@protobufjs/inquire@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" }, "@protobufjs/path": { "version": "1.1.2", - "from": "@protobufjs/path@>=1.1.2 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" }, "@protobufjs/pool": { "version": "1.1.0", - "from": "@protobufjs/pool@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" }, "@protobufjs/utf8": { "version": "1.1.0", - "from": "@protobufjs/utf8@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" }, "@sindresorhus/is": { "version": "0.15.0", - "from": "@sindresorhus/is@>=0.15.0 <0.16.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz" + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", + "integrity": "sha512-lu8BpxjAtRCAo5ifytTpCPCj99LF7o/2Myn+NXyNCBqvPYn7Pjd76AMmUB5l7XF1U6t0hcWrlEM5ESufW7wAeA==" }, "@types/caseless": { "version": "0.12.2", - "from": "@types/caseless@*", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz" + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", + "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==" }, "@types/console-log-level": { "version": "1.4.0", - "from": "@types/console-log-level@>=1.4.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", + "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" }, "@types/duplexify": { "version": "3.6.0", - "from": "@types/duplexify@>=3.5.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz" + "resolved": 
"https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz", + "integrity": "sha512-5zOA53RUlzN74bvrSGwjudssD9F3a797sDZQkiYpUOxW+WHaXTCPz4/d5Dgi6FKnOqZ2CpaTo0DhgIfsXAOE/A==", + "requires": { + "@types/node": "*" + } }, "@types/form-data": { "version": "2.2.1", - "from": "@types/form-data@*", - "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz" + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz", + "integrity": "sha512-JAMFhOaHIciYVh8fb5/83nmuO/AHwmto+Hq7a9y8FzLDcC1KCU344XDOMEmahnrTFlHjgh4L0WJFczNIX2GxnQ==", + "requires": { + "@types/node": "*" + } }, "@types/long": { "version": "4.0.0", - "from": "@types/long@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz" + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz", + "integrity": "sha512-1w52Nyx4Gq47uuu0EVcsHBxZFJgurQ+rTKS3qMHxR1GY2T8c2AJYd6vZoZ9q1rupaDjU0yT+Jc2XTyXkjeMA+Q==" }, "@types/node": { "version": "12.0.8", - "from": "@types/node@*", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz" + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz", + "integrity": "sha512-b8bbUOTwzIY3V5vDTY1fIJ+ePKDUBqt2hC2woVGotdQQhG/2Sh62HOKHrT7ab+VerXAcPyAiTEipPu/FsreUtg==" }, "@types/request": { "version": "2.48.1", - "from": "@types/request@>=2.47.0 <3.0.0", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.1.tgz" + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.1.tgz", + "integrity": "sha512-ZgEZ1TiD+KGA9LiAAPPJL68Id2UWfeSO62ijSXZjFJArVV+2pKcsVHmrcu+1oiE3q6eDGiFiSolRc4JHoerBBg==", + "requires": { + "@types/caseless": "*", + "@types/form-data": "*", + "@types/node": "*", + "@types/tough-cookie": "*" + } }, "@types/semver": { "version": "5.5.0", - "from": "@types/semver@>=5.5.0 <6.0.0", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz" + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", + "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, "@types/tough-cookie": { "version": "2.3.5", - "from": "@types/tough-cookie@*", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz" + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz", + "integrity": "sha512-SCcK7mvGi3+ZNz833RRjFIxrn4gI1PPR3NtuIS+6vMkvmsGjosqTJwRt5bAEFLRz+wtJMWv8+uOnZf2hi2QXTg==" }, "abort-controller": { "version": "3.0.0", - "from": "abort-controller@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "requires": { + "event-target-shim": "^5.0.0" + } + }, + "accepts": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.0.3.tgz", + "integrity": "sha1-krHbDU89tHsFMN9uFa6X21FNwvg=", + "requires": { + "mime": "~1.2.11", + "negotiator": "0.4.6" + } }, "acorn": { "version": "6.1.1", - "from": "acorn@>=6.0.0 <7.0.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz" + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz", + "integrity": "sha512-jPTiwtOxaHNaAPg/dmrJ/beuzLRnXtB0kQPQ8JpotKJgTB6rX6c8mlf315941pyjBSaPg8NHXS9fhP4u17DpGA==" }, "agent-base": { "version": "4.3.0", - "from": "agent-base@>=4.1.0 <5.0.0", 
- "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz" + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "requires": { + "es6-promisify": "^5.0.0" + } }, "ajv": { "version": "5.5.2", - "from": "ajv@>=5.1.0 <6.0.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz" + "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", + "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", + "requires": { + "co": "^4.6.0", + "fast-deep-equal": "^1.0.0", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.3.0" + } }, "arrify": { "version": "2.0.1", - "from": "arrify@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz" + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" }, "asn1": { "version": "0.1.11", - "from": "asn1@0.1.11", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz", + "integrity": "sha1-VZvhg3bQik7E2+gId9J4GGObLfc=" }, "assert-plus": { "version": "0.1.5", - "from": "assert-plus@>=0.1.5 <0.2.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz", + "integrity": "sha1-7nQAlBMALYTOxyGcasgRgS5yMWA=" }, "assertion-error": { "version": "1.1.0", - "from": "assertion-error@^1.0.1", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", "dev": true }, "async": { "version": "2.6.0", - "from": "async@>=2.5.0 <3.0.0", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.0.tgz" + "resolved": "https://registry.npmjs.org/async/-/async-2.6.0.tgz", + "integrity": "sha512-xAfGg1/NTLBBKlHFmnd7PlmUW9KhVQIUuSrYem9xzFUZy13ScvtyGGejaae9iAVRiRq9+Cx7DPFaAAhCpyxyPw==", + "requires": { + "lodash": "^4.14.0" + } }, "async-listener": { "version": "0.6.10", - "from": "async-listener@>=0.6.0 <0.7.0", - "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz" + "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", + "integrity": "sha512-gpuo6xOyF4D5DE5WvyqZdPA3NGhiT6Qf07l7DCB0wwDEsLvDIbCr6j9S5aj5Ch96dLace5tXVzWBZkxU/c5ohw==", + "requires": { + "semver": "^5.3.0", + "shimmer": "^1.1.0" + } }, "asynckit": { "version": "0.4.0", - "from": "asynckit@>=0.4.0 <0.5.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, "aws-sign": { "version": "0.3.0", - "from": "aws-sign@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.3.0.tgz", + "integrity": "sha1-PYHKabR0seFlGHKLUcJP8Lvtxuk=" }, "aws-sign2": { "version": "0.7.0", - "from": "aws-sign2@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz" + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" }, "aws4": { "version": "1.6.0", - "from": "aws4@>=1.6.0 <2.0.0", - "resolved": 
"https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", + "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=" }, "axios": { "version": "0.18.1", - "from": "axios@>=0.18.0 <0.19.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz" + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", + "integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==", + "requires": { + "follow-redirects": "1.5.10", + "is-buffer": "^2.0.2" + } }, "balanced-match": { "version": "1.0.0", - "from": "balanced-match@^1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, "base64-js": { "version": "1.3.0", - "from": "base64-js@>=1.3.0 <2.0.0", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz" + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", + "integrity": "sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw==" + }, + "base64-url": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/base64-url/-/base64-url-1.3.3.tgz", + "integrity": "sha1-+LbFN/CaT8WMmcuG4LDpxhRhog8=" + }, + "basic-auth-connect": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/basic-auth-connect/-/basic-auth-connect-1.0.0.tgz", + "integrity": "sha1-/bC0OWLKe0BFanwrtI/hc9otISI=" + }, + "batch": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.1.tgz", + "integrity": "sha1-NqS6tZTAUP17UHvKDbMMLZKvT/I=" }, "bcrypt-pbkdf": { "version": "1.0.1", - "from": "bcrypt-pbkdf@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", - "optional": true + "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", + "optional": true, + "requires": { + "tweetnacl": "^0.14.3" + } }, "bignumber.js": { "version": "7.2.1", - "from": "bignumber.js@>=7.0.0 <8.0.0", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz" + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", + "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" }, "bindings": { "version": "1.5.0", - "from": "bindings@>=1.2.1 <2.0.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz" + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "requires": { + "file-uri-to-path": "1.0.0" + } }, "bintrees": { "version": "1.0.1", - "from": "bintrees@1.0.1", - "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", + "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" + }, + "body-parser": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.4.3.tgz", + "integrity": "sha1-RyeVLP9K8Hc+76SyJsL0Ei9eI00=", + "requires": { + "bytes": "1.0.0", + "depd": "0.3.0", + "iconv-lite": "0.4.3", + "media-typer": "0.2.0", + "qs": "0.6.6", + "raw-body": "1.2.2", + "type-is": "1.3.1" + }, + "dependencies": { + "qs": { + "version": "0.6.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", + "integrity": 
"sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc=" + } + } }, "boom": { "version": "0.4.2", - "from": "boom@>=0.4.0 <0.5.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz" + "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz", + "integrity": "sha1-emNune1O/O+xnO9JR6PGffrukRs=", + "requires": { + "hoek": "0.9.x" + } }, "brace-expansion": { "version": "1.1.8", - "from": "brace-expansion@^1.1.7", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz" + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", + "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } }, "browser-stdout": { "version": "1.3.1", - "from": "browser-stdout@1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, "bson": { "version": "1.0.9", - "from": "bson@~1.0.4", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.0.9.tgz" + "resolved": "https://registry.npmjs.org/bson/-/bson-1.0.9.tgz", + "integrity": "sha512-IQX9/h7WdMBIW/q/++tGd+emQr0XMdeZ6icnT/74Xk9fnabWn+gZgpE+9V+gujL3hhJOoNrnDVY7tWdzc7NUTg==" }, "buffer-crc32": { - "version": "0.2.1", - "from": "buffer-crc32@0.2.1", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz" + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.3.tgz", + "integrity": "sha1-u1RRnpXRB8vSQA520MqxRnM22SE=" }, "buffer-equal-constant-time": { "version": "1.0.1", - "from": "buffer-equal-constant-time@1.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" }, "buffer-shims": { "version": "1.0.0", - "from": "buffer-shims@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz", + "integrity": "sha1-mXjOMXOIxkmth5MCjDR37wRKi1E=" }, "builtin-modules": { "version": "3.1.0", - "from": "builtin-modules@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz" + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", + "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==" }, "bunyan": { "version": "0.22.3", - "from": "bunyan@>=0.22.1 <0.23.0", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-0.22.3.tgz", - "dev": true + "integrity": "sha1-ehncG0yMZF90AkGnQPIkUUfGfsI=", + "dev": true, + "requires": { + "dtrace-provider": "0.2.8", + "mv": "~2" + } }, "buster-core": { "version": "0.6.4", - "from": "buster-core@0.6.4", - "resolved": "https://registry.npmjs.org/buster-core/-/buster-core-0.6.4.tgz" + "resolved": "https://registry.npmjs.org/buster-core/-/buster-core-0.6.4.tgz", + "integrity": "sha1-J79rrWdCROpyDzEdkAoMoct4YFA=" }, "buster-format": { "version": "0.5.6", - "from": "buster-format@>=0.5.0 <0.6.0", - "resolved": "https://registry.npmjs.org/buster-format/-/buster-format-0.5.6.tgz" + "resolved": "https://registry.npmjs.org/buster-format/-/buster-format-0.5.6.tgz", + "integrity": "sha1-K4bDIuz14bCubm55Bev884fSq5U=", + "requires": { + 
"buster-core": "=0.6.4" + } }, "bytes": { - "version": "0.2.0", - "from": "bytes@0.2.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.0.tgz" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-1.0.0.tgz", + "integrity": "sha1-NWnt6Lo0MV+rmcPpLLBMciDeH6g=" }, "caseless": { "version": "0.12.0", - "from": "caseless@>=0.12.0 <0.13.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz" + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" }, "chai": { "version": "3.5.0", - "from": "chai@>=3.5.0 <4.0.0", "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", + "integrity": "sha1-TQJjewZ/6Vi9v906QOxW/vc3Mkc=", "dev": true, + "requires": { + "assertion-error": "^1.0.1", + "deep-eql": "^0.1.3", + "type-detect": "^1.0.0" + }, "dependencies": { "type-detect": { "version": "1.0.0", - "from": "type-detect@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-1.0.0.tgz", + "integrity": "sha1-diIXzAbbJY7EiQihKY6LlRIejqI=", "dev": true } } }, "chai-spies": { "version": "0.7.1", - "from": "chai-spies@>=0.7.1 <0.8.0", "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", + "integrity": "sha1-ND2Z9RJEIS6LF+ZLk5lv97LCqbE=", "dev": true }, "cluster-key-slot": { "version": "1.0.8", - "from": "cluster-key-slot@>=1.0.6 <2.0.0", "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.8.tgz", + "integrity": "sha1-dlRVYIWmUzCTKi6LWXb44tCz5BQ=", "dev": true }, "co": { "version": "4.6.0", - "from": "co@>=4.6.0 <5.0.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz" + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=" }, "coffee-script": { "version": "1.7.1", - "from": "coffee-script@1.7.1", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz", + "integrity": "sha1-YplqhheAx15tUGnROCJyO3NAS/w=", + "requires": { + "mkdirp": "~0.3.5" + } }, "combined-stream": { "version": "0.0.7", - "from": "combined-stream@>=0.0.4 <0.1.0", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", + "requires": { + "delayed-stream": "0.0.5" + } }, "commander": { - "version": "1.2.0", - "from": "commander@1.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-1.2.0.tgz" + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", + "integrity": "sha1-io8w7GcKb91kr1LxkUuQfXnq1bU=", + "requires": { + "keypress": "0.1.x" + } + }, + "compressible": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-1.1.0.tgz", + "integrity": "sha1-Ek2Ke7oYoFpBCi8lutQTsblK/2c=" + }, + "compression": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.0.7.tgz", + "integrity": "sha1-/Ev/Jh3043oTAAby2yqZo0iW9Vo=", + "requires": { + "accepts": "1.0.3", + "bytes": "1.0.0", + "compressible": "1.1.0", + "on-headers": "0.0.0", + "vary": "0.1.0" + } }, "concat-map": { "version": "0.0.1", - "from": "concat-map@0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "connect": { - "version": "2.8.4", - "from": "connect@2.8.4", - "resolved": "https://registry.npmjs.org/connect/-/connect-2.8.4.tgz" + "version": "2.20.2", + "resolved": "https://registry.npmjs.org/connect/-/connect-2.20.2.tgz", + "integrity": "sha1-RLxkM0x668IZfFLGh0cUAC0Jd74=", + "requires": { + "basic-auth-connect": "1.0.0", + "body-parser": "1.4.3", + "bytes": "1.0.0", + "compression": "1.0.7", + "connect-timeout": "1.1.1", + "cookie": "0.1.2", + "cookie-parser": "1.3.1", + "cookie-signature": "1.0.3", + "csurf": "1.2.2", + "debug": "1.0.2", + "depd": "0.3.0", + "errorhandler": "1.1.0", + "express-session": "1.4.0", + "finalhandler": "0.0.2", + "fresh": "0.2.2", + "media-typer": "0.2.0", + "method-override": "2.0.2", + "morgan": "1.1.1", + "multiparty": "3.2.9", + "on-headers": "0.0.0", + "parseurl": "1.0.1", + "pause": "0.0.1", + "qs": "0.6.6", + "response-time": "2.0.0", + "serve-favicon": "2.0.1", + "serve-index": "1.1.2", + "serve-static": "1.2.3", + "type-is": "1.3.1", + "vhost": "2.0.0" + }, + "dependencies": { + "debug": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "requires": { + "ms": "0.6.2" + } + }, + "ms": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + }, + "qs": { + "version": "0.6.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", + "integrity": "sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc=" + } + } + }, + "connect-timeout": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/connect-timeout/-/connect-timeout-1.1.1.tgz", + "integrity": "sha1-bH4xyY8Kxo620TBfZ/IfWm6Q/QQ=", + "requires": { + "debug": "1.0.2", + "on-headers": "0.0.0" + }, + "dependencies": { + "debug": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "requires": { + "ms": "0.6.2" + } + }, + "ms": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + } + } }, "console-log-level": { "version": "1.4.1", - "from": "console-log-level@>=1.4.0 <2.0.0", - "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", + "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, "continuation-local-storage": { "version": "3.2.1", - "from": "continuation-local-storage@>=3.2.1 <4.0.0", - "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz" + "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz", + "integrity": "sha512-jx44cconVqkCEEyLSKWwkvUXwO561jXMa3LPjTPsm5QR22PA0/mhe33FT4Xb5y74JDvt/Cq+5lm8S8rskLv9ZA==", + "requires": { + "async-listener": "^0.6.0", + "emitter-listener": "^1.1.1" + } }, "cookie": { - "version": "0.1.0", - "from": "cookie@0.1.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz" + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.2.tgz", + "integrity": "sha1-cv7D0k5Io0Mgc9kMEmQgBQYQBLE=" }, "cookie-jar": { "version": "0.3.0", - "from": "cookie-jar@>=0.3.0 <0.4.0", - 
"resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.3.0.tgz", + "integrity": "sha1-vJon1OK5fhhs1XyeIGPLmfpozMw=" + }, + "cookie-parser": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.3.1.tgz", + "integrity": "sha1-ML/CkGoESJ1ZvLnjL5DbCOBLtR4=", + "requires": { + "cookie": "0.1.2", + "cookie-signature": "1.0.3" + } }, "cookie-signature": { - "version": "1.0.1", - "from": "cookie-signature@1.0.1", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.3.tgz", + "integrity": "sha1-kc2ZfMUftkFZVzjGnNoCAyj1D/k=" }, "core-util-is": { "version": "1.0.2", - "from": "core-util-is@1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "cryptiles": { "version": "0.2.2", - "from": "cryptiles@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz" + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz", + "integrity": "sha1-7ZH/HxetE9N0gohZT4pIoNJvMlw=", + "requires": { + "boom": "0.4.x" + } + }, + "csrf-tokens": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/csrf-tokens/-/csrf-tokens-2.0.0.tgz", + "integrity": "sha1-yCEAP7i2rRe8l31v0ahL7cPtYZs=", + "requires": { + "base64-url": "1", + "rndm": "1", + "scmp": "~0.0.3", + "uid-safe": "1" + } + }, + "csurf": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/csurf/-/csurf-1.2.2.tgz", + "integrity": "sha1-Lqny0/LWex4iUykOZ2tiGV3Ld1Y=", + "requires": { + "csrf-tokens": "~2.0.0" + } }, "ctype": { "version": "0.5.3", - "from": "ctype@0.5.3", - "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz" + "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz", + "integrity": "sha1-gsGMJGH3QRTvFsE1IkrQuRRMoS8=" }, "dashdash": { "version": "1.14.1", - "from": "dashdash@>=1.12.0 <2.0.0", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "requires": { + "assert-plus": "^1.0.0" + }, "dependencies": { "assert-plus": { "version": "1.0.0", - "from": "assert-plus@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" } } }, "debug": { "version": "3.1.0", - "from": "debug@*", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "requires": { + "ms": "2.0.0" + } }, "deep-eql": { "version": "0.1.3", - "from": "deep-eql@>=0.1.3 <0.2.0", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", + "integrity": "sha1-71WKyrjeJSBs1xOQbXTlaTDrafI=", "dev": true, + "requires": { + "type-detect": "0.1.1" + }, "dependencies": { "type-detect": { "version": "0.1.1", - "from": "type-detect@0.1.1", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", + "integrity": "sha1-C6XsKohWQORw6k6FBZcZANrFiCI=", "dev": true } } }, "delay": { "version": "4.3.0", - "from": 
"delay@>=4.0.1 <5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz" + "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", + "integrity": "sha512-Lwaf3zVFDMBop1yDuFZ19F9WyGcZcGacsbdlZtWjQmM50tOcMntm1njF/Nb/Vjij3KaSvCF+sEYGKrrjObu2NA==" }, "delayed-stream": { "version": "0.0.5", - "from": "delayed-stream@0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" }, "denque": { "version": "1.4.1", - "from": "denque@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", + "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==" + }, + "depd": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-0.3.0.tgz", + "integrity": "sha1-Ecm8KOQlMl+9iziUC+/2n6UyaIM=" }, "diff": { "version": "3.5.0", - "from": "diff@3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", "dev": true }, "dtrace-provider": { "version": "0.2.8", - "from": "dtrace-provider@0.2.8", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.2.8.tgz", + "integrity": "sha1-4kPxkhmqlfvw2PL/sH9b1k6U/iA=", "dev": true, "optional": true }, "duplexify": { "version": "3.7.1", - "from": "duplexify@>=3.6.0 <4.0.0", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz" + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", + "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", + "requires": { + "end-of-stream": "^1.0.0", + "inherits": "^2.0.1", + "readable-stream": "^2.0.0", + "stream-shift": "^1.0.0" + } }, "each-series": { "version": "1.0.0", - "from": "each-series@^1.0.0", - "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz", + "integrity": "sha1-+Ibmxm39sl7x/nNWQUbuXLR4r8s=" }, "ecc-jsbn": { "version": "0.1.1", - "from": "ecc-jsbn@>=0.1.1 <0.2.0", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", - "optional": true + "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", + "optional": true, + "requires": { + "jsbn": "~0.1.0" + } }, "ecdsa-sig-formatter": { "version": "1.0.11", - "from": "ecdsa-sig-formatter@1.0.11", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz" + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "ee-first": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.0.3.tgz", + "integrity": "sha1-bJjECJq+y1p7hcGsRJqmA9Oz2r4=" }, "emitter-listener": { "version": "1.1.2", - "from": "emitter-listener@>=1.1.1 <2.0.0", - "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", + "integrity": 
"sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ==", + "requires": { + "shimmer": "^1.2.0" + } }, "end-of-stream": { "version": "1.4.1", - "from": "end-of-stream@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", + "integrity": "sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==", + "requires": { + "once": "^1.4.0" + } }, "ent": { "version": "2.2.0", - "from": "ent@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz" + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" + }, + "errorhandler": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/errorhandler/-/errorhandler-1.1.0.tgz", + "integrity": "sha1-JzsOuFCtED6abWOpmB2G8bmhIC4=", + "requires": { + "accepts": "1.0.3", + "escape-html": "1.0.1" + } }, "es6-promise": { "version": "4.2.8", - "from": "es6-promise@>=4.0.3 <5.0.0", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz" + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", + "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" }, "es6-promisify": { "version": "5.0.0", - "from": "es6-promisify@>=5.0.0 <6.0.0", - "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz" + "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", + "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", + "requires": { + "es6-promise": "^4.0.3" + } + }, + "escape-html": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.1.tgz", + "integrity": "sha1-GBoobq05ejmpKFfPsdQwUuNWv/A=" }, "escape-string-regexp": { "version": "1.0.5", - "from": "escape-string-regexp@^1.0.2", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", "dev": true }, "event-target-shim": { "version": "5.0.1", - "from": "event-target-shim@>=5.0.0 <6.0.0", - "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" }, "express": { - "version": "3.3.4", - "from": "express@3.3.4", - "resolved": "https://registry.npmjs.org/express/-/express-3.3.4.tgz" - }, - "extend": { - "version": "3.0.1", - "from": "extend@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz" - }, - "extsprintf": { - "version": "1.3.0", - "from": "extsprintf@1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" - }, - "fast-deep-equal": { - "version": "1.0.0", - "from": "fast-deep-equal@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz" - }, - "fast-json-stable-stringify": { - "version": "2.0.0", - "from": "fast-json-stable-stringify@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz" - }, - "fast-text-encoding": { - "version": "1.0.0", - "from": "fast-text-encoding@>=1.0.0 <2.0.0", - "resolved": 
"https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz" - }, - "file-uri-to-path": { - "version": "1.0.0", - "from": "file-uri-to-path@1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz" - }, - "findit2": { - "version": "2.2.3", - "from": "findit2@>=2.2.3 <3.0.0", - "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz" - }, - "follow-redirects": { - "version": "1.5.10", - "from": "follow-redirects@1.5.10", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz" - }, - "forever-agent": { - "version": "0.5.2", - "from": "forever-agent@>=0.5.0 <0.6.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.5.2.tgz" - }, - "form-data": { - "version": "0.1.4", - "from": "form-data@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz", + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/express/-/express-3.11.0.tgz", + "integrity": "sha1-8cjhyZGkRN164zG/t/GkVX/P0u4=", + "requires": { + "buffer-crc32": "0.2.3", + "commander": "1.3.2", + "connect": "2.20.2", + "cookie": "0.1.2", + "cookie-signature": "1.0.3", + "debug": "1.0.2", + "depd": "0.3.0", + "escape-html": "1.0.1", + "fresh": "0.2.2", + "merge-descriptors": "0.0.2", + "methods": "1.0.1", + "mkdirp": "0.5.0", + "parseurl": "1.0.1", + "proxy-addr": "1.0.1", + "range-parser": "1.0.0", + "send": "0.4.3", + "vary": "0.1.0" + }, "dependencies": { - "async": { - "version": "0.9.2", - "from": "async@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz" + "debug": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "requires": { + "ms": "0.6.2" + } + }, + "mkdirp": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.0.tgz", + "integrity": "sha1-HXMHam35hs2TROFecfzAWkyavxI=", + "requires": { + "minimist": "0.0.8" + } + }, + "ms": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" } } }, - "formidable": { - "version": "1.0.14", - "from": "formidable@1.0.14", - "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.0.14.tgz" + "express-session": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.4.0.tgz", + "integrity": "sha1-kL+Kk5ocjcAS5KEeTC/DYp98+JQ=", + "requires": { + "buffer-crc32": "0.2.3", + "cookie": "0.1.2", + "cookie-signature": "1.0.3", + "debug": "1.0.2", + "on-headers": "0.0.0", + "rand-token": "0.2.1", + "utils-merge": "1.0.0" + }, + "dependencies": { + "debug": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "requires": { + "ms": "0.6.2" + } + }, + "ms": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + } + } + }, + "extend": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", + "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ=" + }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + }, + "fast-deep-equal": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz", + "integrity": "sha1-liVqO8l1WV6zbYLpkp0GDYk0Of8=" + }, + "fast-json-stable-stringify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", + "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" + }, + "fast-text-encoding": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", + "integrity": "sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==" + }, + "file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + }, + "finalhandler": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-0.0.2.tgz", + "integrity": "sha1-BgPYde6H1WeiZmkoFcyK1E/M7to=", + "requires": { + "debug": "1.0.2", + "escape-html": "1.0.1" + }, + "dependencies": { + "debug": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "requires": { + "ms": "0.6.2" + } + }, + "ms": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + } + } + }, + "findit2": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", + "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" + }, + "finished": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/finished/-/finished-1.2.2.tgz", + "integrity": "sha1-QWCOr639ZWg7RqEiC8Sx7D2u3Ng=", + "requires": { + "ee-first": "1.0.3" + } + }, + "follow-redirects": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", + "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", + "requires": { + "debug": "=3.1.0" + } + }, + "forever-agent": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.5.2.tgz", + "integrity": "sha1-bQ4JxJIflKJ/Y9O0nF/v8epMUTA=" + }, + "form-data": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz", + "integrity": "sha1-kavXiKupcCsaq/qLwBAxoqyeOxI=", + "requires": { + "async": "~0.9.0", + "combined-stream": "~0.0.4", + "mime": "~1.2.11" + }, + "dependencies": { + "async": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=" + } + } }, "fresh": { - "version": "0.1.0", - "from": "fresh@0.1.0", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.1.0.tgz" + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.2.tgz", + "integrity": "sha1-lzHc9WeMf660T7kDxPct9VGH+nc=" }, "fs.realpath": { "version": "1.0.0", - "from": "fs.realpath@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", "dev": true }, "gaxios": { "version": "1.8.4", - "from": "gaxios@>=1.2.1 <2.0.0", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", + "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "requires": { + 
"abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.3.0" + }, "dependencies": { "extend": { "version": "3.0.2", - "from": "extend@>=3.0.2 <4.0.0", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" } } }, "gcp-metadata": { "version": "1.0.0", - "from": "gcp-metadata@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", + "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", + "requires": { + "gaxios": "^1.0.2", + "json-bigint": "^0.3.0" + } }, "getpass": { "version": "0.1.7", - "from": "getpass@>=0.1.1 <0.2.0", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "requires": { + "assert-plus": "^1.0.0" + }, "dependencies": { "assert-plus": { "version": "1.0.0", - "from": "assert-plus@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" } } }, "glob": { "version": "6.0.4", - "from": "glob@^6.0.1", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "optional": true + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "optional": true, + "requires": { + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "2 || 3", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } }, "google-auth-library": { "version": "3.1.2", - "from": "google-auth-library@>=3.1.1 <4.0.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz" + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", + "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", + "requires": { + "base64-js": "^1.3.0", + "fast-text-encoding": "^1.0.0", + "gaxios": "^1.2.1", + "gcp-metadata": "^1.0.0", + "gtoken": "^2.3.2", + "https-proxy-agent": "^2.2.1", + "jws": "^3.1.5", + "lru-cache": "^5.0.0", + "semver": "^5.5.0" + } }, "google-p12-pem": { "version": "1.0.4", - "from": "google-p12-pem@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz" + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", + "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", + "requires": { + "node-forge": "^0.8.0", + "pify": "^4.0.0" + } }, "gtoken": { "version": "2.3.3", - "from": "gtoken@>=2.3.2 <3.0.0", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", + "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", + "requires": { + "gaxios": "^1.0.4", + "google-p12-pem": "^1.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0", + "pify": "^4.0.0" + }, "dependencies": { "mime": { "version": "2.4.4", - "from": "mime@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": 
"sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" } } }, "har-schema": { "version": "2.0.0", - "from": "har-schema@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" }, "har-validator": { "version": "5.0.3", - "from": "har-validator@>=5.0.3 <5.1.0", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz" + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", + "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", + "requires": { + "ajv": "^5.1.0", + "har-schema": "^2.0.0" + } }, "hawk": { "version": "1.0.0", - "from": "hawk@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.0.0.tgz", + "integrity": "sha1-uQuxaYByhUEdp//LjdJZhQLTtS0=", + "requires": { + "boom": "0.4.x", + "cryptiles": "0.2.x", + "hoek": "0.9.x", + "sntp": "0.2.x" + } }, "he": { "version": "1.1.1", - "from": "he@1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", "dev": true }, "hex2dec": { "version": "1.1.2", - "from": "hex2dec@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", + "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, "hoek": { "version": "0.9.1", - "from": "hoek@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz" + "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz", + "integrity": "sha1-PTIkYrrfB3Fup+uFuviAec3c5QU=" }, "http-signature": { "version": "0.10.1", - "from": "http-signature@>=0.10.0 <0.11.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.1.tgz" + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.1.tgz", + "integrity": "sha1-T72sEyVZqoMjEh5UB3nAoBKyfmY=", + "requires": { + "asn1": "0.1.11", + "assert-plus": "^0.1.5", + "ctype": "0.5.3" + } }, "https-proxy-agent": { "version": "2.2.1", - "from": "https-proxy-agent@>=2.2.1 <3.0.0", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz" + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", + "integrity": "sha512-HPCTS1LW51bcyMYbxUIOO4HEOlQ1/1qRaFWcyxvwaqUS9TY88aoEuHUY33kuAh1YhVVaDQhLZsnPd+XNARWZlQ==", + "requires": { + "agent-base": "^4.1.0", + "debug": "^3.1.0" + } + }, + "iconv-lite": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.3.tgz", + "integrity": "sha1-nniHeTt2nMaV6yLSVGpP0tebeh4=" }, "inflight": { "version": "1.0.6", - "from": "inflight@^1.0.4", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } }, "inherits": { "version": "2.0.3", - "from": "inherits@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" }, "ioredis": { "version": "4.14.1", - "from": "ioredis@>=4.14.1 <4.15.0", 
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.14.1.tgz", + "integrity": "sha512-94W+X//GHM+1GJvDk6JPc+8qlM7Dul+9K+lg3/aHixPN7ZGkW6qlvX0DG6At9hWtH2v3B32myfZqWoANUJYGJA==", + "requires": { + "cluster-key-slot": "^1.1.0", + "debug": "^4.1.1", + "denque": "^1.1.0", + "lodash.defaults": "^4.2.0", + "lodash.flatten": "^4.4.0", + "redis-commands": "1.5.0", + "redis-errors": "^1.2.0", + "redis-parser": "^3.0.0", + "standard-as-callback": "^2.0.1" + }, "dependencies": { "cluster-key-slot": { "version": "1.1.0", - "from": "cluster-key-slot@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", + "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==" }, "debug": { "version": "4.1.1", - "from": "debug@>=4.1.1 <5.0.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } }, "ms": { "version": "2.1.2", - "from": "ms@>=2.1.1 <3.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" } } }, + "ipaddr.js": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-0.1.2.tgz", + "integrity": "sha1-ah/T2FT1ACllw017vNm0qNSwRn4=" + }, "is": { "version": "3.3.0", - "from": "is@>=3.2.0 <4.0.0", - "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz" + "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz", + "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==" }, "is-buffer": { "version": "2.0.3", - "from": "is-buffer@>=2.0.2 <3.0.0", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz" + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz", + "integrity": "sha512-U15Q7MXTuZlrbymiz95PJpZxu8IlipAp4dtS3wOdgPXx3mqBnslrWU14kxfHB+Py/+2PVKSr37dMAgM2A4uArw==" }, "is-typedarray": { "version": "1.0.0", - "from": "is-typedarray@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" }, "isarray": { "version": "1.0.0", - "from": "isarray@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, "isstream": { "version": "0.1.2", - "from": "isstream@>=0.1.2 <0.2.0", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, "jsbn": { "version": "0.1.1", - "from": "jsbn@>=0.1.0 <0.2.0", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", "optional": true }, "json-bigint": { "version": "0.3.0", - "from": "json-bigint@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz" + "resolved": 
"https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", + "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "requires": { + "bignumber.js": "^7.0.0" + } }, "json-schema": { "version": "0.2.3", - "from": "json-schema@0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" }, "json-schema-traverse": { "version": "0.3.1", - "from": "json-schema-traverse@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz" + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", + "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=" }, "json-stringify-safe": { "version": "5.0.1", - "from": "json-stringify-safe@5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, "jsprim": { "version": "1.4.1", - "from": "jsprim@>=1.2.2 <2.0.0", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + }, "dependencies": { "assert-plus": { "version": "1.0.0", - "from": "assert-plus@1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" } } }, "jwa": { "version": "1.4.1", - "from": "jwa@>=1.4.1 <2.0.0", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } }, "jws": { "version": "3.2.2", - "from": "jws@>=3.1.5 <4.0.0", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz" + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } }, "keypress": { "version": "0.1.0", - "from": "keypress@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz" + "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz", + "integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo=" }, "lodash": { "version": "4.17.4", - "from": "lodash@4.17.4", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" }, "lodash.defaults": { "version": "4.2.0", - "from": "lodash.defaults@>=4.2.0 <5.0.0", - "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz" + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" }, "lodash.flatten": { "version": "4.4.0", - "from": "lodash.flatten@>=4.4.0 <5.0.0", - "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz" + "resolved": 
"https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", + "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=" }, "lodash.pickby": { "version": "4.6.0", - "from": "lodash.pickby@>=4.6.0 <5.0.0", - "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz" + "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", + "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" }, "logger-sharelatex": { "version": "1.7.0", - "from": "logger-sharelatex@1.7.0", "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.7.0.tgz", + "integrity": "sha512-9sxDGPSphOMDqUqGpOu/KxFAVcpydKggWv60g9D7++FDCxGkhLLn0kmBkDdgB00d1PadgX1CBMWKzIBpptDU/Q==", + "requires": { + "bunyan": "1.8.12", + "raven": "1.1.3", + "request": "2.88.0" + }, "dependencies": { "ajv": { "version": "6.10.0", - "from": "ajv@>=6.5.5 <7.0.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz" + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz", + "integrity": "sha512-nffhOpkymDECQyR0mnsUtoCE8RlX38G0rYP+wgLWFyZuUyuuojSSvi/+euOiQBIn63whYwYVIIH1TvE3tu4OEg==", + "requires": { + "fast-deep-equal": "^2.0.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } }, "assert-plus": { "version": "1.0.0", - "from": "assert-plus@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" }, "aws4": { "version": "1.8.0", - "from": "aws4@>=1.8.0 <2.0.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz" + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", + "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==" }, "bunyan": { "version": "1.8.12", - "from": "bunyan@1.8.12", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz" + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", + "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", + "requires": { + "dtrace-provider": "~0.8", + "moment": "^2.10.6", + "mv": "~2", + "safe-json-stringify": "~1" + } }, "combined-stream": { "version": "1.0.8", - "from": "combined-stream@>=1.0.6 <1.1.0", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } }, "delayed-stream": { "version": "1.0.0", - "from": "delayed-stream@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" }, "dtrace-provider": { "version": "0.8.7", - "from": "dtrace-provider@>=0.8.0 <0.9.0", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz", - "optional": true + "integrity": "sha1-3JObTT4GIM/gwc2APQ0tftBP/QQ=", + "optional": true, + "requires": { + "nan": "^2.10.0" + } }, "extend": { "version": "3.0.2", - "from": "extend@>=3.0.2 <3.1.0", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": 
"sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "fast-deep-equal": { "version": "2.0.1", - "from": "fast-deep-equal@>=2.0.1 <3.0.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz" + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", + "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" }, "forever-agent": { "version": "0.6.1", - "from": "forever-agent@>=0.6.1 <0.7.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" }, "form-data": { "version": "2.3.3", - "from": "form-data@>=2.3.2 <2.4.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz" + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } }, "har-validator": { "version": "5.1.3", - "from": "har-validator@>=5.1.0 <5.2.0", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz" + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", + "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", + "requires": { + "ajv": "^6.5.5", + "har-schema": "^2.0.0" + } }, "http-signature": { "version": "1.2.0", - "from": "http-signature@>=1.2.0 <1.3.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz" + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } }, "json-schema-traverse": { "version": "0.4.1", - "from": "json-schema-traverse@>=0.4.1 <0.5.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, "mime-db": { "version": "1.40.0", - "from": "mime-db@1.40.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz" + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", + "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==" }, "mime-types": { "version": "2.1.24", - "from": "mime-types@>=2.1.19 <2.2.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz" + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", + "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", + "requires": { + "mime-db": "1.40.0" + } }, "oauth-sign": { "version": "0.9.0", - "from": "oauth-sign@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz" + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" }, "qs": { "version": "6.5.2", - "from": "qs@>=6.5.2 <6.6.0", - "resolved": 
"https://registry.npmjs.org/qs/-/qs-6.5.2.tgz" + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" }, "request": { "version": "2.88.0", - "from": "request@>=2.88.0 <3.0.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz" + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.0", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.4.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + } }, "safe-buffer": { "version": "5.1.2", - "from": "safe-buffer@>=5.1.2 <6.0.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "tough-cookie": { "version": "2.4.3", - "from": "tough-cookie@>=2.4.3 <2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz" + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "requires": { + "psl": "^1.1.24", + "punycode": "^1.4.1" + } }, "tunnel-agent": { "version": "0.6.0", - "from": "tunnel-agent@>=0.6.0 <0.7.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz" + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "^5.0.1" + } }, "uuid": { "version": "3.3.2", - "from": "uuid@>=3.3.2 <4.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, "long": { "version": "4.0.0", - "from": "long@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz" + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" }, "lru-cache": { "version": "5.1.1", - "from": "lru-cache@>=5.0.0 <6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "requires": { + "yallist": "^3.0.2" + } }, "lsmod": { "version": "1.0.0", - "from": "lsmod@1.0.0", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", + "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" }, "lynx": { "version": "0.0.11", - "from": "lynx@0.0.11", - "resolved": 
"https://registry.npmjs.org/lynx/-/lynx-0.0.11.tgz" + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.0.11.tgz", + "integrity": "sha1-LPoU5EP9LZKlm3efQVZ84cxpZaM=", + "requires": { + "mersenne": "~0.0.3", + "statsd-parser": "~0.0.4" + } + }, + "media-typer": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.2.0.tgz", + "integrity": "sha1-2KBlITrf6qLnYyGitt2jb/YzWYQ=" + }, + "merge-descriptors": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.2.tgz", + "integrity": "sha1-w2pSp4FDdRPFcnXzndnTF1FKyMc=" }, "mersenne": { "version": "0.0.4", - "from": "mersenne@>=0.0.3 <0.1.0", - "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz" + "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", + "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" + }, + "method-override": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/method-override/-/method-override-2.0.2.tgz", + "integrity": "sha1-AFMSeMeXiWQL8n6X4mo6Wh98ynM=", + "requires": { + "methods": "1.0.1", + "parseurl": "1.0.1", + "vary": "0.1.0" + } }, "methods": { - "version": "0.0.1", - "from": "methods@0.0.1", - "resolved": "https://registry.npmjs.org/methods/-/methods-0.0.1.tgz" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.0.1.tgz", + "integrity": "sha1-dbyRlD3/19oDfPPusO1zoAN80Us=" }, "metrics-sharelatex": { "version": "2.2.0", - "from": "metrics-sharelatex@2.2.0", "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.2.0.tgz", + "integrity": "sha512-kjj3EdkrOJrENLFW/QHiPqBr5AbGEHeti90nMbw6sjKO2TOcuPJHT2Y66m8tqgotnMPKw+kXToRs8Rc9+0xuMQ==", + "requires": { + "@google-cloud/debug-agent": "^3.0.0", + "@google-cloud/profiler": "^0.2.3", + "@google-cloud/trace-agent": "^3.2.0", + "coffee-script": "1.6.0", + "lynx": "~0.1.1", + "prom-client": "^11.1.3", + "underscore": "~1.6.0" + }, "dependencies": { "coffee-script": { "version": "1.6.0", - "from": "coffee-script@1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" }, "lynx": { "version": "0.1.1", - "from": "lynx@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", + "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", + "requires": { + "mersenne": "~0.0.3", + "statsd-parser": "~0.0.4" + } } } }, "mime": { "version": "1.2.11", - "from": "mime@>=1.2.9 <1.3.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" }, "mime-db": { "version": "1.30.0", - "from": "mime-db@>=1.30.0 <1.31.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz" + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz", + "integrity": "sha1-dMZD2i3Z1qRTmZY0ZbJtXKfXHwE=" }, "mime-types": { "version": "2.1.17", - "from": "mime-types@>=2.1.17 <2.2.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz" + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz", + "integrity": "sha1-Cdejk/A+mVp5+K+Fe3Cp4KsWVXo=", + "requires": { + "mime-db": "~1.30.0" + } }, "minimatch": { "version": "3.0.4", - "from": "minimatch@2 || 3", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "^1.1.7" + } }, "minimist": { "version": "0.0.8", - "from": "minimist@0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, "mkdirp": { "version": "0.3.5", - "from": "mkdirp@>=0.3.5 <0.4.0", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", + "integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=" }, "mocha": { "version": "5.2.0", - "from": "mocha@>=5.0.1 <6.0.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", + "integrity": "sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", "dev": true, + "requires": { + "browser-stdout": "1.3.1", + "commander": "2.15.1", + "debug": "3.1.0", + "diff": "3.5.0", + "escape-string-regexp": "1.0.5", + "glob": "7.1.2", + "growl": "1.10.5", + "he": "1.1.1", + "minimatch": "3.0.4", + "mkdirp": "0.5.1", + "supports-color": "5.4.0" + }, "dependencies": { "commander": { "version": "2.15.1", - "from": "commander@2.15.1", "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", "dev": true }, "glob": { "version": "7.1.2", - "from": "glob@7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "dev": true + "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } }, "growl": { "version": "1.10.5", - "from": "growl@1.10.5", "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", "dev": true }, "has-flag": { "version": "3.0.0", - "from": "has-flag@>=3.0.0 <4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", "dev": true }, "mkdirp": { "version": "0.5.1", - "from": "mkdirp@0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "dev": true + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "0.0.8" + } }, "supports-color": { "version": "5.4.0", - "from": "supports-color@5.4.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", - "dev": true + "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } } } }, "module-details-from-path": { "version": "1.0.3", - "from": "module-details-from-path@>=1.0.3 <2.0.0", - "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" + "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", + "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { "version": "2.24.0", - 
"from": "moment@>=2.10.6 <3.0.0", "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", + "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==", "optional": true }, "mongodb": { "version": "2.2.36", - "from": "mongodb@^2.2.31", "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-2.2.36.tgz", + "integrity": "sha512-P2SBLQ8Z0PVx71ngoXwo12+FiSfbNfGOClAao03/bant5DgLNkOPAck5IaJcEk4gKlQhDEURzfR3xuBG1/B+IA==", + "requires": { + "es6-promise": "3.2.1", + "mongodb-core": "2.1.20", + "readable-stream": "2.2.7" + }, "dependencies": { "es6-promise": { "version": "3.2.1", - "from": "es6-promise@3.2.1", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.2.1.tgz" + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.2.1.tgz", + "integrity": "sha1-7FYjOGgDKQkgcXDDlEjiREndH8Q=" }, "process-nextick-args": { "version": "1.0.7", - "from": "process-nextick-args@>=1.0.6 <1.1.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "integrity": "sha1-FQ4gt1ZZCtP5EJPyWk8q2L/zC6M=" }, "readable-stream": { "version": "2.2.7", - "from": "readable-stream@2.2.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.7.tgz" + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.7.tgz", + "integrity": "sha1-BwV6y+JGeyIELTb5jFrVBwVOlbE=", + "requires": { + "buffer-shims": "~1.0.0", + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "~1.0.0", + "process-nextick-args": "~1.0.6", + "string_decoder": "~1.0.0", + "util-deprecate": "~1.0.1" + } }, "string_decoder": { "version": "1.0.3", - "from": "string_decoder@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz" + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz", + "integrity": "sha512-4AH6Z5fzNNBcH+6XDMfA/BTt87skxqJlO0lAh3Dker5zThcAxG6mKz+iGu308UKoPPQ8Dcqx/4JhujzltRa+hQ==", + "requires": { + "safe-buffer": "~5.1.0" + } } } }, "mongodb-core": { "version": "2.1.20", - "from": "mongodb-core@2.1.20", - "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-2.1.20.tgz" + "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-2.1.20.tgz", + "integrity": "sha512-IN57CX5/Q1bhDq6ShAR6gIv4koFsZP7L8WOK1S0lR0pVDQaScffSMV5jxubLsmZ7J+UdqmykKw4r9hG3XQEGgQ==", + "requires": { + "bson": "~1.0.4", + "require_optional": "~1.0.0" + } }, "mongojs": { "version": "2.6.0", - "from": "mongojs@2.6.0", - "resolved": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz" + "resolved": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz", + "integrity": "sha512-r6tj71DjYcaRTi2jpa+CA6Iq72cTZclB2JKy+Zub+0JPTEq/l2plsAYfF2eHqSYBtZbKNcObvhGYk9E9UKZWJg==", + "requires": { + "each-series": "^1.0.0", + "mongodb": "^2.2.31", + "once": "^1.4.0", + "parse-mongo-url": "^1.1.1", + "readable-stream": "^2.3.3", + "thunky": "^1.0.2", + "to-mongodb-core": "^2.0.0", + "xtend": "^4.0.1" + } + }, + "morgan": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.1.1.tgz", + "integrity": "sha1-zeRdLoB+vMQ5dFhG6oA5LmkJgUY=", + "requires": { + "bytes": "1.0.0" + } }, "ms": { "version": "2.0.0", - "from": "ms@2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": 
"sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "multiparty": { + "version": "3.2.9", + "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-3.2.9.tgz", + "integrity": "sha1-xzNz6pwBLnd2zlvEDJNiZLa6LB4=", + "requires": { + "readable-stream": "~1.1.9", + "stream-counter": "~0.2.0" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } }, "mv": { "version": "2.1.1", - "from": "mv@~2", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", "optional": true, + "requires": { + "mkdirp": "~0.5.1", + "ncp": "~2.0.0", + "rimraf": "~2.4.0" + }, "dependencies": { "mkdirp": { "version": "0.5.1", - "from": "mkdirp@>=0.5.1 <0.6.0", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "optional": true + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "optional": true, + "requires": { + "minimist": "0.0.8" + } } } }, "nan": { "version": "2.12.1", - "from": "nan@>=2.0.8 <3.0.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz" + "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz", + "integrity": "sha512-JY7V6lRkStKcKTvHO5NVSQRv+RV+FIL5pvDoLiAtSL9pKlC5x9PKQcZDsq7m4FO4d57mkhC6Z+QhAh3Jdk5JFw==" + }, + "native-or-bluebird": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/native-or-bluebird/-/native-or-bluebird-1.1.2.tgz", + "integrity": "sha1-OSHhECMtHreQ89rGG7NwUxx9NW4=" }, "ncp": { "version": "2.0.0", - "from": "ncp@~2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", "optional": true }, + "negotiator": { + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.4.6.tgz", + "integrity": "sha1-9F+vn6gz7TylElDqmn3fxCZ6RLM=" + }, "node-fetch": { "version": "2.6.0", - "from": "node-fetch@>=2.3.0 <3.0.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz" + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", + "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" }, "node-forge": { "version": "0.8.4", - "from": "node-forge@>=0.8.0 <0.9.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz" + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz", + "integrity": "sha512-UOfdpxivIYY4g5tqp5FNRNgROVNxRACUxxJREntJLFaJr1E0UEqFtUIk0F/jYx/E+Y6sVXd0KDi/m5My0yGCVw==" }, "oauth-sign": { "version": "0.3.0", - "from": "oauth-sign@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.3.0.tgz", + "integrity": "sha1-y1QPk7srIqfVlBaRoojWDo6pOG4=" + }, + "on-headers": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-0.0.0.tgz", + "integrity": "sha1-7igX+DRDJXhc2cLfKyQrvBfK9MQ=" 
}, "once": { "version": "1.4.0", - "from": "once@^1.3.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1" + } }, "p-limit": { "version": "2.2.0", - "from": "p-limit@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz" + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "requires": { + "p-try": "^2.0.0" + } }, "p-try": { "version": "2.2.0", - "from": "p-try@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, "parse-duration": { "version": "0.1.1", - "from": "parse-duration@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz", + "integrity": "sha1-ExFN3JiRwezSgANiRFVN5DZHoiY=" }, "parse-mongo-url": { "version": "1.1.1", - "from": "parse-mongo-url@^1.1.1", - "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz", + "integrity": "sha1-ZiON9fjnwMjKTNlw1KtqE3PrdbU=" }, "parse-ms": { "version": "2.1.0", - "from": "parse-ms@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz" + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", + "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" + }, + "parseurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.0.1.tgz", + "integrity": "sha1-Llfc5u/dN8NRhwEDCUTCK/OIt7Q=" }, "path-is-absolute": { "version": "1.0.1", - "from": "path-is-absolute@^1.0.0", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-parse": { "version": "1.0.6", - "from": "path-parse@>=1.0.5 <2.0.0", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz" + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, "pause": { "version": "0.0.1", - "from": "pause@0.0.1", - "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", + "integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=" }, "performance-now": { "version": "2.1.0", - "from": "performance-now@>=2.1.0 <3.0.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz" + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "pify": { "version": "4.0.1", - "from": "pify@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz" + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": 
"sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" }, "pretty-ms": { "version": "4.0.0", - "from": "pretty-ms@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz" + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", + "integrity": "sha512-qG66ahoLCwpLXD09ZPHSCbUWYTqdosB7SMP4OffgTgL2PBKXMuUsrk5Bwg8q4qPkjTXsKBMr+YK3Ltd/6F9s/Q==", + "requires": { + "parse-ms": "^2.0.0" + } }, "process-nextick-args": { "version": "2.0.0", - "from": "process-nextick-args@>=2.0.0 <2.1.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", + "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" }, "prom-client": { "version": "11.5.1", - "from": "prom-client@>=11.1.3 <12.0.0", - "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz" + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz", + "integrity": "sha512-AcFuxVgzoA/4nlpeg9SkM2HkDjNU3V7g2LCLwpudXSbcSLiFpRMVfsCoCY5RYeR/d9jkQng1mCmVKj1mPHvP0Q==", + "requires": { + "tdigest": "^0.1.1" + } }, "protobufjs": { "version": "6.8.8", - "from": "protobufjs@>=6.8.6 <6.9.0", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", + "integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.0", + "@types/node": "^10.1.0", + "long": "^4.0.0" + }, "dependencies": { "@types/node": { "version": "10.14.9", - "from": "@types/node@>=10.1.0 <11.0.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz" + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz", + "integrity": "sha512-NelG/dSahlXYtSoVPErrp06tYFrvzj8XLWmKA+X8x0W//4MqbUyZu++giUG/v0bjAT6/Qxa8IjodrfdACyb0Fg==" } } }, + "proxy-addr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-1.0.1.tgz", + "integrity": "sha1-x8Vm1etOP61n7rnHfFVYzMObiKg=", + "requires": { + "ipaddr.js": "0.1.2" + } + }, "psl": { "version": "1.1.32", - "from": "psl@>=1.1.24 <2.0.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz" + "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz", + "integrity": "sha512-MHACAkHpihU/REGGPLj4sEfc/XKW2bheigvHO1dUqjaKigMp1C8+WLQYRGgeKFMsw5PMfegZcaN8IDXK/cD0+g==" }, "punycode": { "version": "1.4.1", - "from": "punycode@>=1.4.1 <2.0.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" }, "q": { "version": "0.9.2", - "from": "q@0.9.2", - "resolved": "https://registry.npmjs.org/q/-/q-0.9.2.tgz" + "resolved": "https://registry.npmjs.org/q/-/q-0.9.2.tgz", + "integrity": "sha1-I8BsRsgTKGFqrhaNPuI6Vr1D2vY=" }, "qs": { "version": "0.6.5", - "from": "qs@0.6.5", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.5.tgz" + "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.5.tgz", 
+ "integrity": "sha1-KUsmjksNQlD23eGbO4s0k13/FO8=" + }, + "rand-token": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/rand-token/-/rand-token-0.2.1.tgz", + "integrity": "sha1-3GfIEjMGyRInstw/W+pz0wE3YiY=" }, "range-parser": { - "version": "0.0.4", - "from": "range-parser@0.0.4", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.0.0.tgz", + "integrity": "sha1-pLJkz+C+XONqvjdlrJwqJIdG28A=" }, "raven": { "version": "1.1.3", - "from": "raven@1.1.3", "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", + "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", + "requires": { + "cookie": "0.3.1", + "json-stringify-safe": "5.0.1", + "lsmod": "1.0.0", + "stack-trace": "0.0.9", + "uuid": "3.0.0" + }, "dependencies": { "cookie": { "version": "0.3.1", - "from": "cookie@0.3.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz" + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" } } }, + "raw-body": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.2.2.tgz", + "integrity": "sha1-DGjh7ijP7X26SCIjSuxgeEYcvB8=", + "requires": { + "bytes": "1", + "iconv-lite": "0.4.3" + } + }, "readable-stream": { "version": "2.3.6", - "from": "readable-stream@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } }, "redis": { "version": "0.11.0", - "from": "redis@>=0.11.0 <0.12.0", - "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz" + "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz", + "integrity": "sha1-/cAdSrTL5LO7LLKByP5WnDhX9XE=" }, "redis-commands": { "version": "1.5.0", - "from": "redis-commands@1.5.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz" + "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", + "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==" }, "redis-errors": { "version": "1.2.0", - "from": "redis-errors@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz" + "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", + "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60=" }, "redis-parser": { "version": "3.0.0", - "from": "redis-parser@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", + "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=", + "requires": { + "redis-errors": "^1.0.0" + } }, "redis-sentinel": { "version": "0.1.1", - "from": "redis-sentinel@0.1.1", - "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz", + "integrity": "sha1-Vj3TQduZMgMfSX+v3Td+hkj/s+U=", + "requires": { + "q": "0.9.2", + "redis": 
"0.11.x" + } }, "redis-sharelatex": { "version": "1.0.11", - "from": "redis-sharelatex@1.0.11", "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.11.tgz", + "integrity": "sha512-rKXPVLmFC9ycpRc5e4rULOwi9DB0LqRcWEiUxQuJNSVgcqCxpGqVw+zwivo+grk3G2tGpduh3/8y+4KVHWOntw==", + "requires": { + "async": "^2.5.0", + "coffee-script": "1.8.0", + "ioredis": "~4.14.1", + "redis-sentinel": "0.1.1", + "underscore": "1.7.0" + }, "dependencies": { "coffee-script": { "version": "1.8.0", - "from": "coffee-script@1.8.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz", + "integrity": "sha1-nJ8dK0pSoADe0Vtll5FwNkgmPB0=", + "requires": { + "mkdirp": "~0.3.5" + } }, "underscore": { "version": "1.7.0", - "from": "underscore@1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", + "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=" } } }, "request": { "version": "2.25.0", - "from": "request@2.25.0", "resolved": "https://registry.npmjs.org/request/-/request-2.25.0.tgz", + "integrity": "sha1-2sFnMYGIf+CyzmvX4S9G1VSgLOk=", + "requires": { + "aws-sign": "~0.3.0", + "cookie-jar": "~0.3.0", + "forever-agent": "~0.5.0", + "form-data": "~0.1.0", + "hawk": "~1.0.0", + "http-signature": "~0.10.0", + "json-stringify-safe": "~5.0.0", + "mime": "~1.2.9", + "node-uuid": "~1.4.0", + "oauth-sign": "~0.3.0", + "qs": "~0.6.0", + "tunnel-agent": "~0.3.0" + }, "dependencies": { "node-uuid": { "version": "1.4.8", - "from": "node-uuid@>=1.4.0 <1.5.0", - "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz" + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", + "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" } } }, "requestretry": { "version": "1.12.2", - "from": "requestretry@>=1.12.0 <2.0.0", "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-1.12.2.tgz", + "integrity": "sha512-wDYnH4imurLs5upu31WoPaOFfEu31qhFlF7KgpYbBsmBagFmreZZo8E/XpoQ3erCP5za+72t8k8QI4wlrtwVXw==", + "requires": { + "extend": "^3.0.0", + "lodash": "^4.15.0", + "request": "^2.74.0", + "when": "^3.7.7" + }, "dependencies": { "assert-plus": { "version": "1.0.0", - "from": "assert-plus@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" }, "boom": { "version": "4.3.1", - "from": "boom@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz" + "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", + "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=", + "requires": { + "hoek": "4.x.x" + } }, "combined-stream": { "version": "1.0.5", - "from": "combined-stream@>=1.0.5 <1.1.0", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "integrity": "sha1-k4NwpXtKUd6ix3wV1cX9+JUWQAk=", + "requires": { + "delayed-stream": "~1.0.0" + } }, "cryptiles": { "version": "3.1.2", - "from": "cryptiles@>=3.0.0 <4.0.0", "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", + "integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=", + "requires": { + "boom": "5.x.x" + }, "dependencies": { "boom": { "version": "5.2.0", - "from": "boom@>=5.0.0 <6.0.0", - 
"resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz" + "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz", + "integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==", + "requires": { + "hoek": "4.x.x" + } } } }, "delayed-stream": { "version": "1.0.0", - "from": "delayed-stream@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" }, "forever-agent": { "version": "0.6.1", - "from": "forever-agent@>=0.6.1 <0.7.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" }, "form-data": { "version": "2.3.1", - "from": "form-data@>=2.3.1 <2.4.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.1.tgz" + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.1.tgz", + "integrity": "sha1-b7lPvXGIUwbXPRXMSX/kzE7NRL8=", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.5", + "mime-types": "^2.1.12" + } }, "hawk": { "version": "6.0.2", - "from": "hawk@>=6.0.2 <6.1.0", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz" + "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz", + "integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==", + "requires": { + "boom": "4.x.x", + "cryptiles": "3.x.x", + "hoek": "4.x.x", + "sntp": "2.x.x" + } }, "hoek": { "version": "4.2.0", - "from": "hoek@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.0.tgz" + "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.0.tgz", + "integrity": "sha512-v0XCLxICi9nPfYrS9RL8HbYnXi9obYAeLbSP00BmnZwCK9+Ih9WOjoZ8YoHCoav2csqn4FOz4Orldsy2dmDwmQ==" }, "http-signature": { "version": "1.2.0", - "from": "http-signature@>=1.2.0 <1.3.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz" + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } }, "oauth-sign": { "version": "0.8.2", - "from": "oauth-sign@>=0.8.2 <0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz" + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=" }, "qs": { "version": "6.5.1", - "from": "qs@>=6.5.1 <6.6.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz" + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" }, "request": { "version": "2.83.0", - "from": "request@>=2.74.0 <3.0.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz" + "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz", + "integrity": "sha512-lR3gD69osqm6EYLk9wB/G1W/laGWjzH90t1vEa2xuxHD5KUrSzp9pUSfTm+YC5Nxt2T8nMPEvKlhbQayU7bgFw==", + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.6.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.5", + "extend": "~3.0.1", + "forever-agent": "~0.6.1", + "form-data": "~2.3.1", + "har-validator": "~5.0.3", + "hawk": "~6.0.2", + 
"http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.17", + "oauth-sign": "~0.8.2", + "performance-now": "^2.1.0", + "qs": "~6.5.1", + "safe-buffer": "^5.1.1", + "stringstream": "~0.0.5", + "tough-cookie": "~2.3.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.1.0" + } }, "sntp": { "version": "2.1.0", - "from": "sntp@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz" + "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz", + "integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==", + "requires": { + "hoek": "4.x.x" + } }, "tunnel-agent": { "version": "0.6.0", - "from": "tunnel-agent@>=0.6.0 <0.7.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz" + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "^5.0.1" + } }, "uuid": { "version": "3.1.0", - "from": "uuid@>=3.1.0 <4.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", + "integrity": "sha512-DIWtzUkw04M4k3bf1IcpS2tngXEL26YUD2M0tMDUpnUrz2hgzUBlD55a4FjdLGPvfHxS6uluGWvaVEqgBcVa+g==" } } }, - "require_optional": { - "version": "1.0.1", - "from": "require_optional@~1.0.0", - "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz" - }, "require-in-the-middle": { "version": "4.0.0", - "from": "require-in-the-middle@>=4.0.0 <5.0.0", "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.0.tgz", + "integrity": "sha512-GX12iFhCUzzNuIqvei0dTLUbBEjZ420KTY/MmDxe2GQKPDGyH/wgfGMWFABpnM/M6sLwC3IaSg8A95U6gIb+HQ==", + "requires": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.10.0" + }, "dependencies": { "debug": { "version": "4.1.1", - "from": "debug@>=4.1.1 <5.0.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } }, "ms": { "version": "2.1.2", - "from": "ms@>=2.1.1 <3.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" } } }, "require-like": { "version": "0.1.2", - "from": "require-like@0.1.2", - "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=" + }, + "require_optional": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", + "integrity": "sha512-qhM/y57enGWHAe3v/NcwML6a3/vfESLe/sGM2dII+gEO0BpKRUkWZow/tyloNqJyN6kXSl3RyyM8Ll5D/sJP8g==", + "requires": { + "resolve-from": "^2.0.0", + "semver": "^5.1.0" + } }, "resolve": { "version": "1.11.0", - "from": "resolve@>=1.10.0 <2.0.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz" + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz", + "integrity": 
"sha512-WL2pBDjqT6pGUNSUzMw00o4T7If+z4H2x3Gz893WoUQ5KW8Vr9txp00ykiP16VBaZF5+j/OcXJHZ9+PCvdiDKw==", + "requires": { + "path-parse": "^1.0.6" + } }, "resolve-from": { "version": "2.0.0", - "from": "resolve-from@^2.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", + "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" + }, + "response-time": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/response-time/-/response-time-2.0.0.tgz", + "integrity": "sha1-Zcs5/VDeL0/9vdKF8YVZZr1vyzY=", + "requires": { + "on-headers": "0.0.0" + } }, "retry-axios": { "version": "0.3.2", - "from": "retry-axios@>=0.3.2 <0.4.0", - "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz" + "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", + "integrity": "sha512-jp4YlI0qyDFfXiXGhkCOliBN1G7fRH03Nqy8YdShzGqbY5/9S2x/IR6C88ls2DFkbWuL3ASkP7QD3pVrNpPgwQ==" }, "retry-request": { "version": "4.0.0", - "from": "retry-request@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.0.0.tgz" + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.0.0.tgz", + "integrity": "sha512-S4HNLaWcMP6r8E4TMH52Y7/pM8uNayOcTDDQNBwsCccL1uI+Ol2TljxRDPzaNfbhOB30+XWP5NnZkB3LiJxi1w==", + "requires": { + "through2": "^2.0.0" + } }, "rimraf": { "version": "2.4.5", - "from": "rimraf@~2.4.0", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "optional": true + "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", + "optional": true, + "requires": { + "glob": "^6.0.1" + } + }, + "rndm": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/rndm/-/rndm-1.2.0.tgz", + "integrity": "sha1-8z/pz7Urv9UgqhgyO8ZdsRCht2w=" }, "safe-buffer": { "version": "5.1.1", - "from": "safe-buffer@>=5.1.1 <6.0.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz" + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", + "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" }, "safe-json-stringify": { "version": "1.2.0", - "from": "safe-json-stringify@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", + "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, "sandboxed-module": { "version": "0.2.2", - "from": "sandboxed-module@>=0.2.0 <0.3.0", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", + "integrity": "sha1-bL3sghOAx31FdcjIeDi5ET5kulA=", + "requires": { + "require-like": "0.1.2", + "stack-trace": "0.0.6" + }, "dependencies": { "stack-trace": { "version": "0.0.6", - "from": "stack-trace@0.0.6", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz" + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", + "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=" } } }, + "scmp": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/scmp/-/scmp-0.0.3.tgz", + "integrity": "sha1-NkjfLXKUZB5/eGc//CloHZutkHM=" + }, "semver": { "version": "5.6.0", - "from": "semver@>=5.5.0 <6.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz" + "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz", + "integrity": 
"sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==" }, "send": { - "version": "0.1.3", - "from": "send@0.1.3", - "resolved": "https://registry.npmjs.org/send/-/send-0.1.3.tgz" + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/send/-/send-0.4.3.tgz", + "integrity": "sha1-lieyO3cH+/Y3ODHKxXkzMLWUtkA=", + "requires": { + "debug": "1.0.2", + "escape-html": "1.0.1", + "finished": "1.2.2", + "fresh": "0.2.2", + "mime": "1.2.11", + "range-parser": "~1.0.0" + }, + "dependencies": { + "debug": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "requires": { + "ms": "0.6.2" + } + }, + "ms": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + } + } + }, + "serve-favicon": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/serve-favicon/-/serve-favicon-2.0.1.tgz", + "integrity": "sha1-SCaXXZ8XPKOkFY6WmBYfdd7Hr+w=", + "requires": { + "fresh": "0.2.2" + } + }, + "serve-index": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.1.2.tgz", + "integrity": "sha1-B0K0gJmCV1OcLSrMbKH0qvJn+XI=", + "requires": { + "accepts": "1.0.3", + "batch": "0.5.1" + } + }, + "serve-static": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.2.3.tgz", + "integrity": "sha1-k87Lw0Dweey4WJKB0dwxwmwM0Vg=", + "requires": { + "escape-html": "1.0.1", + "parseurl": "1.0.1", + "send": "0.4.3" + } }, "settings-sharelatex": { "version": "1.1.0", - "from": "settings-sharelatex@1.1.0", "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", + "integrity": "sha512-f7D+0lnlohoteSn6IKTH72NE+JnAdMWTKwQglAuimZWTID2FRRItZSGeYMTRpvEnaQApkoVwRp//WRMsiddnqw==", + "requires": { + "coffee-script": "1.6.0" + }, "dependencies": { "coffee-script": { "version": "1.6.0", - "from": "coffee-script@1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" } } }, "shimmer": { "version": "1.2.1", - "from": "shimmer@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz" + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" }, "sinon": { "version": "1.5.2", - "from": "sinon@>=1.5.2 <1.6.0", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-1.5.2.tgz" + "resolved": "https://registry.npmjs.org/sinon/-/sinon-1.5.2.tgz", + "integrity": "sha1-nKvGx4vfRF1/gxHVSWhi+VRoxPg=", + "requires": { + "buster-format": "~0.5" + } }, "sntp": { "version": "0.2.4", - "from": "sntp@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz" + "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz", + "integrity": "sha1-+4hfGLDzqtGJ+CSGJTa87ux1CQA=", + "requires": { + "hoek": "0.9.x" + } }, "source-map": { "version": "0.6.1", - "from": "source-map@>=0.6.1 <0.7.0", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "split": { 
"version": "1.0.1", - "from": "split@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "requires": { + "through": "2" + } }, "sshpk": { "version": "1.13.1", - "from": "sshpk@>=1.7.0 <2.0.0", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz", + "integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=", + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "tweetnacl": "~0.14.0" + }, "dependencies": { "asn1": { "version": "0.2.3", - "from": "asn1@>=0.2.3 <0.3.0", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz" + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=" }, "assert-plus": { "version": "1.0.0", - "from": "assert-plus@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" } } }, "stack-trace": { "version": "0.0.9", - "from": "stack-trace@0.0.9", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" }, "standard-as-callback": { "version": "2.0.1", - "from": "standard-as-callback@>=2.0.1 <3.0.0", - "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz" + "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", + "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" }, "statsd-parser": { "version": "0.0.4", - "from": "statsd-parser@>=0.0.4 <0.1.0", - "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" + "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", + "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" + }, + "stream-counter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz", + "integrity": "sha1-3tJmVWMZyLDiIoErnPOyb6fZR94=", + "requires": { + "readable-stream": "~1.1.8" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } }, "stream-shift": { "version": "1.0.0", - "from": "stream-shift@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", + "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" }, "string_decoder": { "version": 
"1.1.1", - "from": "string_decoder@>=1.1.1 <1.2.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } }, "stringstream": { "version": "0.0.5", - "from": "stringstream@>=0.0.5 <0.1.0", - "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg=" }, "tdigest": { "version": "0.1.1", - "from": "tdigest@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", + "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", + "requires": { + "bintrees": "1.0.1" + } }, "teeny-request": { "version": "3.11.3", - "from": "teeny-request@>=3.6.0 <4.0.0", "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", + "requires": { + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.2.0", + "uuid": "^3.3.2" + }, "dependencies": { "uuid": { "version": "3.3.2", - "from": "uuid@>=3.3.2 <4.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, "through": { "version": "2.3.8", - "from": "through@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz" + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, "through2": { "version": "2.0.5", - "from": "through2@>=2.0.3 <3.0.0", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } }, "thunky": { "version": "1.0.3", - "from": "thunky@^1.0.2", - "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.0.3.tgz" + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.0.3.tgz", + "integrity": "sha512-YwT8pjmNcAXBZqrubu22P4FYsh2D4dxRmnWBOL8Jk8bUcRUtc5326kx32tuTmFDAZtLOGEVNl8POAR8j896Iow==" }, "timekeeper": { "version": "2.0.0", - "from": "timekeeper@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.0.0.tgz", + "integrity": "sha512-DVH+iEKcVwU3JkZK0Z86qFx8osIG05U1H/F6lAE+iPfvElioM9HPVd2ZKmoI4zS0AWsDogOXl/BuKWXNadI/fw==", "dev": true }, "to-mongodb-core": { "version": "2.0.0", - "from": "to-mongodb-core@^2.0.0", - "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz", + "integrity": "sha1-NZbsdhOsmtO5ioncua77pWnNJ+s=" }, "tough-cookie": { "version": "2.3.3", - "from": "tough-cookie@>=2.3.3 <2.4.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.3.tgz" + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.3.tgz", + "integrity": 
"sha1-C2GKVWW23qkL80JdBNVe3EdadWE=", + "requires": { + "punycode": "^1.4.1" + } }, "tunnel-agent": { "version": "0.3.0", - "from": "tunnel-agent@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.3.0.tgz", + "integrity": "sha1-rWgbaPUyGtKCfEz7G31d8s/pQu4=" }, "tweetnacl": { "version": "0.14.5", - "from": "tweetnacl@>=0.14.0 <0.15.0", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", "optional": true }, - "uid2": { - "version": "0.0.2", - "from": "uid2@0.0.2", - "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.2.tgz" + "type-is": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.3.1.tgz", + "integrity": "sha1-pnibWlITgomt4e+PbZ8odP/XC2s=", + "requires": { + "media-typer": "0.2.0", + "mime-types": "1.0.0" + }, + "dependencies": { + "mime-types": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.0.tgz", + "integrity": "sha1-antKavLn2S+Xr+A/BHx4AejwAdI=" + } + } + }, + "uid-safe": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-1.1.0.tgz", + "integrity": "sha1-WNbF2r+N+9jVKDSDmAbAP9YUMjI=", + "requires": { + "base64-url": "1.2.1", + "native-or-bluebird": "~1.1.2" + }, + "dependencies": { + "base64-url": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/base64-url/-/base64-url-1.2.1.tgz", + "integrity": "sha1-GZ/WYXAqDnt9yubgaYuwicUvbXg=" + } + } }, "underscore": { "version": "1.6.0", - "from": "underscore@>=1.6.0 <1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", + "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" }, "uri-js": { "version": "4.2.2", - "from": "uri-js@>=4.2.2 <5.0.0", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", + "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "requires": { + "punycode": "^2.1.0" + }, "dependencies": { "punycode": { "version": "2.1.1", - "from": "punycode@>=2.1.0 <3.0.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" } } }, "util-deprecate": { "version": "1.0.2", - "from": "util-deprecate@>=1.0.1 <1.1.0", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "utils-merge": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.0.tgz", + "integrity": "sha1-ApT7kiu5N1FTVBxPcJYjHyh8ivg=" }, "uuid": { "version": "3.0.0", - "from": "uuid@3.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", + "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" + }, + "vary": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/vary/-/vary-0.1.0.tgz", + "integrity": "sha1-3wlFiZ6TwMxb0YzIMh2dIedPYXY=" }, "verror": { "version": "1.10.0", - "from": "verror@1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + 
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + }, "dependencies": { "assert-plus": { "version": "1.0.0", - "from": "assert-plus@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" } } }, + "vhost": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/vhost/-/vhost-2.0.0.tgz", + "integrity": "sha1-HiZ3C9D86GxAlFWR5vKExokXkeI=" + }, "when": { "version": "3.7.8", - "from": "when@>=3.7.7 <4.0.0", - "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz" + "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", + "integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I=" }, "wrappy": { "version": "1.0.2", - "from": "wrappy@1", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "xtend": { "version": "4.0.1", - "from": "xtend@>=4.0.1 <4.1.0", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=" }, "yallist": { "version": "3.0.3", - "from": "yallist@>=3.0.2 <4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz" + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", + "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==" } } } diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 886fa3515c..927584fc08 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -22,7 +22,7 @@ "dependencies": { "async": "^2.5.0", "coffee-script": "~1.7.0", - "express": "3.3.4", + "express": "3.11.0", "lodash": "^4.17.4", "logger-sharelatex": "^1.7.0", "lynx": "0.0.11", From 6871889bfb34cbf5269b8ed3d2a9ceb1cdbd1432 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Nov 2019 11:38:25 +0000 Subject: [PATCH 525/769] Bump request from 2.25.0 to 2.47.0 Bumps [request](https://github.com/request/request) from 2.25.0 to 2.47.0. 
- [Release notes](https://github.com/request/request/releases) - [Changelog](https://github.com/request/request/blob/master/CHANGELOG.md) - [Commits](https://github.com/request/request/compare/v2.25.0...v2.47.0) Signed-off-by: dependabot[bot] --- services/document-updater/npm-shrinkwrap.json | 112 ++++++++++++------ services/document-updater/package.json | 2 +- 2 files changed, 77 insertions(+), 37 deletions(-) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/npm-shrinkwrap.json index b508969560..964ffbc4d2 100644 --- a/services/document-updater/npm-shrinkwrap.json +++ b/services/document-updater/npm-shrinkwrap.json @@ -405,11 +405,6 @@ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, - "aws-sign": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.3.0.tgz", - "integrity": "sha1-PYHKabR0seFlGHKLUcJP8Lvtxuk=" - }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -481,6 +476,37 @@ "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, + "bl": { + "version": "0.9.5", + "resolved": "https://registry.npmjs.org/bl/-/bl-0.9.5.tgz", + "integrity": "sha1-wGt5evCF6gC8Unr8jvzxHeIjIFQ=", + "requires": { + "readable-stream": "~1.0.26" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } + }, "body-parser": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.4.3.tgz", @@ -764,11 +790,6 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.2.tgz", "integrity": "sha1-cv7D0k5Io0Mgc9kMEmQgBQYQBLE=" }, - "cookie-jar": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.3.0.tgz", - "integrity": "sha1-vJon1OK5fhhs1XyeIGPLmfpozMw=" - }, "cookie-parser": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.3.1.tgz", @@ -1288,9 +1309,9 @@ } }, "hawk": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.0.0.tgz", - "integrity": "sha1-uQuxaYByhUEdp//LjdJZhQLTtS0=", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.1.1.tgz", + "integrity": "sha1-h81JH5tG5OKurKM1QWdmiF0tHtk=", "requires": { "boom": "0.4.x", "cryptiles": "0.2.x", @@ -2072,10 +2093,15 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz", "integrity": "sha512-UOfdpxivIYY4g5tqp5FNRNgROVNxRACUxxJREntJLFaJr1E0UEqFtUIk0F/jYx/E+Y6sVXd0KDi/m5My0yGCVw==" }, + "node-uuid": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", + "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" + }, "oauth-sign": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.3.0.tgz", - 
"integrity": "sha1-y1QPk7srIqfVlBaRoojWDo6pOG4=" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.4.0.tgz", + "integrity": "sha1-8ilW8x6nFRqCHl8vsywRPK2Ln2k=" }, "on-headers": { "version": "0.0.0", @@ -2220,9 +2246,9 @@ "integrity": "sha1-I8BsRsgTKGFqrhaNPuI6Vr1D2vY=" }, "qs": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.5.tgz", - "integrity": "sha1-KUsmjksNQlD23eGbO4s0k13/FO8=" + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-2.3.3.tgz", + "integrity": "sha1-6eha2+ddoLvkyOBHaghikPhjtAQ=" }, "rand-token": { "version": "0.2.1", @@ -2336,28 +2362,42 @@ } }, "request": { - "version": "2.25.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.25.0.tgz", - "integrity": "sha1-2sFnMYGIf+CyzmvX4S9G1VSgLOk=", + "version": "2.47.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.47.0.tgz", + "integrity": "sha1-Cen9Gk/tZZOoBe+CArIPDF7LSF8=", "requires": { - "aws-sign": "~0.3.0", - "cookie-jar": "~0.3.0", + "aws-sign2": "~0.5.0", + "bl": "~0.9.0", + "caseless": "~0.6.0", + "combined-stream": "~0.0.5", "forever-agent": "~0.5.0", "form-data": "~0.1.0", - "hawk": "~1.0.0", + "hawk": "1.1.1", "http-signature": "~0.10.0", "json-stringify-safe": "~5.0.0", - "mime": "~1.2.9", + "mime-types": "~1.0.1", "node-uuid": "~1.4.0", - "oauth-sign": "~0.3.0", - "qs": "~0.6.0", - "tunnel-agent": "~0.3.0" + "oauth-sign": "~0.4.0", + "qs": "~2.3.1", + "stringstream": "~0.0.4", + "tough-cookie": ">=0.12.0", + "tunnel-agent": "~0.4.0" }, "dependencies": { - "node-uuid": { - "version": "1.4.8", - "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", - "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" + "aws-sign2": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz", + "integrity": "sha1-xXED96F/wDfwLXwuZLYC6iI/fWM=" + }, + "caseless": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.6.0.tgz", + "integrity": "sha1-gWfBq4OX+1u5X5bSjlqBxQ8kesQ=" + }, + "mime-types": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.2.tgz", + "integrity": "sha1-mVrhOSq4r/y/yyZB3QVOlDwNXc4=" } } }, @@ -2902,9 +2942,9 @@ } }, "tunnel-agent": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.3.0.tgz", - "integrity": "sha1-rWgbaPUyGtKCfEz7G31d8s/pQu4=" + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", + "integrity": "sha1-Y3PbdpCf5XDgjXNYM2Xtgop07us=" }, "tweetnacl": { "version": "0.14.5", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 927584fc08..f80251248b 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -29,7 +29,7 @@ "metrics-sharelatex": "^2.2.0", "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", - "request": "2.25.0", + "request": "2.47.0", "requestretry": "^1.12.0", "sandboxed-module": "~0.2.0", "settings-sharelatex": "^1.1.0", From 82a72c9b7315c405cd3d7954b9748dc836b3702f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 14 Nov 2019 16:32:59 +0000 Subject: [PATCH 526/769] fix missing bodyParser limit --- services/document-updater/app.coffee | 4 +- .../coffee/SettingADocumentTests.coffee | 78 +++++++++++++++++++ .../coffee/helpers/MockWebApi.coffee | 3 +- 3 files changed, 83 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app.coffee 
b/services/document-updater/app.coffee index 645eb8cc63..e3aee88bf7 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -28,7 +28,7 @@ Metrics.event_loop.monitor(logger, 100) app = express() app.configure -> app.use(Metrics.http.monitor(logger)); - app.use express.bodyParser() + app.use express.bodyParser({limit: (Settings.max_doc_length + 64 * 1024)}) app.use app.router Metrics.injectMetricsRoute(app) @@ -125,6 +125,8 @@ app.use (error, req, res, next) -> res.send 404 else if error instanceof Errors.OpRangeNotAvailableError res.send 422 # Unprocessable Entity + else if error.statusCode is 413 + res.send(413, "request entity too large") else logger.error err: error, req: req, "request errored" res.send(500, "Oops, something went wrong") diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index f5ec74cbd8..45fc732033 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -53,6 +53,11 @@ describe "Setting a document", -> , 200 return null + after -> + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + MockWebApi.setDocument.reset() + it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -89,6 +94,11 @@ describe "Setting a document", -> setTimeout done, 200 return null + after -> + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + MockWebApi.setDocument.reset() + it "should return a 204 status code", -> @statusCode.should.equal 204 @@ -110,6 +120,64 @@ describe "Setting a document", -> done() return null + describe "when the updated doc is too large for the body parser", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + @newLines = [] + while JSON.stringify(@newLines).length < Settings.max_doc_length + 64 * 1024 + @newLines.push("(a long line of text)".repeat(10000)) + console.log("newlines size",JSON.stringify(@newLines).length) + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => + @statusCode = res.statusCode + setTimeout done, 200 + return null + + after -> + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + MockWebApi.setDocument.reset() + + it "should return a 413 status code", -> + @statusCode.should.equal 413 + + it "should not send the updated doc lines to the web api", -> + MockWebApi.setDocument.called.should.equal false + + it "should not flush track changes", -> + MockTrackChangesApi.flushDoc.called.should.equal false + + it "should not flush project history", -> + MockProjectHistoryApi.flushProject.called.should.equal false + + describe "when the updated doc is large but under the bodyParser and HTTPController size limit", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + + @newLines = [] + while JSON.stringify(@newLines).length < 2 * 1024 * 1024 # limit in HTTPController + @newLines.push("(a long line of text)".repeat(10000)) + @newLines.pop() # remove the line which took it over the limit + console.log("newlines 
size",JSON.stringify(@newLines).length) + DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => + @statusCode = res.statusCode + setTimeout done, 200 + return null + + after -> + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + MockWebApi.setDocument.reset() + + it "should return a 204 status code", -> + @statusCode.should.equal 204 + + it "should send the updated doc lines to the web api", -> + MockWebApi.setDocument + .calledWith(@project_id, @doc_id, @newLines) + .should.equal true + describe "with track changes", -> before -> @lines = ["one", "one and a half", "two", "three"] @@ -139,6 +207,11 @@ describe "Setting a document", -> setTimeout done, 200 return null + after -> + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + MockWebApi.setDocument.reset() + it "should undo the tracked changes", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => throw error if error? @@ -161,6 +234,11 @@ describe "Setting a document", -> setTimeout done, 200 return null + after -> + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + MockWebApi.setDocument.reset() + it "should not undo the tracked changes", (done) -> DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => throw error if error? diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index 5ee673ccf7..4f73017a1d 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -1,5 +1,6 @@ express = require("express") app = express() +MAX_REQUEST_SIZE = 2*(2*1024*1024 + 64*1024) module.exports = MockWebApi = docs: {} @@ -35,7 +36,7 @@ module.exports = MockWebApi = else res.send 404 - app.post "/project/:project_id/doc/:doc_id", express.bodyParser(), (req, res, next) => + app.post "/project/:project_id/doc/:doc_id", express.bodyParser({limit: MAX_REQUEST_SIZE}), (req, res, next) => MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, (error) -> if error? 
res.send 500 From 08ee58abb844d2ef0aeb8f8821c7315525ee562c Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 15 Nov 2019 09:34:13 +0000 Subject: [PATCH 527/769] remove console.log from tests --- .../test/acceptance/coffee/SettingADocumentTests.coffee | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee index 45fc732033..5ea43a39cc 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee @@ -127,7 +127,6 @@ describe "Setting a document", -> @newLines = [] while JSON.stringify(@newLines).length < Settings.max_doc_length + 64 * 1024 @newLines.push("(a long line of text)".repeat(10000)) - console.log("newlines size",JSON.stringify(@newLines).length) DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => @statusCode = res.statusCode setTimeout done, 200 @@ -159,7 +158,6 @@ describe "Setting a document", -> while JSON.stringify(@newLines).length < 2 * 1024 * 1024 # limit in HTTPController @newLines.push("(a long line of text)".repeat(10000)) @newLines.pop() # remove the line which took it over the limit - console.log("newlines size",JSON.stringify(@newLines).length) DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => @statusCode = res.statusCode setTimeout done, 200 From dcd7649badcbbedfd2488fae55d1e8d25ff786ab Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 15 Nov 2019 16:53:16 +0000 Subject: [PATCH 528/769] filter track-changes updates for projects using project-history --- .../app/coffee/DocumentManager.coffee | 8 +++-- .../app/coffee/HistoryManager.coffee | 29 +++++++++++++----- .../app/coffee/PersistenceManager.coffee | 4 +-- .../app/coffee/RedisManager.coffee | 30 ++++++++++++++----- .../config/settings.defaults.coffee | 5 ++-- .../DocumentManagerTests.coffee | 9 +++++- .../HistoryManager/HistoryManagerTests.coffee | 25 ++++++++++++---- .../RedisManager/RedisManagerTests.coffee | 1 + 8 files changed, 82 insertions(+), 29 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 5183a3aaea..59db98e97f 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -23,12 +23,14 @@ module.exports = DocumentManager = return callback(error) if error? if !lines? or !version? logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> + PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) -> return callback(error) if error? - logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API" + logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId, projectHistoryType}, "got doc from persistence API" RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) -> return callback(error) if error? 
- callback null, lines, version, ranges, pathname, projectHistoryId, null, false + RedisManager.setHistoryType doc_id, projectHistoryType, (error) -> + return callback(error) if error? + callback null, lines, version, ranges, pathname, projectHistoryId, null, false else callback null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 6b68b4b676..286292e6b3 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -11,14 +11,19 @@ module.exports = HistoryManager = if !Settings.apis?.trackchanges? logger.warn { doc_id }, "track changes API is not configured, so not flushing" return - - url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" - logger.log { project_id, doc_id, url }, "flushing doc in track changes api" - request.post url, (error, res, body)-> - if error? - logger.error { error, doc_id, project_id}, "track changes doc to track changes api" - else if res.statusCode < 200 and res.statusCode >= 300 - logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}" + RedisManager.getHistoryType doc_id, (err, projectHistoryType) -> + if err? + logger.error {err, doc_id}, "error getting history type" + else if projectHistoryType is "project-history" + logger.debug {doc_id, projectHistoryType}, "skipping track-changes flush" + else + url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" + logger.log { project_id, doc_id, url, projectHistoryType }, "flushing doc in track changes api" + request.post url, (error, res, body)-> + if error? + logger.error { error, doc_id, project_id}, "error flushing doc to track changes api" + else if res.statusCode < 200 or res.statusCode >= 300 + logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}" # flush changes in the background flushProjectChangesAsync: (project_id) -> @@ -52,6 +57,7 @@ module.exports = HistoryManager = if ops.length == 0 return callback() + # record updates for project history if Settings.apis?.project_history?.enabled if HistoryManager.shouldFlushHistoryOps(project_ops_length, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS) # Do this in the background since it uses HTTP and so may be too @@ -59,6 +65,13 @@ logger.log { project_ops_length, project_id }, "flushing project history api" HistoryManager.flushProjectChangesAsync project_id + # if the doc_ops_length is undefined it means the project is not using track-changes + # so we can bail out here + if typeof(doc_ops_length) is 'undefined' + logger.debug { project_id, doc_id}, "skipping flush to track-changes, only using project-history" + return callback() + + # record updates for track-changes HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) -> return callback(error) if error?
if HistoryManager.shouldFlushHistoryOps(doc_ops_length, ops.length, HistoryManager.FLUSH_DOC_EVERY_N_OPS) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index ee80453137..543b9f0c22 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -13,7 +13,7 @@ request = (require("requestretry")).defaults({ MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId) ->) -> + getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") callback = (args...) -> timer.done() @@ -44,7 +44,7 @@ module.exports = PersistenceManager = return callback(new Error("web API response had no valid doc version")) if !body.pathname? return callback(new Error("web API response had no valid doc pathname")) - return callback null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId + return callback null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId, body.projectHistoryType else if res.statusCode == 404 return callback(new Errors.NotFoundError("doc not not found: #{url}")) else diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 12227fae21..7729486986 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -78,6 +78,7 @@ module.exports = RedisManager = multi.del keys.ranges(doc_id:doc_id) multi.del keys.pathname(doc_id:doc_id) multi.del keys.projectHistoryId(doc_id:doc_id) + multi.del keys.projectHistoryType(doc_id:doc_id) multi.del keys.unflushedTime(doc_id:doc_id) multi.del keys.lastUpdatedAt(doc_id: doc_id) multi.del keys.lastUpdatedBy(doc_id: doc_id) @@ -154,11 +155,11 @@ module.exports = RedisManager = logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy - getDocVersion: (doc_id, callback = (error, version) ->) -> - rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> + getDocVersion: (doc_id, callback = (error, version, projectHistoryType) ->) -> + rclient.mget keys.docVersion(doc_id: doc_id), keys.projectHistoryType(doc_id:doc_id), (error, version, projectHistoryType) -> return callback(error) if error? version = parseInt(version, 10) - callback null, version + callback null, version, projectHistoryType getDocLines: (doc_id, callback = (error, version) ->) -> rclient.get keys.docLines(doc_id: doc_id), (error, docLines) -> @@ -200,10 +201,18 @@ module.exports = RedisManager = return callback(error) callback null, ops + getHistoryType: (doc_id, callback = (error, projectHistoryType) ->) -> + rclient.get keys.projectHistoryType(doc_id:doc_id), (error, projectHistoryType) -> + return callback(error) if error? 
+ callback null, projectHistoryType + + setHistoryType: (doc_id, projectHistoryType, callback = (error) ->) -> + rclient.set keys.projectHistoryType(doc_id:doc_id), projectHistoryType, callback + DOC_OPS_TTL: 60 * minutes DOC_OPS_MAX_LENGTH: 100 updateDocument : (project_id, doc_id, docLines, newVersion, appliedOps = [], ranges, updateMeta, callback = (error) ->)-> - RedisManager.getDocVersion doc_id, (error, currentVersion) -> + RedisManager.getDocVersion doc_id, (error, currentVersion, projectHistoryType) -> return callback(error) if error? if currentVersion + appliedOps.length != newVersion error = new Error("Version mismatch. '#{doc_id}' is corrupted.") @@ -249,7 +258,11 @@ module.exports = RedisManager = multi.rpush keys.docOps(doc_id: doc_id), jsonOps... # index 5 # expire must come after rpush since before it will be a no-op if the list is empty multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL # index 6 - multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... # index 7 + if projectHistoryType is "project-history" + logger.debug {doc_id}, "skipping push of uncompressed ops for project using project-history" + else + # project is using old track-changes history service + multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... # index 7 # Set the unflushed timestamp to the current time if the doc # hasn't been modified before (the content in mongo has been # valid up to this point). Otherwise leave it alone ("NX" flag). @@ -262,8 +275,11 @@ module.exports = RedisManager = multi.exec (error, result) -> return callback(error) if error? - # length of uncompressedHistoryOps queue (index 7) - docUpdateCount = result[7] + if projectHistoryType is 'project-history' + docUpdateCount = undefined # only using project history, don't bother with track-changes + else + # project is using old track-changes history service + docUpdateCount = result[7] # length of uncompressedHistoryOps queue (index 7) if jsonOps.length > 0 && Settings.apis?.project_history?.enabled ProjectHistoryRedisManager.queueOps project_id, jsonOps..., (error, projectUpdateCount) -> diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 6711b3c3bf..e8804dfe3e 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -70,13 +70,14 @@ module.exports = unflushedTime: ({doc_id}) -> "UnflushedTime:{#{doc_id}}" pathname: ({doc_id}) -> "Pathname:{#{doc_id}}" projectHistoryId: ({doc_id}) -> "ProjectHistoryId:{#{doc_id}}" + projectHistoryType: ({doc_id}) -> "ProjectHistoryType:{#{doc_id}}" projectState: ({project_id}) -> "ProjectState:{#{project_id}}" pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:{#{doc_id}}" lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:{#{doc_id}}" pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" flushAndDeleteQueue: () -> "DocUpdaterFlushAndDeleteQueue" - + max_doc_length: 2 * 1024 * 1024 # 2mb dispatcherCount: process.env["DISPATCHER_COUNT"] @@ -91,4 +92,4 @@ module.exports = continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false - smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds \ No newline at end of file + smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee 
b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index dc57022a5a..76ad7f5af5 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -27,6 +27,7 @@ describe "DocumentManager", -> "./RangesManager": @RangesManager = {} @project_id = "project-id-123" @projectHistoryId = "history-id-123" + @projectHistoryType = "project-history" @doc_id = "doc-id-123" @user_id = 1234 @callback = sinon.stub() @@ -178,8 +179,9 @@ describe "DocumentManager", -> describe "when the doc does not exist in Redis", -> beforeEach -> @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) + @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, @projectHistoryType) @RedisManager.putDocInMemory = sinon.stub().yields() + @RedisManager.setHistoryType = sinon.stub().yields() @DocumentManager.getDoc @project_id, @doc_id, @callback it "should try to get the doc from Redis", -> @@ -197,6 +199,11 @@ describe "DocumentManager", -> .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId) .should.equal true + it "should set the history type in Redis", -> + @RedisManager.setHistoryType + .calledWith(@doc_id, @projectHistoryType) + .should.equal true + it "should call the callback with the doc info", -> @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, null, false).should.equal true diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 07c3577a91..20d5d0358b 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -15,7 +15,7 @@ describe "HistoryManager", -> trackchanges: url: "http://trackchanges.example.com" } - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } + "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), debug: sinon.stub() } "./DocumentManager": @DocumentManager = {} "./HistoryRedisManager": @HistoryRedisManager = {} "./RedisManager": @RedisManager = {} @@ -28,12 +28,25 @@ describe "HistoryManager", -> beforeEach -> @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) - @HistoryManager.flushDocChangesAsync @project_id, @doc_id + describe "when the project uses track changes", -> + beforeEach -> + @RedisManager.getHistoryType = sinon.stub().yields(null, 'track-changes') + @HistoryManager.flushDocChangesAsync @project_id, @doc_id - it "should send a request to the track changes api", -> - @request.post - .calledWith("#{@Settings.apis.trackchanges.url}/project/#{@project_id}/doc/#{@doc_id}/flush") - .should.equal true + it "should send a request to the track changes api", -> + @request.post + .calledWith("#{@Settings.apis.trackchanges.url}/project/#{@project_id}/doc/#{@doc_id}/flush") + .should.equal true + + describe "when the project uses project history", -> + beforeEach -> + @RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history') + @HistoryManager.flushDocChangesAsync @project_id, @doc_id + + it "should not send 
a request to the track changes api", -> + @request.post + .called + .should.equal false describe "flushProjectChangesAsync", -> beforeEach -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 5491922efb..508a9ba0f7 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -34,6 +34,7 @@ describe "RedisManager", -> ranges: ({doc_id}) -> "Ranges:#{doc_id}" pathname: ({doc_id}) -> "Pathname:#{doc_id}" projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" + projectHistoryType: ({doc_id}) -> "ProjectHistoryType:#{doc_id}" projectState: ({project_id}) -> "ProjectState:#{project_id}" unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:#{doc_id}" From 65cf4cf7c7dde3a0a917801210ba68c7ac6fa915 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 21 Nov 2019 14:58:35 +0000 Subject: [PATCH 529/769] make flush to track-changes failsafe --- services/document-updater/app/coffee/HistoryManager.coffee | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 286292e6b3..50fd645669 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -13,8 +13,9 @@ module.exports = HistoryManager = return RedisManager.getHistoryType doc_id, (err, projectHistoryType) -> if err? - logger.error {err, doc_id}, "error getting history type" - else if projectHistoryType is "project-history" + logger.warn {err, doc_id}, "error getting history type" + # if there's an error continue and flush to track-changes for safety + if projectHistoryType is "project-history" logger.debug {doc_id, projectHistoryType}, "skipping track-changes flush" else url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" From b7055eecee6680579cb69d07d95a363ba2164631 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 22 Nov 2019 09:14:32 +0000 Subject: [PATCH 530/769] add metrics for history flushes --- services/document-updater/app/coffee/HistoryManager.coffee | 4 +++- .../unit/coffee/HistoryManager/HistoryManagerTests.coffee | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 6b68b4b676..da65f11237 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -5,13 +5,14 @@ Settings = require "settings-sharelatex" HistoryRedisManager = require "./HistoryRedisManager" ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" RedisManager = require "./RedisManager" +metrics = require "./Metrics" module.exports = HistoryManager = flushDocChangesAsync: (project_id, doc_id) -> if !Settings.apis?.trackchanges? 
logger.warn { doc_id }, "track changes API is not configured, so not flushing" return - + metrics.inc 'history-flush', 1, { status: 'track-changes'} url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" logger.log { project_id, doc_id, url }, "flushing doc in track changes api" request.post url, (error, res, body)-> @@ -31,6 +32,7 @@ module.exports = HistoryManager = if options.skip_history_flush logger.log {project_id}, "skipping flush of project history" return callback() + metrics.inc 'history-flush', 1, { status: 'project-history'} url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush" qs = {} qs.background = true if options.background # pass on the background flush option if present diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 07c3577a91..f84e3246eb 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -20,6 +20,7 @@ describe "HistoryManager", -> "./HistoryRedisManager": @HistoryRedisManager = {} "./RedisManager": @RedisManager = {} "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} + "./Metrics": @metrics = {inc: sinon.stub()} @project_id = "mock-project-id" @doc_id = "mock-doc-id" @callback = sinon.stub() From 68e12f4d2d8b9377f927e97652e8e666388b48f8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 25 Nov 2019 10:51:10 +0000 Subject: [PATCH 531/769] add metrics for queue operations --- services/document-updater/app/coffee/RedisManager.coffee | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 7729486986..01e0289bee 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -259,9 +259,11 @@ module.exports = RedisManager = # expire must come after rpush since before it will be a no-op if the list is empty multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL # index 6 if projectHistoryType is "project-history" - logger.debug {doc_id}, "skipping push of uncompressed ops for project using project-history" + metrics.inc 'history-queue', 1, {status: 'skip-track-changes'} + logger.log {doc_id}, "skipping push of uncompressed ops for project using project-history" else # project is using old track-changes history service + metrics.inc 'history-queue', 1, {status: 'track-changes'} multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... 
# index 7 # Set the unflushed timestamp to the current time if the doc # hasn't been modified before (the content in mongo has been @@ -282,6 +284,7 @@ module.exports = RedisManager = docUpdateCount = result[7] # length of uncompressedHistoryOps queue (index 7) if jsonOps.length > 0 && Settings.apis?.project_history?.enabled + metrics.inc 'history-queue', 1, {status: 'project-history'} ProjectHistoryRedisManager.queueOps project_id, jsonOps..., (error, projectUpdateCount) -> callback null, docUpdateCount, projectUpdateCount else From 4f6583bbf22877dafc37d9d71acaded07604f0d8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 25 Nov 2019 13:28:36 +0000 Subject: [PATCH 532/769] fix getDocVersion and add tests --- .../app/coffee/RedisManager.coffee | 3 +- .../coffee/ApplyingUpdatesToADocTests.coffee | 33 ++++++++++++++ .../RedisManager/RedisManagerTests.coffee | 44 ++++++++++++++++++- 3 files changed, 78 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 01e0289bee..9e2edbd99d 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -156,8 +156,9 @@ module.exports = RedisManager = callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy getDocVersion: (doc_id, callback = (error, version, projectHistoryType) ->) -> - rclient.mget keys.docVersion(doc_id: doc_id), keys.projectHistoryType(doc_id:doc_id), (error, version, projectHistoryType) -> + rclient.mget keys.docVersion(doc_id: doc_id), keys.projectHistoryType(doc_id:doc_id), (error, result) -> return callback(error) if error? + [version, projectHistoryType] = result || [] version = parseInt(version, 10) callback null, version, projectHistoryType diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 51b9cf08a9..0b28dea7a7 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -135,6 +135,39 @@ describe "Applying updates to a doc", -> done() return null + describe "when the document is loaded and is using project-history only", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version, projectHistoryType: 'project-history'} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? 
+ setTimeout done, 200 + return null + + after -> + MockWebApi.getDocument.restore() + + it "should update the doc", (done) -> + DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => + doc.lines.should.deep.equal @result + done() + return null + + it "should not push any applied updates to the track changes api", (done) -> + rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => + updates.length.should.equal 0 + done() + return null + + it "should push the applied updates to the project history changes api", (done) -> + rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + return null describe "when the document has been deleted", -> describe "when the ops come in a single linear order", -> diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 508a9ba0f7..99035a32b3 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -361,11 +361,12 @@ describe "RedisManager", -> describe "with a consistent version", -> beforeEach -> - @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) + describe "with project history enabled", -> beforeEach -> @settings.apis.project_history.enabled = true + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback it "should get the current doc version to check for consistency", -> @@ -446,6 +447,7 @@ describe "RedisManager", -> beforeEach -> @rclient.rpush = sinon.stub() @settings.apis.project_history.enabled = false + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback it "should not push the updates into the project history ops list", -> @@ -456,6 +458,26 @@ describe "RedisManager", -> .calledWith(null, @doc_update_list_length) .should.equal true + describe "with a doc using project history only", -> + beforeEach -> + @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length, 'project-history') + @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback + + it "should not push the updates to the track-changes ops list", -> + @multi.rpush + .calledWith("UncompressedHistoryOps:#{@doc_id}") + .should.equal false + + it "should push the updates into the project history ops list", -> + @ProjectHistoryRedisManager.queueOps + .calledWith(@project_id, JSON.stringify(@ops[0])) + .should.equal true + + it "should call the callback with the project update count only", -> + @callback + .calledWith(null, undefined, @project_update_list_length) + .should.equal true + describe "with an inconsistent version", -> beforeEach -> @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length - 1) @@ -754,3 +776,23 @@ describe "RedisManager", -> @ProjectHistoryRedisManager.queueRenameEntity .calledWithExactly(@project_id, @projectHistoryId, 'doc', @doc_id, @userId, @update, @callback) .should.equal true + + describe "getDocVersion", -> + beforeEach -> + @version = 12345 + + 
describe "when the document does not have a project history type set", -> + beforeEach -> + @rclient.mget = sinon.stub().withArgs("DocVersion:#{@doc_id}", "ProjectHistoryType:#{@doc_id}").callsArgWith(2, null, ["#{@version}"]) + @RedisManager.getDocVersion @doc_id, @callback + + it "should return the document version and an undefined history type", -> + @callback.calledWithExactly(null, @version, undefined).should.equal true + + describe "when the document has a project history type set", -> + beforeEach -> + @rclient.mget = sinon.stub().withArgs("DocVersion:#{@doc_id}", "ProjectHistoryType:#{@doc_id}").callsArgWith(2, null, ["#{@version}", 'project-history']) + @RedisManager.getDocVersion @doc_id, @callback + + it "should return the document version and history type", -> + @callback.calledWithExactly(null, @version, 'project-history').should.equal true From ad19fee66770773027a14c1d6eeeb436cca07cc3 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 25 Nov 2019 13:36:25 +0000 Subject: [PATCH 533/769] add setting so that double flush is the default can be disabled to stop flushing to track-changes --- .../app/coffee/HistoryManager.coffee | 2 +- .../config/settings.defaults.coffee | 2 ++ .../HistoryManager/HistoryManagerTests.coffee | 14 +++++++++++++- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.coffee index 0e13627445..183ac268f3 100644 --- a/services/document-updater/app/coffee/HistoryManager.coffee +++ b/services/document-updater/app/coffee/HistoryManager.coffee @@ -16,7 +16,7 @@ module.exports = HistoryManager = if err? logger.warn {err, doc_id}, "error getting history type" # if there's an error continue and flush to track-changes for safety - if projectHistoryType is "project-history" + if Settings.disableDoubleFlush and projectHistoryType is "project-history" logger.debug {doc_id, projectHistoryType}, "skipping track-changes flush" else metrics.inc 'history-flush', 1, { status: 'track-changes'} diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index e8804dfe3e..9eebe86005 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -93,3 +93,5 @@ module.exports = continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds + + disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] or false # don't flush track-changes for projects using project-history diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee index 4a738bd455..6cb6b1d8da 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee @@ -39,16 +39,28 @@ describe "HistoryManager", -> .calledWith("#{@Settings.apis.trackchanges.url}/project/#{@project_id}/doc/#{@doc_id}/flush") .should.equal true - describe "when the project uses project history", -> + describe "when the project uses project history and double flush is not disabled", -> beforeEach -> @RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history') @HistoryManager.flushDocChangesAsync @project_id, @doc_id + it "should send 
a request to the track changes api", -> + @request.post + .called + .should.equal true + + describe "when the project uses project history and double flush is disabled", -> + beforeEach -> + @Settings.disableDoubleFlush = true + @RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history') + @HistoryManager.flushDocChangesAsync @project_id, @doc_id + it "should not send a request to the track changes api", -> @request.post .called .should.equal false + describe "flushProjectChangesAsync", -> beforeEach -> @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) From 1bb12a5035e5b08e191b1084821377beea5e4d06 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 11 Dec 2019 14:43:59 +0000 Subject: [PATCH 534/769] allow pending updates to clear in acceptance tests --- .../coffee/helpers/DocUpdaterClient.coffee | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee index 17067b5bf4..b78f2aa7dd 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee @@ -33,7 +33,17 @@ module.exports = DocUpdaterClient = do (update) -> jobs.push (callback) -> DocUpdaterClient.sendUpdate project_id, doc_id, update, callback - async.series jobs, callback + async.series jobs, (err) -> + DocUpdaterClient.waitForPendingUpdates project_id, doc_id, callback + + waitForPendingUpdates: (project_id, doc_id, callback) -> + async.retry {times: 30, interval: 100}, (cb) -> + rclient.llen keys.pendingUpdates({doc_id}), (err, length) -> + if length > 0 + cb(new Error("updates still pending")) + else + cb() + , callback getDoc: (project_id, doc_id, callback = (error, res, body) ->) -> request.get "http://localhost:3003/project/#{project_id}/doc/#{doc_id}", (error, res, body) -> From d0c5eb569807ff14315c688fac5275742b9a0163 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 13 Sep 2019 15:06:42 +0100 Subject: [PATCH 535/769] support migration of project history keys to separate redis instance --- .../coffee/ProjectHistoryRedisManager.coffee | 3 +- .../app/coffee/RedisMigrationManager.coffee | 199 ++++++++++++++++++ .../config/settings.defaults.coffee | 12 ++ services/document-updater/docker-compose.yml | 9 +- .../coffee/helpers/DocUpdaterApp.coffee | 4 +- .../ProjectHistoryRedisManagerTests.coffee | 2 + 6 files changed, 222 insertions(+), 7 deletions(-) create mode 100644 services/document-updater/app/coffee/RedisMigrationManager.coffee diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 1cc80ea722..c6362c4fbd 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -1,6 +1,7 @@ Settings = require('settings-sharelatex') projectHistoryKeys = Settings.redis?.project_history?.key_schema -rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +#rclient = require("redis-sharelatex").createClient(Settings.redis.project_history) +rclient = require("./RedisMigrationManager").createClient(Settings.redis.project_history, Settings.redis.new_project_history) logger = require('logger-sharelatex') module.exports = ProjectHistoryRedisManager = diff --git 
a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee
new file mode 100644
index 0000000000..c00d7727af
--- /dev/null
+++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee
@@ -0,0 +1,199 @@
+logger = require "logger-sharelatex"
+Settings = require "settings-sharelatex"
+redis = require("redis-sharelatex")
+LockManager = require("./LockManager")
+async = require("async")
+
+# The aim is to migrate the project history queues
+# ProjectHistory:Ops:{project_id} from the existing redis to a new redis.
+#
+# This has to work in conjunction with changes in project history.
+#
+# The basic principles are:
+#
+# - project history is modified to read from an 'old' and 'new' queue. It reads
+# from the 'old' queue first, and when that queue is empty it reads from the
+# 'new' queue.
+# - docupdater will migrate to writing to the 'new' queue when the 'old' queue
+# is empty.
+#
+# Some facts about the update process:
+#
+# - project history has a lock on the project-id, so each queue is processed in
+# isolation
+# - docupdaters take a lock on the doc_id but not the project_id, therefore
+# multiple docupdaters can be appending to the queue for a project at the same
+# time (provided the updates for individual docs are in order, this is
+# acceptable)
+# - as we want to do this without shutting down the site, we have to take into
+# account that different versions of the code will be running while deploys
+# are in progress.
+#
+# The migration has to be carried out with the following constraint:
+#
+# - a docupdater should never write to the "old" queue when there are updates in
+# the "new" queue (there is a strict ordering on the versions, new > old)
+#
+# The deployment process for docupdater will be
+#
+# - add a project-level lock to the queuing in docupdater
+# - use a per-project migration flag to determine when to write to the new redis
+# - set the migration flag for projects with an empty queue in the old redis
+# - when all docupdaters respect the flag, make a new deploy which starts to set
+# the flag
+# - when all docupdaters are setting the flag (and writing to the new redis),
+# finish the migration by writing all data to the new redis
+#
+# Rollback
+#
+# Under the scheme above a project should only ever have data in the old redis
+# or the new redis, but never both at the same time.
+#
+# Two scenarios:
+#
+# Hard rollback
+#
+# If we want to roll back to the old redis immediately, we need to get the data
+# out of the new queues and back into the old queues, before appending to the
+# old queues again. The actions to do this are:
+#
+# - close the site
+# - revert docupdater so it only writes to the original redis (there will now
+# be some data in the new redis for some projects which we need to recover)
+# - run a script to move the new queues back into the old redis
+# - revert project history to only read from the original redis
+#
+# Graceful rollback
+#
+# If we are prepared to keep the new redis running, but not add new projects to
+# it we can do the following:
+#
+# - deploy all docupdaters to update from the "switch" phase into the
+# "rollback" phase (projects in the new redis will continue to send data
+# there, projects not yet migrated will continue to go to the old redis)
+# - deploy project history with the "old queue" pointing to the new redis and
+# the "new queue" to the old redis to clear the new queue before processing
+# the old queue (i.e.
add a rollback:true property in new_project_history in +# the project-history settings) +# - projects will now clear gradually from the new redis back to the old redis +# - get a list of all the projects in the new redis and flush them, which will +# cause the new queues to be cleared and the old redis to be used for those +# projects. + +getProjectId = (key) -> + key.match(/\{([0-9a-f]{24})\}/)[1] + +class Multi + constructor: (@migrationClient) -> + @command_list = [] + @queueKey = null + rpush: (args...) -> + @queueKey = args[0] + @command_list.push { command:'rpush', args: args} + setnx: (args...) -> + @command_list.push { command: 'setnx', args: args} + exec: (callback) -> + # decide which client to use + project_id = getProjectId(@queueKey) + LockManager.getLock project_id, (error, lockValue) => + return callback(error) if error? + releaseLock = (args...) => + LockManager.releaseLock project_id, lockValue, (lockError) -> + return callback(lockError) if lockError? + callback(args...) + @migrationClient.findQueue @queueKey, (err, rclient) => + return releaseLock(err) if err? + multi = rclient.multi() + for entry in @command_list + multi[entry.command](entry.args...) + multi.exec releaseLock + +class MigrationClient + constructor: (old_settings, new_settings) -> + @rclient_old = redis.createClient(old_settings) + @rclient_new = redis.createClient(new_settings) + @new_key_schema = new_settings.key_schema + @migration_phase = new_settings.migration_phase + throw new Error("invalid migration phase") unless @migration_phase in ['prepare', 'start', 'switch', 'complete'] + + getMigrationStatus: (key, migrationKey, callback) -> + async.series [ + (cb) => @rclient_new.exists migrationKey, cb + (cb) => @rclient_new.exists key, cb + (cb) => @rclient_old.exists key, cb + ], (err, result) -> + return callback(err) if err? + migrationKeyExists = result[0] > 0 + newQueueExists = result[1] > 0 + oldQueueExists = result[2] > 0 + callback(null, migrationKeyExists, newQueueExists, oldQueueExists) + + findQueue: (key, callback) -> + project_id = getProjectId(key) + migrationKey = @new_key_schema.projectHistoryMigrationKey({project_id}) + + @getMigrationStatus key, migrationKey, (err, migrationKeyExists, newQueueExists, oldQueueExists) -> + return callback(err) if err? + # In all cases, if the migration key exists we must always write to the + # new redis, unless we are rolling back. + if @migration_phase is "prepare" + # in this phase we prepare for the switch, when some docupdaters will + # start setting the migration flag. We monitor the migration key and + # write to the new redis if the key is present, but we do not set the + # migration key. At this point no writes will be going into the new + # redis. When all the docupdaters are in the "prepare" phase we can + # begin deploying the "switch" phase. + if migrationKeyExists + logger.debug {project_id}, "using new client because migration key exists" + return callback(null, @rclient_new) + else + logger.debug {project_id}, "using old client because migration key does not exist" + return callback(null, @rclient_old) + else if @migration_phase is "switch" + # As we deploy the "switch" phase new docupdaters will set the migration + # flag for projects which have an empty queue in the old redis, and + # write updates into the new redis. Existing docupdaters still in the + # "prepare" phase will pick up the migration flag and write new updates + # into the new redis when appropriate. 
When this deploy is complete + # writes will be going into the new redis for projects with an empty + # queue in the old redis. We have to remain in the switch phase until + # all projects are flushed from the old redis. + if migrationKeyExists + logger.debug {project_id}, "using new client because migration key exists" + return callback(null, @rclient_new) + else + if oldQueueExists + logger.debug {project_id}, "using old client because old queue exists" + return callback(null, @rclient_old) + else + @rclient_new.setnx migrationKey, "NEW", (err) => + return callback(err) if err? + logger.debug {key: key}, "switching to new redis because old queue is empty" + return callback(null, @rclient_new) + else if @migration_phase is "rollback" + # If we need to roll back gracefully we do the opposite of the "switch" + # phase. We use the new redis when the migration key is set and the + # queue exists in the new redis, but if the queue in the new redis is + # empty we delete the migration key and send further updates to the old + # redis. + if migrationKeyExists + if newQueueExists + logger.debug {project_id}, "using new client because migration key exists and new queue is present" + return callback(null, @rclient_new) + else + @rclient_new.del migrationKey, (err) => + return callback(err) if err? + logger.debug {key: key}, "switching to old redis in rollback phase because new queue is empty" + return callback(null, @rclient_old) + else + logger.debug {project_id}, "using old client because migration key does not exist" + return callback(null, @rclient_old) + else + logger.error {key: key}, "unknown migration phase" + callback(new Error('invalid migration phase')) + multi: () -> + new Multi(@) + +module.exports = RedisMigrationManager = + createClient: (args...) -> + new MigrationClient(args...) 
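The Multi class above implements only the subset of the node_redis multi interface that the queueing code needs (rpush, setnx and exec), so it can be swapped in for a plain redis client via createClient with no other changes in ProjectHistoryRedisManager. A minimal usage sketch, assuming the project_history and new_project_history settings shapes from settings.defaults.coffee; the project id and update payload here are illustrative only:

    # Usage sketch for the migration client above; illustrative, not part of
    # the patch. The require path assumes a caller inside app/coffee.
    Settings = require "settings-sharelatex"
    RedisMigrationManager = require "./RedisMigrationManager"

    rclient = RedisMigrationManager.createClient(
      Settings.redis.project_history,
      Settings.redis.new_project_history
    )

    project_id = "5e4b2c1d9f0a8b7c6d5e4f3a" # hypothetical; getProjectId expects 24 hex chars
    queueKey = "ProjectHistory:Ops:{#{project_id}}"

    multi = rclient.multi()
    multi.rpush queueKey, JSON.stringify({doc: "doc-id", op: [{i: "foo", p: 0}], v: 42})
    multi.setnx "ProjectHistory:FirstOpTimestamp:{#{project_id}}", Date.now()
    multi.exec (error, results) ->
      throw error if error?
      # exec took the project lock, used findQueue to pick the old or new
      # redis for this project, and ran both queued commands there atomically
      console.log "queued project history ops", results

Because the routing decision happens inside exec, callers never need to know which redis a given project is currently assigned to.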
diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 9eebe86005..f890eb0f4a 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -45,6 +45,18 @@ module.exports = projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" + new_project_history: + port: process.env["NEW_HISTORY_REDIS_PORT"] or "6379" + host: process.env["NEW_HISTORY_REDIS_HOST"] + password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or "" + key_schema: + projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" + projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" + projectHistoryMigrationKey: ({project_id}) -> "ProjectHistory:MigrationKey:{#{project_id}}" + migration_phase: "prepare" + redisOptions: + keepAlive: 100 + lock: port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" host: process.env["LOCK_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 6dc90009ca..31869acb50 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -25,6 +25,7 @@ services: environment: ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis + NEW_HISTORY_REDIS_HOST: new_redis MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} @@ -34,10 +35,9 @@ services: depends_on: - mongo - redis + - new_redis command: npm run test:acceptance - - tar: build: . image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER @@ -49,7 +49,8 @@ services: redis: image: redis + new_redis: + image: redis + mongo: image: mongo:3.4 - - diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee index 9819f9f99e..0f77199e73 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee @@ -13,8 +13,8 @@ module.exports = else @initing = true @callbacks.push callback - app.listen 3003, "localhost", (error) => + app.listen 3003, "localhost", (error) => throw error if error? 
@running = true for callback in @callbacks - callback() \ No newline at end of file + callback() diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index a93545b250..6748f87af9 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -24,6 +24,8 @@ describe "ProjectHistoryRedisManager", -> } "redis-sharelatex": createClient: () => @rclient + "./RedisMigrationManager": + createClient: () => @rclient "logger-sharelatex": log:-> globals: From a85dffbcefad8d52e8508541eda4ee76a39471a7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 16 Dec 2019 09:27:00 +0000 Subject: [PATCH 536/769] fix acceptance tests --- services/document-updater/docker-compose.ci.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index c78d90e8ed..d65f97b913 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -20,6 +20,7 @@ services: environment: ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis + NEW_HISTORY_REDIS_HOST: new_redis MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} @@ -27,6 +28,7 @@ services: depends_on: - mongo - redis + - new_redis user: node command: npm run test:acceptance:_run @@ -43,5 +45,8 @@ services: redis: image: redis + new_redis: + image: redis + mongo: image: mongo:3.4 From a2e63d009ee16a9064c0cb5ce3fd2ad7c9411812 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 16 Dec 2019 09:55:26 +0000 Subject: [PATCH 537/769] fix migration phase check --- .../app/coffee/RedisMigrationManager.coffee | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee index c00d7727af..f1ec00cf5c 100644 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee @@ -114,7 +114,7 @@ class MigrationClient @rclient_new = redis.createClient(new_settings) @new_key_schema = new_settings.key_schema @migration_phase = new_settings.migration_phase - throw new Error("invalid migration phase") unless @migration_phase in ['prepare', 'start', 'switch', 'complete'] + throw new Error("invalid migration phase") unless @migration_phase in ['prepare', 'switch', 'rollback'] getMigrationStatus: (key, migrationKey, callback) -> async.series [ @@ -132,7 +132,7 @@ class MigrationClient project_id = getProjectId(key) migrationKey = @new_key_schema.projectHistoryMigrationKey({project_id}) - @getMigrationStatus key, migrationKey, (err, migrationKeyExists, newQueueExists, oldQueueExists) -> + @getMigrationStatus key, migrationKey, (err, migrationKeyExists, newQueueExists, oldQueueExists) => return callback(err) if err? # In all cases, if the migration key exists we must always write to the # new redis, unless we are rolling back. 
@@ -189,7 +189,7 @@ class MigrationClient logger.debug {project_id}, "using old client because migration key does not exist" return callback(null, @rclient_old) else - logger.error {key: key}, "unknown migration phase" + logger.error {key: key, migration_phase: @migration_phase}, "unknown migration phase" callback(new Error('invalid migration phase')) multi: () -> new Multi(@) From 97cbf461601add8ac9c024b9cbc189afe3fdc26b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 16 Dec 2019 11:46:35 +0000 Subject: [PATCH 538/769] add metrics for migration --- .../document-updater/app/coffee/RedisMigrationManager.coffee | 5 +++++ services/document-updater/package.json | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee index f1ec00cf5c..553e707753 100644 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee @@ -2,6 +2,7 @@ logger = require "logger-sharelatex" Settings = require "settings-sharelatex" redis = require("redis-sharelatex") LockManager = require("./LockManager") +metrics = require "./Metrics" async = require("async") # The aim is to migrate the project history queues @@ -89,6 +90,7 @@ class Multi @queueKey = null rpush: (args...) -> @queueKey = args[0] + @updates_count = args.length - 1 @command_list.push { command:'rpush', args: args} setnx: (args...) -> @command_list.push { command: 'setnx', args: args} @@ -103,6 +105,9 @@ class Multi callback(args...) @migrationClient.findQueue @queueKey, (err, rclient) => return releaseLock(err) if err? + # add metric for updates + dest = (if rclient == @rclient_new then "new" else "old") + metrics.count "migration", @updates_count, 1, {status: "#{@migrationClient.migration_phase}-#{dest}"} multi = rclient.multi() for entry in @command_list multi[entry.command](entry.args...) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index f80251248b..ca79ec51ac 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,7 +26,7 @@ "lodash": "^4.17.4", "logger-sharelatex": "^1.7.0", "lynx": "0.0.11", - "metrics-sharelatex": "^2.2.0", + "metrics-sharelatex": "^2.4.0", "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", "request": "2.47.0", From 8ae95ebf604734a4c3d2ddb96d7f3c29134fcc49 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 6 Jan 2020 16:45:36 +0000 Subject: [PATCH 539/769] fix rclient check in migration metrics --- .../document-updater/app/coffee/RedisMigrationManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee index 553e707753..4978bf7e7b 100644 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee @@ -106,7 +106,7 @@ class Multi @migrationClient.findQueue @queueKey, (err, rclient) => return releaseLock(err) if err? 
# add metric for updates - dest = (if rclient == @rclient_new then "new" else "old") + dest = (if rclient == @migrationClient.rclient_new then "new" else "old") metrics.count "migration", @updates_count, 1, {status: "#{@migrationClient.migration_phase}-#{dest}"} multi = rclient.multi() for entry in @command_list From 27044c2d02c5cd380dc9a29bf412d5e1d5c19d9c Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 6 Jan 2020 16:46:35 +0000 Subject: [PATCH 540/769] allow migration phase to be modified at runtime for testing --- .../app/coffee/RedisMigrationManager.coffee | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee index 4978bf7e7b..96c8ee049c 100644 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee @@ -114,12 +114,17 @@ class Multi multi.exec releaseLock class MigrationClient - constructor: (old_settings, new_settings) -> - @rclient_old = redis.createClient(old_settings) - @rclient_new = redis.createClient(new_settings) + constructor: (@old_settings, @new_settings) -> + @rclient_old = redis.createClient(@old_settings) + @rclient_new = redis.createClient(@new_settings) @new_key_schema = new_settings.key_schema - @migration_phase = new_settings.migration_phase + # check that migration phase is valid on startup + @getMigrationPhase() + + getMigrationPhase: () -> + @migration_phase = @new_settings.migration_phase # FIXME: allow setting migration phase while running for testing throw new Error("invalid migration phase") unless @migration_phase in ['prepare', 'switch', 'rollback'] + return @migration_phase getMigrationStatus: (key, migrationKey, callback) -> async.series [ @@ -136,12 +141,12 @@ class MigrationClient findQueue: (key, callback) -> project_id = getProjectId(key) migrationKey = @new_key_schema.projectHistoryMigrationKey({project_id}) - + migration_phase = @getMigrationPhase() # allow setting migration phase while running for testing @getMigrationStatus key, migrationKey, (err, migrationKeyExists, newQueueExists, oldQueueExists) => return callback(err) if err? # In all cases, if the migration key exists we must always write to the # new redis, unless we are rolling back. - if @migration_phase is "prepare" + if migration_phase is "prepare" # in this phase we prepare for the switch, when some docupdaters will # start setting the migration flag. We monitor the migration key and # write to the new redis if the key is present, but we do not set the @@ -154,7 +159,7 @@ class MigrationClient else logger.debug {project_id}, "using old client because migration key does not exist" return callback(null, @rclient_old) - else if @migration_phase is "switch" + else if migration_phase is "switch" # As we deploy the "switch" phase new docupdaters will set the migration # flag for projects which have an empty queue in the old redis, and # write updates into the new redis. Existing docupdaters still in the @@ -175,7 +180,7 @@ class MigrationClient return callback(err) if err? logger.debug {key: key}, "switching to new redis because old queue is empty" return callback(null, @rclient_new) - else if @migration_phase is "rollback" + else if migration_phase is "rollback" # If we need to roll back gracefully we do the opposite of the "switch" # phase. 
We use the new redis when the migration key is set and the # queue exists in the new redis, but if the queue in the new redis is @@ -194,7 +199,7 @@ class MigrationClient logger.debug {project_id}, "using old client because migration key does not exist" return callback(null, @rclient_old) else - logger.error {key: key, migration_phase: @migration_phase}, "unknown migration phase" + logger.error {key: key, migration_phase: migration_phase}, "unknown migration phase" callback(new Error('invalid migration phase')) multi: () -> new Multi(@) From c2714f9ae9c863890fc299915af4134d49676b4c Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 6 Jan 2020 16:50:06 +0000 Subject: [PATCH 541/769] add acceptance tests for RedisMigrationManager --- .../coffee/RedisMigrationManagerTests.coffee | 320 ++++++++++++++++++ 1 file changed, 320 insertions(+) create mode 100644 services/document-updater/test/acceptance/coffee/RedisMigrationManagerTests.coffee diff --git a/services/document-updater/test/acceptance/coffee/RedisMigrationManagerTests.coffee b/services/document-updater/test/acceptance/coffee/RedisMigrationManagerTests.coffee new file mode 100644 index 0000000000..2684a4a3d8 --- /dev/null +++ b/services/document-updater/test/acceptance/coffee/RedisMigrationManagerTests.coffee @@ -0,0 +1,320 @@ +sinon = require "sinon" +chai = require("chai") +chai.should() +expect = chai.expect +async = require "async" +Settings = require('settings-sharelatex') +rclient_old = require("redis-sharelatex").createClient(Settings.redis.project_history) +rclient_new = require("redis-sharelatex").createClient(Settings.redis.new_project_history) +rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +Keys = Settings.redis.documentupdater.key_schema +HistoryKeys = Settings.redis.history.key_schema +ProjectHistoryKeys = Settings.redis.project_history.key_schema +NewProjectHistoryKeys = Settings.redis.new_project_history.key_schema + +MockTrackChangesApi = require "./helpers/MockTrackChangesApi" +MockWebApi = require "./helpers/MockWebApi" +DocUpdaterClient = require "./helpers/DocUpdaterClient" +DocUpdaterApp = require "./helpers/DocUpdaterApp" + +describe "RedisMigrationManager", -> + before (done) -> + @lines = ["one", "two", "three"] + @version = 42 + @update = + doc: @doc_id + op: [{ + i: "one and a half\n" + p: 4 + }] + v: @version + DocUpdaterApp.ensureRunning(done) + + describe "when the migration phase is 'prepare' (default)", -> + + describe "when there is no migration flag", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? 
+ setTimeout done, 200 + return null + + after -> + MockWebApi.getDocument.restore() + + it "should push the applied updates to old redis", (done) -> + rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + return null + + it "should not push the applied updates to the new redis", (done) -> + rclient_new.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + it "should not set the migration flag for the project", (done) -> + rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + describe "when the migration flag is set for the project", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + rclient_new.set NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), '1', (error) => + throw error if error? + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + return null + + after (done) -> + MockWebApi.getDocument.restore() + rclient_new.del NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), done + return null + + it "should push the applied updates to the new redis", (done) -> + rclient_new.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + return null + + it "should not push the applied updates to the old redis", (done) -> + rclient_old.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + it "should keep the migration flag for the project", (done) -> + rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => + result.should.equal 1 + done() + return null + + describe "when the migration phase is 'switch'", -> + before -> + Settings.redis.new_project_history.migration_phase = 'switch' + + describe "when the old queue is empty", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? 
+ setTimeout done, 200 + return null + + after -> + MockWebApi.getDocument.restore() + + it "should push the applied updates to the new redis", (done) -> + rclient_new.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + return null + + it "should not push the applied updates to the old redis", (done) -> + rclient_old.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + it "should set the migration flag for the project", (done) -> + rclient_new.get NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => + result.should.equal "NEW" + done() + return null + + describe "when the old queue is not empty", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + rclient_old.rpush ProjectHistoryKeys.projectHistoryOps({@project_id}), JSON.stringify({op: "dummy-op"}), (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + return null + + after -> + MockWebApi.getDocument.restore() + + it "should push the applied updates to the old redis", (done) -> + rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal "dummy-op" + JSON.parse(updates[1]).op.should.deep.equal @update.op + done() + return null + + it "should not push the applied updates to the new redis", (done) -> + rclient_new.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + it "should not set the migration flag for the project", (done) -> + rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + describe "when the migration flag is set for the project", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + rclient_new.set NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), '1', (error) => + throw error if error? + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? 
+ setTimeout done, 200 + return null + + after (done) -> + MockWebApi.getDocument.restore() + rclient_new.del NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), done + return null + + it "should push the applied updates to the new redis", (done) -> + rclient_new.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + return null + + it "should not push the applied updates to the old redis", (done) -> + rclient_old.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + it "should keep the migration flag for the project", (done) -> + rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => + result.should.equal 1 + done() + return null + + describe "when the migration phase is 'rollback'", -> + before -> + Settings.redis.new_project_history.migration_phase = 'rollback' + + describe "when the old queue is empty", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + return null + + after -> + MockWebApi.getDocument.restore() + + it "should push the applied updates to the old redis", (done) -> + rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + return null + + it "should not push the applied updates to the new redis", (done) -> + rclient_new.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + describe "when the new queue is not empty", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + rclient_new.rpush ProjectHistoryKeys.projectHistoryOps({@project_id}), JSON.stringify({op: "dummy-op"}), (error) => + throw error if error? + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + return null + + after -> + MockWebApi.getDocument.restore() + + it "should push the applied updates to the old redis", (done) -> + rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + return null + + it "should not push the applied updates to the new redis", (done) -> + rclient_new.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal "dummy-op" + updates.length.should.equal 1 + done() + return null + + describe "when the migration flag is set for the project", -> + before (done) -> + [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + + rclient_new.set NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), '1', (error) => + throw error if error? 
+ MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => + throw error if error? + sinon.spy MockWebApi, "getDocument" + DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> + throw error if error? + setTimeout done, 200 + return null + + after (done) -> + MockWebApi.getDocument.restore() + rclient_new.del NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), done + return null + + it "should push the applied updates to the old redis", (done) -> + rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + JSON.parse(updates[0]).op.should.deep.equal @update.op + done() + return null + + it "should not push the applied updates to the new redis", (done) -> + rclient_new.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + + it "should delete the migration flag for the project", (done) -> + rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => + result.should.equal 0 + done() + return null + From a638ef425146bb31e5ebfcd2b7d32e297cc75ab2 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 13 Jan 2020 15:56:28 +0000 Subject: [PATCH 542/769] add comment about locking in redis migration --- .../app/coffee/RedisMigrationManager.coffee | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee index 96c8ee049c..a12593d03f 100644 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee @@ -97,6 +97,14 @@ class Multi exec: (callback) -> # decide which client to use project_id = getProjectId(@queueKey) + # Put a lock around finding and updating the queue to avoid time-of-check to + # time-of-use problems. When running in the "switch" phase we need a lock to + # guarantee the order of operations. (Example: docupdater A sees an old + # queue at t=t0 and pushes onto it at t=t1, project history clears the queue + # between t0 and t1, and docupdater B sees the empty queue, sets the + # migration flag and pushes onto the new queue at t2. Without a lock it's + # possible to have t2 < t1 if docupdater A is slower than B - then there + # would be entries in the old and new queues, which we want to avoid.) LockManager.getLock project_id, (error, lockValue) => return callback(error) if error? releaseLock = (args...) => From 3caa0e7c058639d434068e5c88174ada26adfc35 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 14 Jan 2020 13:53:50 +0000 Subject: [PATCH 543/769] add failure/retry metrics for web-api requests --- .../app/coffee/PersistenceManager.coffee | 17 ++++++++++ .../PersistenceManagerTests.coffee | 32 +++++++++++++++++-- 2 files changed, 47 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index 543b9f0c22..f27c5f1b7a 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -12,6 +12,21 @@ request = (require("requestretry")).defaults({ # hold us up, and need to bail out quickly if there is a problem. 
MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds +updateMetric = (method, error, response) -> + # find the status, with special handling for connection timeouts + # https://github.com/request/request#timeouts + status = if error?.connect is true + "#{error.code} (connect)" + else if error? + error.code + else if response? + response.statusCode + Metrics.inc method, {status: status} + if error?.attempts > 0 + Metrics.inc "#{method}-attempts", {status: 'error'} + if response?.attempts > 0 + Metrics.inc "#{method}-attempts", {status: 'success'} + module.exports = PersistenceManager = getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) ->) -> timer = new Metrics.Timer("persistenceManager.getDoc") @@ -32,6 +47,7 @@ module.exports = PersistenceManager = jar: false timeout: MAX_HTTP_REQUEST_LENGTH }, (error, res, body) -> + updateMetric('getDoc', error, res) return callback(error) if error? if res.statusCode >= 200 and res.statusCode < 300 try @@ -73,6 +89,7 @@ module.exports = PersistenceManager = jar: false timeout: MAX_HTTP_REQUEST_LENGTH }, (error, res, body) -> + updateMetric('setDoc', error, res) return callback(error) if error? if res.statusCode >= 200 and res.statusCode < 300 return callback null diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index d1308ad899..a8e2ed9b23 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -15,6 +15,7 @@ describe "PersistenceManager", -> "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() + inc: sinon.stub() "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} @project_id = "project-id-123" @projectHistoryId = "history-id-123" @@ -71,9 +72,14 @@ describe "PersistenceManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should increment the metric", -> + @Metrics.inc.calledWith("getDoc", {status: 200}).should.equal true + describe "when request returns an error", -> beforeEach -> - @request.callsArgWith(1, @error = new Error("oops"), null, null) + @error = new Error("oops") + @error.code = "EOOPS" + @request.callsArgWith(1, @error, null, null) @PersistenceManager.getDoc(@project_id, @doc_id, @callback) it "should return the error", -> @@ -82,6 +88,9 @@ describe "PersistenceManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should increment the metric", -> + @Metrics.inc.calledWith("getDoc", {status: "EOOPS"}).should.equal true + describe "when the request returns 404", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 404}, "") @@ -93,6 +102,9 @@ describe "PersistenceManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should increment the metric", -> + @Metrics.inc.calledWith("getDoc", {status: 404}).should.equal true + describe "when the request returns an error status code", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") @@ -104,6 +116,9 @@ describe "PersistenceManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should increment the metric", -> + @Metrics.inc.calledWith("getDoc", {status: 500}).should.equal true + describe "when request 
returns an doc without lines", -> beforeEach -> delete @webResponse.lines @@ -163,9 +178,14 @@ describe "PersistenceManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should increment the metric", -> + @Metrics.inc.calledWith("setDoc", {status: 200}).should.equal true + describe "when request returns an error", -> beforeEach -> - @request.callsArgWith(1, @error = new Error("oops"), null, null) + @error = new Error("oops") + @error.code = "EOOPS" + @request.callsArgWith(1, @error, null, null) @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback) it "should return the error", -> @@ -174,6 +194,9 @@ describe "PersistenceManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should increment the metric", -> + @Metrics.inc.calledWith("setDoc", {status: "EOOPS"}).should.equal true + describe "when the request returns 404", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 404}, "") @@ -185,6 +208,9 @@ describe "PersistenceManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should increment the metric", -> + @Metrics.inc.calledWith("setDoc", {status: 404}).should.equal true + describe "when the request returns an error status code", -> beforeEach -> @request.callsArgWith(1, null, {statusCode: 500}, "") @@ -196,3 +222,5 @@ describe "PersistenceManager", -> it "should time the execution", -> @Metrics.Timer::done.called.should.equal true + it "should increment the metric", -> + @Metrics.inc.calledWith("setDoc", {status: 500}).should.equal true From 7036803acfd551eb4d074b65a5244069e33ffb67 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 14 Jan 2020 15:00:21 +0000 Subject: [PATCH 544/769] add missing argument to metrics.inc also track retries rather than attempts (which is always 1 for a successful request) --- .../app/coffee/PersistenceManager.coffee | 10 +++++----- .../PersistenceManagerTests.coffee | 16 ++++++++-------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.coffee index f27c5f1b7a..88b44fd1de 100644 --- a/services/document-updater/app/coffee/PersistenceManager.coffee +++ b/services/document-updater/app/coffee/PersistenceManager.coffee @@ -21,11 +21,11 @@ updateMetric = (method, error, response) -> error.code else if response? 
response.statusCode - Metrics.inc method, {status: status} - if error?.attempts > 0 - Metrics.inc "#{method}-attempts", {status: 'error'} - if response?.attempts > 0 - Metrics.inc "#{method}-attempts", {status: 'success'} + Metrics.inc method, 1, {status: status} + if error?.attempts > 1 + Metrics.inc "#{method}-retries", 1, {status: 'error'} + if response?.attempts > 1 + Metrics.inc "#{method}-retries", 1, {status: 'success'} module.exports = PersistenceManager = getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) ->) -> diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee index a8e2ed9b23..0ad69c3885 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee @@ -73,7 +73,7 @@ describe "PersistenceManager", -> @Metrics.Timer::done.called.should.equal true it "should increment the metric", -> - @Metrics.inc.calledWith("getDoc", {status: 200}).should.equal true + @Metrics.inc.calledWith("getDoc", 1, {status: 200}).should.equal true describe "when request returns an error", -> beforeEach -> @@ -89,7 +89,7 @@ describe "PersistenceManager", -> @Metrics.Timer::done.called.should.equal true it "should increment the metric", -> - @Metrics.inc.calledWith("getDoc", {status: "EOOPS"}).should.equal true + @Metrics.inc.calledWith("getDoc", 1, {status: "EOOPS"}).should.equal true describe "when the request returns 404", -> beforeEach -> @@ -103,7 +103,7 @@ describe "PersistenceManager", -> @Metrics.Timer::done.called.should.equal true it "should increment the metric", -> - @Metrics.inc.calledWith("getDoc", {status: 404}).should.equal true + @Metrics.inc.calledWith("getDoc", 1, {status: 404}).should.equal true describe "when the request returns an error status code", -> beforeEach -> @@ -117,7 +117,7 @@ describe "PersistenceManager", -> @Metrics.Timer::done.called.should.equal true it "should increment the metric", -> - @Metrics.inc.calledWith("getDoc", {status: 500}).should.equal true + @Metrics.inc.calledWith("getDoc", 1, {status: 500}).should.equal true describe "when request returns an doc without lines", -> beforeEach -> @@ -179,7 +179,7 @@ describe "PersistenceManager", -> @Metrics.Timer::done.called.should.equal true it "should increment the metric", -> - @Metrics.inc.calledWith("setDoc", {status: 200}).should.equal true + @Metrics.inc.calledWith("setDoc", 1, {status: 200}).should.equal true describe "when request returns an error", -> beforeEach -> @@ -195,7 +195,7 @@ describe "PersistenceManager", -> @Metrics.Timer::done.called.should.equal true it "should increment the metric", -> - @Metrics.inc.calledWith("setDoc", {status: "EOOPS"}).should.equal true + @Metrics.inc.calledWith("setDoc", 1, {status: "EOOPS"}).should.equal true describe "when the request returns 404", -> beforeEach -> @@ -209,7 +209,7 @@ describe "PersistenceManager", -> @Metrics.Timer::done.called.should.equal true it "should increment the metric", -> - @Metrics.inc.calledWith("setDoc", {status: 404}).should.equal true + @Metrics.inc.calledWith("setDoc", 1, {status: 404}).should.equal true describe "when the request returns an error status code", -> beforeEach -> @@ -223,4 +223,4 @@ describe "PersistenceManager", -> @Metrics.Timer::done.called.should.equal true it 
"should increment the metric", -> - @Metrics.inc.calledWith("setDoc", {status: 500}).should.equal true + @Metrics.inc.calledWith("setDoc", 1, {status: 500}).should.equal true From 531d9b77b988e2991517c077979056369f3da402 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 21 Jan 2020 09:49:41 +0000 Subject: [PATCH 545/769] add redislabs ca cert to repository --- services/document-updater/Dockerfile | 1 + services/document-updater/install_deps.sh | 2 + services/document-updater/redislabs_ca.pem | 77 ++++++++++++++++++++++ 3 files changed, 80 insertions(+) create mode 100644 services/document-updater/install_deps.sh create mode 100644 services/document-updater/redislabs_ca.pem diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 59f5e61889..2845544ae6 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -17,6 +17,7 @@ FROM node:6.9.5 COPY --from=app /app /app WORKDIR /app +RUN chmod 0755 ./install_deps.sh && ./install_deps.sh USER node CMD ["node", "--expose-gc", "app.js"] diff --git a/services/document-updater/install_deps.sh b/services/document-updater/install_deps.sh new file mode 100644 index 0000000000..8016ec6c85 --- /dev/null +++ b/services/document-updater/install_deps.sh @@ -0,0 +1,2 @@ +cp redislabs_ca.pem /usr/local/share/ca-certificates/redislabs_ca.crt +update-ca-certificates diff --git a/services/document-updater/redislabs_ca.pem b/services/document-updater/redislabs_ca.pem new file mode 100644 index 0000000000..a4af612d25 --- /dev/null +++ b/services/document-updater/redislabs_ca.pem @@ -0,0 +1,77 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 11859567854091286320 (0xa495a620ecc0b730) + Signature Algorithm: sha1WithRSAEncryption + Issuer: O=Garantia Data, CN=SSL Certification Authority + Validity + Not Before: Oct 1 12:14:55 2013 GMT + Not After : Sep 29 12:14:55 2023 GMT + Subject: O=Garantia Data, CN=SSL Certification Authority + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b6:6a:92:1f:c3:73:35:8f:26:7c:67:1c:b4:3b: + 40:bd:13:e0:1e:02:0c:a5:81:28:27:22:b2:b8:86: + 6c:0e:99:78:f5:95:36:8e:21:7c:a4:02:e8:9a:f3: + 7d:1f:b4:f3:53:5e:0f:a5:5c:59:48:b3:ae:67:7e: + 8e:d3:e1:21:8e:1c:f9:65:50:62:6e:4f:29:a3:7a: + 0d:3d:62:99:87:71:43:0e:da:a8:ee:63:d8:a5:02: + 12:1f:dc:ce:7a:4b:c5:e4:87:a1:3c:65:47:7e:04: + 43:01:76:f1:69:77:7a:0d:af:73:97:2d:f0:b8:d4: + dd:ea:33:59:59:37:81:be:da:97:1f:66:48:0d:92: + 82:6b:97:e6:51:10:6b:09:7e:fa:b4:a3:b0:14:ad: + 7a:66:36:04:3c:0e:a4:03:17:22:b7:44:c8:ff:dc: + 56:7f:26:92:f8:bf:04:3b:39:33:91:be:d3:d8:f4: + 81:f8:72:0b:34:56:31:0e:c7:9f:bd:6e:d5:ea:25: + 47:1c:15:c6:08:b7:4c:c9:fe:fe:f4:da:15:2a:b1: + 2a:38:1c:93:ac:ee:01:88:c1:44:f6:87:7b:ba:8b: + c4:73:6b:d5:2a:3f:31:cf:67:3f:2f:b7:c0:77:9b: + 17:06:c8:72:75:28:8f:06:e9:e2:77:2d:91:66:e3: + 6f:67 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Subject Key Identifier: + FD:70:86:D7:2B:C9:D9:96:DD:92:5E:B9:2A:0A:64:82:A3:CD:ED:F0 + X509v3 Authority Key Identifier: + keyid:FD:70:86:D7:2B:C9:D9:96:DD:92:5E:B9:2A:0A:64:82:A3:CD:ED:F0 + + X509v3 Basic Constraints: + CA:TRUE + Signature Algorithm: sha1WithRSAEncryption + 6d:9e:ad:78:70:44:06:bb:f9:93:81:b3:40:7a:5f:9e:c7:c3: + 27:75:47:89:1f:99:77:2c:d2:bb:5a:95:b3:e9:be:05:0b:4a: + 20:7e:4c:26:df:dc:46:e1:26:71:c6:ca:f7:42:63:5b:6f:95: + f7:cb:8d:d0:3b:1c:9d:0f:08:e9:fe:61:82:c1:03:4a:53:53: + f7:72:be:b3:7a:4a:ef:0d:b9:2e:72:b9:b9:ed:f6:66:f5:de: + 
70:c6:62:8d:6b:9e:dd:18:45:fc:4d:fb:c0:cc:dd:f5:c8:56: + bd:37:f0:0d:f4:52:53:d7:d8:eb:b5:13:11:49:4f:43:19:b8: + 52:98:e9:9b:cb:74:8e:bf:d5:c6:e0:9a:0b:8c:94:08:4c:f8: + 38:4a:c9:5e:92:af:9e:bd:f4:b3:37:ce:a7:88:f3:5e:a9:66: + 69:51:10:44:d8:90:6a:fd:d6:ae:e4:06:95:c9:bb:f7:6d:1d: + a1:b1:83:56:46:bb:ac:3f:3c:2b:18:19:47:04:09:61:0d:60: + 3e:15:40:f7:7c:37:7d:89:8c:e7:ee:ea:f1:20:a0:40:30:7c: + f3:fe:de:81:a9:67:89:b7:7b:00:02:71:63:80:7a:7a:9f:95: + bf:9c:41:80:b8:3e:c1:7b:a9:b5:c3:99:16:96:ad:b2:a7:b4: + e9:59:de:7d +-----BEGIN CERTIFICATE----- +MIIDTzCCAjegAwIBAgIJAKSVpiDswLcwMA0GCSqGSIb3DQEBBQUAMD4xFjAUBgNV +BAoMDUdhcmFudGlhIERhdGExJDAiBgNVBAMMG1NTTCBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTAeFw0xMzEwMDExMjE0NTVaFw0yMzA5MjkxMjE0NTVaMD4xFjAUBgNV +BAoMDUdhcmFudGlhIERhdGExJDAiBgNVBAMMG1NTTCBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALZqkh/DczWP +JnxnHLQ7QL0T4B4CDKWBKCcisriGbA6ZePWVNo4hfKQC6JrzfR+081NeD6VcWUiz +rmd+jtPhIY4c+WVQYm5PKaN6DT1imYdxQw7aqO5j2KUCEh/cznpLxeSHoTxlR34E +QwF28Wl3eg2vc5ct8LjU3eozWVk3gb7alx9mSA2SgmuX5lEQawl++rSjsBStemY2 +BDwOpAMXIrdEyP/cVn8mkvi/BDs5M5G+09j0gfhyCzRWMQ7Hn71u1eolRxwVxgi3 +TMn+/vTaFSqxKjgck6zuAYjBRPaHe7qLxHNr1So/Mc9nPy+3wHebFwbIcnUojwbp +4nctkWbjb2cCAwEAAaNQME4wHQYDVR0OBBYEFP1whtcrydmW3ZJeuSoKZIKjze3w +MB8GA1UdIwQYMBaAFP1whtcrydmW3ZJeuSoKZIKjze3wMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAG2erXhwRAa7+ZOBs0B6X57Hwyd1R4kfmXcs0rta +lbPpvgULSiB+TCbf3EbhJnHGyvdCY1tvlffLjdA7HJ0PCOn+YYLBA0pTU/dyvrN6 +Su8NuS5yubnt9mb13nDGYo1rnt0YRfxN+8DM3fXIVr038A30UlPX2Ou1ExFJT0MZ +uFKY6ZvLdI6/1cbgmguMlAhM+DhKyV6Sr5699LM3zqeI816pZmlREETYkGr91q7k +BpXJu/dtHaGxg1ZGu6w/PCsYGUcECWENYD4VQPd8N32JjOfu6vEgoEAwfPP+3oGp +Z4m3ewACcWOAenqflb+cQYC4PsF7qbXDmRaWrbKntOlZ3n0= +-----END CERTIFICATE----- From ad58fe76b27ccfe3e49502407356f13edd09cc3f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 21 Jan 2020 15:35:37 +0000 Subject: [PATCH 546/769] add tls settings --- .../config/settings.defaults.coffee | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index f890eb0f4a..0965c5bd3d 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -1,6 +1,7 @@ Path = require('path') http = require('http') http.globalAgent.maxSockets = 300 +fs = require('fs') module.exports = internal: @@ -44,6 +45,12 @@ module.exports = key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" + tls: if process.env['REDIS_CA_CERT'] && process.env['REDIS_CLIENT_CERT'] && process.env['REDIS_CLIENT_KEY'] + ca: fs.readFileSync(process.env['REDIS_CA_CERT']), + cert: fs.readFileSync( + process.env['REDIS_CLIENT_CERT'] + ), + key: fs.readFileSync(process.env['REDIS_CLIENT_KEY']) new_project_history: port: process.env["NEW_HISTORY_REDIS_PORT"] or "6379" @@ -54,6 +61,12 @@ module.exports = projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" projectHistoryMigrationKey: ({project_id}) -> "ProjectHistory:MigrationKey:{#{project_id}}" migration_phase: "prepare" + tls: if process.env['NEW_HISTORY_REDIS_CA_CERT'] && process.env['NEW_HISTORY_REDIS_CLIENT_CERT'] && process.env['NEW_HISTORY_REDIS_CLIENT_KEY'] + ca: fs.readFileSync(process.env['NEW_HISTORY_REDIS_CA_CERT']), + cert: fs.readFileSync( + 
process.env['NEW_HISTORY_REDIS_CLIENT_CERT'] + ), + key: fs.readFileSync(process.env['NEW_HISTORY_REDIS_CLIENT_KEY']) redisOptions: keepAlive: 100 From 31324fb65a585767fe61a80f9287f5dcc0c900c6 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 22 Jan 2020 15:28:34 +0000 Subject: [PATCH 547/769] add environment variable for migration_phase setting --- services/document-updater/config/settings.defaults.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 0965c5bd3d..a14a6d29fb 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -60,7 +60,7 @@ module.exports = projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" projectHistoryMigrationKey: ({project_id}) -> "ProjectHistory:MigrationKey:{#{project_id}}" - migration_phase: "prepare" + migration_phase: process.env["PROJECT_HISTORY_MIGRATION_PHASE"] or "prepare" tls: if process.env['NEW_HISTORY_REDIS_CA_CERT'] && process.env['NEW_HISTORY_REDIS_CLIENT_CERT'] && process.env['NEW_HISTORY_REDIS_CLIENT_KEY'] ca: fs.readFileSync(process.env['NEW_HISTORY_REDIS_CA_CERT']), cert: fs.readFileSync( From fcfa3ecc9d04d06c3fc46eeec60ee78c374137d0 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 23 Jan 2020 10:05:57 +0100 Subject: [PATCH 548/769] [misc] upgrade node to 10.18.1 --- services/document-updater/.nvmrc | 2 +- services/document-updater/Dockerfile | 9 +++++-- services/document-updater/Jenkinsfile | 1 + services/document-updater/Makefile | 4 ++- services/document-updater/buildscript.txt | 8 +++--- .../document-updater/docker-compose.ci.yml | 14 +++++----- services/document-updater/docker-compose.yml | 27 +++++++------------ 7 files changed, 32 insertions(+), 33 deletions(-) diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index e1e5d1369a..fd26bfb7c5 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -6.9.5 +10.18.1 diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 59f5e61889..6fe5611b99 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -1,4 +1,9 @@ -FROM node:6.9.5 as app +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.3.2 + +FROM node:10.18.1 as app WORKDIR /app @@ -12,7 +17,7 @@ COPY . /app RUN npm run compile:all -FROM node:6.9.5 +FROM node:10.18.1 COPY --from=app /app /app diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 2862de8f47..92db215930 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -16,6 +16,7 @@ pipeline { } stages { + stage('Install') { steps { withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 73f63edba8..b7085c7631 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.24 +# Version: 1.3.2 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -36,6 +36,7 @@ test_clean: test_acceptance_pre_run: @[ ! -f test/acceptance/js/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run + build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ @@ -48,4 +49,5 @@ publish: docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + .PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index a9a1b603d3..2c57ac832a 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -1,10 +1,10 @@ document-updater +--public-repo=True --language=coffeescript ---node-version=6.9.5 +--env-add= +--node-version=10.18.1 --acceptance-creds=None --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops ---build-target=docker ---script-version=1.1.24 --env-pass-through= ---public-repo=True +--script-version=1.3.2 diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index c78d90e8ed..59bee48106 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,9 +1,9 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.24 +# Version: 1.3.2 -version: "2" +version: "2.1" services: test_unit: @@ -25,13 +25,14 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test depends_on: - - mongo - - redis + mongo: + condition: service_healthy + redis: + condition: service_healthy user: node command: npm run test:acceptance:_run - tar: build: . image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER @@ -39,9 +40,8 @@ services: - ./:/tmp/build/ command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root - redis: image: redis mongo: - image: mongo:3.4 + image: mongo:3.6 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 6dc90009ca..2b0581294d 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,13 +1,13 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.24 +# Version: 1.3.2 -version: "2" +version: "2.1" services: test_unit: - image: node:6.9.5 + image: node:10.18.1 volumes: - .:/app working_dir: /app @@ -18,7 +18,7 @@ services: user: node test_acceptance: - build: . + image: node:10.18.1 volumes: - .:/app working_dir: /app @@ -32,24 +32,15 @@ services: NODE_ENV: test user: node depends_on: - - mongo - - redis + mongo: + condition: service_healthy + redis: + condition: service_healthy command: npm run test:acceptance - - - tar: - build: . - image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER - volumes: - - ./:/tmp/build/ - command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
- user: root - redis: image: redis mongo: - image: mongo:3.4 + image: mongo:3.6
From d5a2b96df9c8c0f0ece5852a7931d7b0abd920f4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 23 Jan 2020 14:36:59 +0000 Subject: [PATCH 549/769] add note about deleting the migration key entries --- .../app/coffee/RedisMigrationManager.coffee | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee index a12593d03f..31ffa492c0 100644 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee @@ -45,6 +45,12 @@ async = require("async") # - when all docupdaters are setting the flag (and writing to the new redis), # finish the migration by writing all data to the new redis # +# Final stage +# +# When all the queues are migrated, remove the migration code and return to a +# single client pointing at the new redis. Delete the +# ProjectHistory:MigrationKey:* entries in the new redis. +# # Rollback # # Under the scheme above a project should only ever have data in the old redis
From 626e19ed1adc0d393c9395e3e0c38395eeef0f78 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 23 Jan 2020 15:46:54 +0000 Subject: [PATCH 550/769] add logging of migration phase at startup --- .../document-updater/app/coffee/RedisMigrationManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee index 31ffa492c0..af2eb7ad33 100644 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee @@ -133,7 +133,7 @@ class MigrationClient @rclient_new = redis.createClient(@new_settings) @new_key_schema = new_settings.key_schema # check that migration phase is valid on startup - @getMigrationPhase() + logger.warn {migration_phase: @getMigrationPhase()}, "running with RedisMigrationManager" getMigrationPhase: () -> @migration_phase = @new_settings.migration_phase # FIXME: allow setting migration phase while running for testing
From 544ae05212103925a7b55b160ac324b69057d9ba Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 23 Jan 2020 16:22:26 +0000 Subject: [PATCH 551/769] add note about rollback --- .../document-updater/app/coffee/RedisMigrationManager.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee index af2eb7ad33..d11024fc94 100644 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ b/services/document-updater/app/coffee/RedisMigrationManager.coffee @@ -81,7 +81,8 @@ async = require("async") # - deploy project history with the "old queue" pointing to the new redis and # the "new queue" to the old redis to clear the new queue before processing # the new queue (i.e. add a rollback:true property in new_project_history in -# the project-history settings) +# the project-history settings via the environment variable +# MIGRATION_PHASE="rollback"). # - projects will now clear gradually from the new redis back to the old redis # - get a list of all the projects in the new redis and flush them, which will # cause the new queues to be cleared and the old redis to be used for those
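The "Final stage" and "Rollback" notes in the two patches above describe the end states of the queue migration: once every queue has been migrated, the ProjectHistory:MigrationKey:* entries are to be deleted from the new redis. The patches do not include that cleanup step, so the following is only a hedged sketch of what it could look like; the key pattern comes from the key_schema in the settings diffs earlier in the series, SCAN and DEL are standard redis commands, and the helper name and structure are assumed, not taken from the codebase.

    # illustrative sketch only, not code from the patch series
    # deletes the ProjectHistory:MigrationKey:* entries from the new redis
    # once the migration is complete, using SCAN (not KEYS) so the cleanup
    # does not block redis while it walks the keyspace
    deleteMigrationKeys = (rclient, callback) ->
      cursor = "0"
      doScan = ->
        rclient.scan cursor, "MATCH", "ProjectHistory:MigrationKey:*", "COUNT", 1000, (error, reply) ->
          return callback(error) if error?
          [cursor, keys] = reply
          done = -> if cursor == "0" then callback() else doScan()
          if keys.length > 0
            rclient.del keys..., (error) ->
              return callback(error) if error?
              done()
          else
            done()
      doScan()

Note that the real keys are hash-tagged ("ProjectHistory:MigrationKey:{project_id}" per the key_schema), so on a cluster deployment the scan would have to be run against each node; the single-client form above matches the non-cluster setup implied by the settings.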
From 338d3609f5a5dbf98bc706c14fbbd9b3a4a36558 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 30 Jan 2020 15:17:13 +0000 Subject: [PATCH 552/769] add comment about null byte check --- services/document-updater/app/coffee/RedisManager.coffee | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 9e2edbd99d..ca4151d299 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -36,6 +36,8 @@ module.exports = RedisManager = docLines = JSON.stringify(docLines) if docLines.indexOf("\u0000") != -1 error = new Error("null bytes found in doc lines") + # this check was added to catch memory corruption in JSON.stringify. + # It sometimes returned null bytes at the end of the string. logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message return callback(error) docHash = RedisManager._computeHash(docLines) @@ -224,12 +226,14 @@ module.exports = RedisManager = for op in jsonOps if op.indexOf("\u0000") != -1 error = new Error("null bytes found in jsonOps") + # this check was added to catch memory corruption in JSON.stringify logger.error {err: error, doc_id: doc_id, jsonOps: jsonOps}, error.message return callback(error) newDocLines = JSON.stringify(docLines) if newDocLines.indexOf("\u0000") != -1 error = new Error("null bytes found in doc lines") + # this check was added to catch memory corruption in JSON.stringify logger.error {err: error, doc_id: doc_id, newDocLines: newDocLines}, error.message return callback(error) newHash = RedisManager._computeHash(newDocLines) @@ -243,6 +247,7 @@ module.exports = RedisManager = return callback(error) if ranges? and ranges.indexOf("\u0000") != -1 error = new Error("null bytes found in ranges") + # this check was added to catch memory corruption in JSON.stringify logger.error err: error, doc_id: doc_id, ranges: ranges, error.message return callback(error) multi = rclient.multi()
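All of the comments added in this patch mark the same defensive pattern: JSON.stringify was observed, under memory corruption, to occasionally return null bytes at the end of its output, so every serialised value is checked before it is written to redis, turning silent data corruption into a hard error. The patch inlines the check at each call site; factored out, the pattern looks roughly like the sketch below (the helper name is illustrative and not from the patch, logger-sharelatex is the logging module the surrounding code already uses).

    # illustrative sketch only; RedisManager inlines this check at each call site
    logger = require "logger-sharelatex"

    stringifyWithoutNullBytes = (value, description, callback) ->
      json = JSON.stringify(value)
      if json.indexOf("\u0000") != -1
        # JSON.stringify has been seen to emit trailing \u0000 bytes under
        # memory corruption; fail loudly instead of persisting bad data
        error = new Error("null bytes found in #{description}")
        logger.error {err: error, description: description}, error.message
        return callback(error)
      callback null, json

Each call site then reduces to: stringify, check, and only on success enqueue the redis write.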
From 10d177f41bc2d07e311e3d30c1e108466b67bf86 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Fri, 7 Feb 2020 12:24:57 +0100 Subject: [PATCH 553/769] [misc] test/unit: stub the correct redis settings --- .../unit/coffee/DispatchManager/DispatchManagerTests.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee index af36d10a31..773a85afd1 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee @@ -13,7 +13,7 @@ describe "DispatchManager", -> "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() } "settings-sharelatex": @settings = redis: - realtime: {} + documentupdater: {} "redis-sharelatex": @redis = {} "./RateLimitManager": {} "./Errors": Errors
From 4070b0ef6f0ad787644b1d9ce22373c89b47e4ba Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 16 May 2019 14:58:32 +0200 Subject: [PATCH 554/769] [misc] tests: fix a static test `ProjectHistoryRedisManager.queueOps` is masked and cannot push changes into redis during the unit test. Signed-off-by: Jakob Ackermann --- .../test/unit/coffee/RedisManager/RedisManagerTests.coffee | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 99035a32b3..b666163762 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -445,13 +445,12 @@ describe "RedisManager", -> describe "with project history disabled", -> beforeEach -> - @rclient.rpush = sinon.stub() @settings.apis.project_history.enabled = false @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback it "should not push the updates into the project history ops list", -> - @rclient.rpush.called.should.equal false + @ProjectHistoryRedisManager.queueOps.called.should.equal false it "should call the callback", -> @callback @@ -493,7 +492,6 @@ describe "RedisManager", -> describe "with no updates", -> beforeEach -> - @rclient.rpush = sinon.stub().callsArgWith(1, null, @project_update_list_length) @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version) @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, [], @ranges, @updateMeta, @callback @@ -503,7 +501,7 @@ describe "RedisManager", -> .should.equal false it "should not try to enqueue project updates", -> - @rclient.rpush + @ProjectHistoryRedisManager.queueOps .called .should.equal false
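The point of this fix is that the old assertions checked @rclient.rpush, but RedisManager enqueues project-history ops through ProjectHistoryRedisManager, which the SandboxedModule test harness replaces with a stub at require time, so the old assertion could never fail for the right reason. The reliable pattern in these tests is to assert on the injected stub itself. A condensed sketch of that pattern follows; the module path and the chai setup line are illustrative, since the real test files build these in shared beforeEach blocks.

    # condensed sketch of the pattern the fix relies on: the redis-facing
    # collaborator is swapped for a stub when the module under test is
    # required, so assertions go against the stub, not against rclient.rpush
    sinon = require "sinon"
    SandboxedModule = require "sandboxed-module"
    require("chai").should()

    ProjectHistoryRedisManager = queueOps: sinon.stub()
    RedisManager = SandboxedModule.require "../../../app/js/RedisManager", requires:
      "./ProjectHistoryRedisManager": ProjectHistoryRedisManager

    # ...exercise RedisManager.updateDocument with project history disabled...
    # the meaningful assertion is then on the stub:
    ProjectHistoryRedisManager.queueOps.called.should.equal false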
From afe43fa252c431fcaf281c63c77b3bd02eca0f93 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 7 Feb 2020 14:16:31 +0000 Subject: [PATCH 555/769] update to node 10.19.0 --- services/document-updater/.nvmrc | 2 +- services/document-updater/Dockerfile | 4 ++-- services/document-updater/buildscript.txt | 2 +- services/document-updater/docker-compose.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index fd26bfb7c5..5b7269c0a9 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -10.18.1 +10.19.0 diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 6fe5611b99..1ff1ecd5c5 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -3,7 +3,7 @@ # https://github.com/sharelatex/sharelatex-dev-environment # Version: 1.3.2 -FROM node:10.18.1 as app +FROM node:10.19.0 as app WORKDIR /app @@ -17,7 +17,7 @@ COPY . /app RUN npm run compile:all -FROM node:10.18.1 +FROM node:10.19.0 COPY --from=app /app /app diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index 2c57ac832a..094b13d978 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -2,7 +2,7 @@ document-updater --public-repo=True --language=coffeescript --env-add= ---node-version=10.18.1 +--node-version=10.19.0 --acceptance-creds=None --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 2b0581294d..d3bc8c7d98 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -7,7 +7,7 @@ version: "2.1" services: test_unit: - image: node:10.18.1 + image: node:10.19.0 volumes: - .:/app working_dir: /app @@ -18,7 +18,7 @@ services: user: node test_acceptance: - image: node:10.18.1 + image: node:10.19.0 volumes: - .:/app working_dir: /app
From 9cfc59734a4c1e0b1e5e51b08fbec2632d5d0bac Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 10 Feb 2020 17:10:39 +0100 Subject: [PATCH 556/769] [misc] update the build scripts to 1.3.5 --- services/document-updater/Dockerfile | 10 +++++----- services/document-updater/Makefile | 7 ++++++- services/document-updater/buildscript.txt | 2 +- services/document-updater/docker-compose.ci.yml | 4 ++-- services/document-updater/docker-compose.yml | 4 ++-- 5 files changed, 16 insertions(+), 11 deletions(-) diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 1ff1ecd5c5..e538fb48d9 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -1,12 +1,14 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.2 +# Version: 1.3.5 -FROM node:10.19.0 as app +FROM node:10.19.0 as base WORKDIR /app +FROM base as app + #wildcard as some files may not be in all repos COPY package*.json npm-shrink*.json /app/ @@ -17,11 +19,9 @@ COPY . /app RUN npm run compile:all -FROM node:10.19.0 +FROM base COPY --from=app /app /app - -WORKDIR /app USER node CMD ["node", "--expose-gc", "app.js"] diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index b7085c7631..64646d796f 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.2 +# Version: 1.3.5 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -28,9 +28,14 @@ test_unit: test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run +test_acceptance_debug: test_clean test_acceptance_pre_run test_acceptance_run_debug + test_acceptance_run: @[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance +test_acceptance_run_debug: + @[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk + test_clean: $(DOCKER_COMPOSE) down -v -t 0 diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index 094b13d978..b7928a7044 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -7,4 +7,4 @@ document-updater --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops --env-pass-through= ---script-version=1.3.2 +--script-version=1.3.5 diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 59bee48106..b99da9b18e 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,9 +1,9 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.2 +# Version: 1.3.5 -version: "2.1" +version: "2.3" services: test_unit: diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index d3bc8c7d98..6a1bbb1005 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,9 +1,9 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.2 +# Version: 1.3.5 -version: "2.1" +version: "2.3" services: test_unit: From 26d8d07e7af28daeabb3f48cab4eb8b830dbe328 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 11 Feb 2020 14:32:49 +0000 Subject: [PATCH 557/769] remove unused redis settings from rate limit unit test --- .../test/unit/coffee/RateLimitManager/RateLimitManager.coffee | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee index 866532a4da..fe5dc95327 100644 --- a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee +++ b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee @@ -9,9 +9,7 @@ describe "RateLimitManager", -> beforeEach -> @RateLimitManager = SandboxedModule.require modulePath, requires: "logger-sharelatex": @logger = { log: sinon.stub() } - "settings-sharelatex": @settings = - redis: - realtime: {} + "settings-sharelatex": @settings = {} "./Metrics": @Metrics = Timer: class Timer done: sinon.stub() From 81e21c5e76c3f7a4e9c9c15d38585574bce83b22 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 12 Feb 2020 12:37:00 +0000 Subject: [PATCH 558/769] remove unused .travis.yml file --- services/document-updater/.travis.yml | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 services/document-updater/.travis.yml diff --git a/services/document-updater/.travis.yml b/services/document-updater/.travis.yml deleted file mode 100644 index febdbb55a3..0000000000 --- a/services/document-updater/.travis.yml +++ /dev/null @@ -1,11 +0,0 @@ -language: node_js - -before_install: - - npm install -g grunt-cli - -install: - - npm install - - grunt install - -script: - - grunt test:unit From af4211d167f2e9d8c6950e0af6a07a0271c863aa Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Wed, 12 Feb 2020 14:39:51 +0100 Subject: [PATCH 559/769] [misc] rename npm-shrinkwrap.json to package-lock.json and run npm i --- .../document-updater/{npm-shrinkwrap.json => package-lock.json} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/{npm-shrinkwrap.json => package-lock.json} (100%) diff --git a/services/document-updater/npm-shrinkwrap.json b/services/document-updater/package-lock.json similarity index 100% rename from services/document-updater/npm-shrinkwrap.json rename to services/document-updater/package-lock.json From c4bec0dcc7028a59a04a42543c551261db1b6315 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Feb 2020 09:56:23 +0000 Subject: [PATCH 560/769] Bump extend from 3.0.1 to 3.0.2 Bumps [extend](https://github.com/justmoon/node-extend) from 3.0.1 to 3.0.2. 
- [Release notes](https://github.com/justmoon/node-extend/releases) - [Changelog](https://github.com/justmoon/node-extend/blob/master/CHANGELOG.md) - [Commits](https://github.com/justmoon/node-extend/compare/v3.0.1...v3.0.2) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 25 +++------------------ 1 file changed, 3 insertions(+), 22 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 964ffbc4d2..28faa73192 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -20,13 +20,6 @@ "pify": "^4.0.1", "retry-request": "^4.0.0", "teeny-request": "^3.11.3" - }, - "dependencies": { - "extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" - } } }, "@google-cloud/debug-agent": { @@ -1088,9 +1081,9 @@ } }, "extend": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", - "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ=" + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "extsprintf": { "version": "1.3.0", @@ -1204,13 +1197,6 @@ "extend": "^3.0.2", "https-proxy-agent": "^2.2.1", "node-fetch": "^2.3.0" - }, - "dependencies": { - "extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" - } } }, "gcp-metadata": { @@ -1594,11 +1580,6 @@ "nan": "^2.10.0" } }, - "extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" - }, "fast-deep-equal": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", From 2acd4c0f23b39439ae026a8a5e04780cb8b67c9f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Feb 2020 09:56:24 +0000 Subject: [PATCH 561/769] Bump stringstream from 0.0.5 to 0.0.6 Bumps [stringstream](https://github.com/mhart/StringStream) from 0.0.5 to 0.0.6. 
- [Release notes](https://github.com/mhart/StringStream/releases) - [Commits](https://github.com/mhart/StringStream/compare/v0.0.5...v0.0.6) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 964ffbc4d2..3be60492b6 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -2874,9 +2874,9 @@ } }, "stringstream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", - "integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg=" + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.6.tgz", + "integrity": "sha512-87GEBAkegbBcweToUrdzf3eLhWNg06FJTebl4BVJz/JgWy8CvEr9dRtX5qWphiynMSQlxxi+QqN0z5T32SLlhA==" }, "tdigest": { "version": "0.1.1", From 3fb0ed414cf0282905c791d52475c9a1acabeded Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Feb 2020 09:56:25 +0000 Subject: [PATCH 562/769] Bump sshpk from 1.13.1 to 1.16.1 Bumps [sshpk](https://github.com/joyent/node-sshpk) from 1.13.1 to 1.16.1. - [Release notes](https://github.com/joyent/node-sshpk/releases) - [Commits](https://github.com/joyent/node-sshpk/compare/v1.13.1...v1.16.1) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 29 ++++++++++++--------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 964ffbc4d2..fdc13342e3 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -453,7 +453,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", - "optional": true, "requires": { "tweetnacl": "^0.14.3" } @@ -934,7 +933,6 @@ "version": "0.1.1", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", - "optional": true, "requires": { "jsbn": "~0.1.0" } @@ -1442,8 +1440,7 @@ "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "optional": true + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" }, "json-bigint": { "version": "0.3.0", @@ -2657,6 +2654,11 @@ "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, "sandboxed-module": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", @@ -2788,9 +2790,9 @@ } }, "sshpk": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz", - "integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=", + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", + "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", "requires": { "asn1": "~0.2.3", "assert-plus": "^1.0.0", @@ -2799,13 +2801,17 @@ "ecc-jsbn": 
"~0.1.1", "getpass": "^0.1.1", "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", "tweetnacl": "~0.14.0" }, "dependencies": { "asn1": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", - "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=" + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "requires": { + "safer-buffer": "~2.1.0" + } }, "assert-plus": { "version": "1.0.0", @@ -2949,8 +2955,7 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "optional": true + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" }, "type-is": { "version": "1.3.1", From 9a92cd7b31b47cc5c036bf3b19e35ba72e0972ef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Feb 2020 09:56:26 +0000 Subject: [PATCH 563/769] Bump lodash from 4.17.4 to 4.17.13 Bumps [lodash](https://github.com/lodash/lodash) from 4.17.4 to 4.17.13. - [Release notes](https://github.com/lodash/lodash/releases) - [Commits](https://github.com/lodash/lodash/compare/4.17.4...4.17.13) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 6 +++--- services/document-updater/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 964ffbc4d2..d1a9986887 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1511,9 +1511,9 @@ "integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo=" }, "lodash": { - "version": "4.17.4", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", - "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" + "version": "4.17.13", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.13.tgz", + "integrity": "sha512-vm3/XWXfWtRua0FkUyEHBZy8kCPjErNBT9fJx8Zvs+U6zjqPbTUOpkaoum3O5uiA8sm+yNMHXfYkTUHFoMxFNA==" }, "lodash.defaults": { "version": "4.2.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index f80251248b..eea5c9d88b 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -23,7 +23,7 @@ "async": "^2.5.0", "coffee-script": "~1.7.0", "express": "3.11.0", - "lodash": "^4.17.4", + "lodash": "^4.17.13", "logger-sharelatex": "^1.7.0", "lynx": "0.0.11", "metrics-sharelatex": "^2.2.0", From 908e916779d18e8e1e068495aa392dd086158321 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 17 Feb 2020 13:41:34 +0000 Subject: [PATCH 564/769] move sinon and sandboxed-module to devDependencies --- services/document-updater/package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index eea5c9d88b..e3f3cd4aef 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -31,9 +31,7 @@ "redis-sharelatex": "^1.0.11", "request": "2.47.0", "requestretry": "^1.12.0", - "sandboxed-module": "~0.2.0", - "settings-sharelatex": "^1.1.0", - "sinon": "~1.5.2" + "settings-sharelatex": "^1.1.0" }, "devDependencies": { "bunyan": "~0.22.1", @@ -41,6 +39,8 @@ "chai-spies": "^0.7.1", "cluster-key-slot": "^1.0.5", "mocha": "^5.0.1", + "sandboxed-module": "~0.2.0", + 
"sinon": "~1.5.2", "timekeeper": "^2.0.0" } } From 638688a8540c5672cfd7dad48680b0fa16cdbc8d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 17 Feb 2020 13:41:53 +0000 Subject: [PATCH 565/769] remove unused lynx module --- services/document-updater/package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index e3f3cd4aef..c0c03bfd80 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -25,7 +25,6 @@ "express": "3.11.0", "lodash": "^4.17.13", "logger-sharelatex": "^1.7.0", - "lynx": "0.0.11", "metrics-sharelatex": "^2.2.0", "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", From 583af0bfc4e2bf0156022490e5ab0c28140d6349 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 17 Feb 2020 13:42:57 +0000 Subject: [PATCH 566/769] update package-lock.json --- services/document-updater/package-lock.json | 515 ++++++++++---------- 1 file changed, 256 insertions(+), 259 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 9bfbe38a95..687dbe463c 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -104,7 +104,7 @@ "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" }, "gcp-metadata": { "version": "0.9.3", @@ -184,7 +184,7 @@ "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, "semver": { "version": "6.1.1", @@ -201,7 +201,7 @@ "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" }, "@protobufjs/base64": { "version": "1.1.2", @@ -216,12 +216,12 @@ "@protobufjs/eventemitter": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" }, "@protobufjs/fetch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", "requires": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" @@ -230,27 +230,27 @@ "@protobufjs/float": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" }, "@protobufjs/inquire": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" + "integrity": 
"sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" }, "@protobufjs/path": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" }, "@protobufjs/pool": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" }, "@protobufjs/utf8": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" }, "@sindresorhus/is": { "version": "0.15.0", @@ -325,7 +325,7 @@ "accepts": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.0.3.tgz", - "integrity": "sha1-krHbDU89tHsFMN9uFa6X21FNwvg=", + "integrity": "sha512-cZqKqO3VXtuIZ5vQLVc8M6JDFVTZoVwZrlmTCA1nH9EoN5v6ZWWStKvd1A5RWpduRVXD55px3t75TvS7JdLfHA==", "requires": { "mime": "~1.2.11", "negotiator": "0.4.6" @@ -347,7 +347,7 @@ "ajv": { "version": "5.5.2", "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", + "integrity": "sha512-Ajr4IcMXq/2QmMkEmSvxqfLN5zGmJ92gHXAeOXq1OekoH2rfDNsgdDoL2f7QaRCy7G/E6TpxBVdRuNraMztGHw==", "requires": { "co": "^4.6.0", "fast-deep-equal": "^1.0.0", @@ -363,12 +363,12 @@ "asn1": { "version": "0.1.11", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz", - "integrity": "sha1-VZvhg3bQik7E2+gId9J4GGObLfc=" + "integrity": "sha512-Fh9zh3G2mZ8qM/kwsiKwL2U2FmXxVsboP4x1mXjnhKHv3SmzaBZoYvxEQJz/YS2gnCgd8xlAVWcZnQyC9qZBsA==" }, "assert-plus": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz", - "integrity": "sha1-7nQAlBMALYTOxyGcasgRgS5yMWA=" + "integrity": "sha512-brU24g7ryhRwGCI2y+1dGQmQXiZF7TtIj583S96y0jjdajIe6wn8BuXyELYhvD22dtIxDQVFk04YTJwwdwOYJw==" }, "assertion-error": { "version": "1.1.0", @@ -396,17 +396,17 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==" }, "aws4": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", - "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=" + "integrity": "sha512-tkleq4Df8UWu/7xf/tfbo7t2vDa07bcONGnKhl0QXKQsh3fJ0yJ1M5wzpy8BtBSENQw/9VTsthMhLG+yXHfStQ==" }, "axios": { "version": "0.18.1", @@ -420,7 +420,7 @@ "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + "integrity": "sha512-9Y0g0Q8rmSt+H33DfKv7FOc3v+iRI+o1lbzt8jGcIosYW37IIW/2XVYq5NPdmaD5NQ59Nk26Kl/vZbwW9Fr8vg==" }, "base64-js": { "version": "1.3.0", @@ -430,22 +430,22 @@ "base64-url": { "version": "1.3.3", 
"resolved": "https://registry.npmjs.org/base64-url/-/base64-url-1.3.3.tgz", - "integrity": "sha1-+LbFN/CaT8WMmcuG4LDpxhRhog8=" + "integrity": "sha512-UiVPRwO/m133KIQrOEIqO07D8jaYjFIx7/lYRWTRVR23tDSn00Ves6A+Bk0eLmhyz6IJGSFlNCKUuUBO2ssytA==" }, "basic-auth-connect": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/basic-auth-connect/-/basic-auth-connect-1.0.0.tgz", - "integrity": "sha1-/bC0OWLKe0BFanwrtI/hc9otISI=" + "integrity": "sha512-kiV+/DTgVro4aZifY/hwRwALBISViL5NP4aReaR2EVJEObpbUBHIkdJh/YpcoEiYt7nBodZ6U2ajZeZvSxUCCg==" }, "batch": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.1.tgz", - "integrity": "sha1-NqS6tZTAUP17UHvKDbMMLZKvT/I=" + "integrity": "sha512-OXRjc65VJvFtb7JD5HszSI1WWwsI6YnJS7Qmlx1CaDQrZ5urNIeRjtTyBe1YapNXyoWzrcc4yqg4rNe8YMyong==" }, "bcrypt-pbkdf": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", - "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", + "integrity": "sha512-vY4sOrSlpwNZXsinfJ0HpbSkFft4nhSVLeUrQ4j2ydGmBOiVY83aMJStJATBy0C3+XdaYa990kIA1qkC2mUq6g==", "requires": { "tweetnacl": "^0.14.3" } @@ -466,12 +466,12 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" + "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, "bl": { "version": "0.9.5", "resolved": "https://registry.npmjs.org/bl/-/bl-0.9.5.tgz", - "integrity": "sha1-wGt5evCF6gC8Unr8jvzxHeIjIFQ=", + "integrity": "sha512-njlCs8XLBIK7LCChTWfzWuIAxkpmmLXcL7/igCofFT1B039Sz0IPnAmosN5QaO22lU4qr8LcUz2ojUlE6pLkRQ==", "requires": { "readable-stream": "~1.0.26" }, @@ -479,12 +479,12 @@ "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" }, "readable-stream": { "version": "1.0.34", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", - "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "integrity": "sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.1", @@ -495,14 +495,14 @@ "string_decoder": { "version": "0.10.31", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" } } }, "body-parser": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.4.3.tgz", - "integrity": "sha1-RyeVLP9K8Hc+76SyJsL0Ei9eI00=", + "integrity": "sha512-+/wGpsrfMR0d7nPNnmpKAPQVXg37cU3YVvR/hThORfbiJYvzmGHf+A/x0QWtE/s2XMdj2/UTQUweVqNPlkZlEw==", "requires": { "bytes": "1.0.0", "depd": "0.3.0", @@ -516,14 +516,14 @@ "qs": { "version": "0.6.6", "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", - "integrity": "sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc=" + "integrity": "sha512-kN+yNdAf29Jgp+AYHUmC7X4QdJPR8czuMWLNLc0aRxkQ7tB3vJQEONKKT9ou/rW7EbqVec11srC9q9BiVbcnHA==" } } }, "boom": { "version": "0.4.2", "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz", - "integrity": "sha1-emNune1O/O+xnO9JR6PGffrukRs=", + "integrity": 
"sha512-OvfN8y1oAxxphzkl2SnCS+ztV/uVKTATtgLjWYg/7KwcNyf3rzpHxNQJZCKtsZd4+MteKczhWbSjtEX4bGgU9g==", "requires": { "hoek": "0.9.x" } @@ -531,7 +531,7 @@ "brace-expansion": { "version": "1.1.8", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", - "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=", + "integrity": "sha512-Dnfc9ROAPrkkeLIUweEbh7LFT9Mc53tO/bbM044rKjhgAEyIGKvKXg97PM/kRizZIfUHaROZIoeEaWao+Unzfw==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -551,17 +551,17 @@ "buffer-crc32": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.3.tgz", - "integrity": "sha1-u1RRnpXRB8vSQA520MqxRnM22SE=" + "integrity": "sha512-HLvoSqq1z8fJEcT1lUlJZ4OJaXJZ1wsWm0+fBxkz9Bdf/WphA4Da7FtGUguNNyEXL4WB0hNMTaWmdFRFPy8YOQ==" }, "buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, "buffer-shims": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz", - "integrity": "sha1-mXjOMXOIxkmth5MCjDR37wRKi1E=" + "integrity": "sha512-Zy8ZXMyxIT6RMTeY7OP/bDndfj6bwCan7SS98CEndS6deHwWPpseeHlwarNcBim+etXnF9HBc1non5JgDaJU1g==" }, "builtin-modules": { "version": "3.1.0", @@ -571,7 +571,7 @@ "bunyan": { "version": "0.22.3", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-0.22.3.tgz", - "integrity": "sha1-ehncG0yMZF90AkGnQPIkUUfGfsI=", + "integrity": "sha512-v9dd5qmd6nJHEi7fiNo1fR2pMpE8AiB47Ap984p4iJKj+dEA69jSccmq6grFQn6pxIh0evvKpC5XO1SKfiaRoQ==", "dev": true, "requires": { "dtrace-provider": "0.2.8", @@ -581,12 +581,14 @@ "buster-core": { "version": "0.6.4", "resolved": "https://registry.npmjs.org/buster-core/-/buster-core-0.6.4.tgz", - "integrity": "sha1-J79rrWdCROpyDzEdkAoMoct4YFA=" + "integrity": "sha512-WxitPqvzr2J7AA2eLEddv72XYaunQUDI0ICZhd1ucT/HhQI6JqfA7WQtoBoVZSgUn1+7uf9r7Plhh8PdO2+Kjg==", + "dev": true }, "buster-format": { "version": "0.5.6", "resolved": "https://registry.npmjs.org/buster-format/-/buster-format-0.5.6.tgz", - "integrity": "sha1-K4bDIuz14bCubm55Bev884fSq5U=", + "integrity": "sha512-AiH2uZZSwyhgtM7l8/A/7HaqUBJnXwcj9oUPIrSvTKTOo/Go0KJiqnGnRRl4zGqIf3IK3pC/KfkgJMgn6lpyzw==", + "dev": true, "requires": { "buster-core": "=0.6.4" } @@ -594,17 +596,17 @@ "bytes": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/bytes/-/bytes-1.0.0.tgz", - "integrity": "sha1-NWnt6Lo0MV+rmcPpLLBMciDeH6g=" + "integrity": "sha512-/x68VkHLeTl3/Ll8IvxdwzhrT+IyKc52e/oyHhA2RwqPqswSnjVbSddfPRwAsJtbilMAPSRWwAlpxdYsSWOTKQ==" }, "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" }, "chai": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", - "integrity": "sha1-TQJjewZ/6Vi9v906QOxW/vc3Mkc=", + "integrity": "sha512-eRYY0vPS2a9zt5w5Z0aCeWbrXTEyvk7u/Xf71EzNObrjSCPgMm1Nku/D/u2tiqHBX5j40wWhj54YJLtgn8g55A==", "dev": true, "requires": { "assertion-error": "^1.0.1", @@ -615,7 +617,7 @@ "type-detect": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-1.0.0.tgz", - "integrity": "sha1-diIXzAbbJY7EiQihKY6LlRIejqI=", + "integrity": 
"sha512-f9Uv6ezcpvCQjJU0Zqbg+65qdcszv3qUQsZfjdRbWiZ7AMenrX1u0lNk9EoWWX6e1F+NULyg27mtdeZ5WhpljA==", "dev": true } } @@ -623,24 +625,24 @@ "chai-spies": { "version": "0.7.1", "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", - "integrity": "sha1-ND2Z9RJEIS6LF+ZLk5lv97LCqbE=", + "integrity": "sha512-ezo+u5DUDjPhOYkgsjbbVhtdzsnVr6n2CL/juJA89YnBsWO4ocL14Ake0txlGrGZo/HwcfhFGaV0czdunr3tHA==", "dev": true }, "cluster-key-slot": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.8.tgz", - "integrity": "sha1-dlRVYIWmUzCTKi6LWXb44tCz5BQ=", + "integrity": "sha512-OF/xJE08NvIL6Fbi8XZpAlt6p55mE7SXyBWXMp9TNo4s4XnRO6kIQ8JLwQ77luFXpoB+G9zi/Ks8OWHBg0vvVg==", "dev": true }, "co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=" + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==" }, "coffee-script": { "version": "1.7.1", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz", - "integrity": "sha1-YplqhheAx15tUGnROCJyO3NAS/w=", + "integrity": "sha512-W3s+SROY73OmrSGtPTTW/2wp2rmW5vuh0/tUuCK1NvTuyzLOVPccIP9whmhZ4cYWcr2NJPNENZIFaAMkTD5G3w==", "requires": { "mkdirp": "~0.3.5" } @@ -648,7 +650,7 @@ "combined-stream": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", - "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", + "integrity": "sha512-qfexlmLp9MyrkajQVyjEDb0Vj+KhRgR/rxLiVhaihlT+ZkX0lReqtH6Ack40CvMDERR4b5eFp3CreskpBs1Pig==", "requires": { "delayed-stream": "0.0.5" } @@ -656,7 +658,7 @@ "commander": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", - "integrity": "sha1-io8w7GcKb91kr1LxkUuQfXnq1bU=", + "integrity": "sha512-uoVVA5dchmxZeTMv2Qsd0vhn/RebJYsWo4all1qtrUL3BBhQFn4AQDF4PL+ZvOeK7gczXKEZaSCyMDMwFBlpBg==", "requires": { "keypress": "0.1.x" } @@ -664,12 +666,12 @@ "compressible": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/compressible/-/compressible-1.1.0.tgz", - "integrity": "sha1-Ek2Ke7oYoFpBCi8lutQTsblK/2c=" + "integrity": "sha512-rCwUIlpYk3MyJwPuNJUFY4GkusYq33phMUj0iuJxpmRa7FVyFyTy4O4S2DxheA8LBWZcd3ZiotCR9GZE2PLyzQ==" }, "compression": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/compression/-/compression-1.0.7.tgz", - "integrity": "sha1-/Ev/Jh3043oTAAby2yqZo0iW9Vo=", + "integrity": "sha512-358POVi/83+vOraY0hLNi1s/7G7e3MiZKVlrYiu422gWWjI1AKBXa4sQXnoYYLAyB29jUdo8bqFL7C4JX6kBnA==", "requires": { "accepts": "1.0.3", "bytes": "1.0.0", @@ -681,12 +683,12 @@ "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "connect": { "version": "2.20.2", "resolved": "https://registry.npmjs.org/connect/-/connect-2.20.2.tgz", - "integrity": "sha1-RLxkM0x668IZfFLGh0cUAC0Jd74=", + "integrity": "sha512-D5vV4EMPQFc/mmOoUWm7sRtrWNMmKRHJR2NBkNMjzudrGPJfDNdMGsPWfxbThVv4GAiEp1O1EVW9IZe3Ovao+w==", "requires": { "basic-auth-connect": "1.0.0", "body-parser": "1.4.3", @@ -722,7 +724,7 @@ "debug": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "integrity": 
"sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", "requires": { "ms": "0.6.2" } @@ -730,19 +732,19 @@ "ms": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" }, "qs": { "version": "0.6.6", "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", - "integrity": "sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc=" + "integrity": "sha512-kN+yNdAf29Jgp+AYHUmC7X4QdJPR8czuMWLNLc0aRxkQ7tB3vJQEONKKT9ou/rW7EbqVec11srC9q9BiVbcnHA==" } } }, "connect-timeout": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/connect-timeout/-/connect-timeout-1.1.1.tgz", - "integrity": "sha1-bH4xyY8Kxo620TBfZ/IfWm6Q/QQ=", + "integrity": "sha512-HS5OPZHc0cAJkzE1jgGjwL95rzF+Znk10Pq0vpUEm4ieDV+4HiAu4U/I71G5Epqs3b3YDeHkxBwE7lZtDRpNPQ==", "requires": { "debug": "1.0.2", "on-headers": "0.0.0" @@ -751,7 +753,7 @@ "debug": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", "requires": { "ms": "0.6.2" } @@ -759,7 +761,7 @@ "ms": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" } } }, @@ -780,12 +782,12 @@ "cookie": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.2.tgz", - "integrity": "sha1-cv7D0k5Io0Mgc9kMEmQgBQYQBLE=" + "integrity": "sha512-+mHmWbhevLwkiBf7QcbZXHr0v4ZQQ/OgHk3fsQHrsMMiGzuvAmU/YMUR+ZfrO/BLAGIWFfx2Z7Oyso0tZR/wiA==" }, "cookie-parser": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.3.1.tgz", - "integrity": "sha1-ML/CkGoESJ1ZvLnjL5DbCOBLtR4=", + "integrity": "sha512-DExg4B+vImwqftLcz11Kxv5wGEOnfotGbnxwNUV7BpljO0nd+p9dIJ40p0eD2YKBq012vONSETBVO0aXAiY4WA==", "requires": { "cookie": "0.1.2", "cookie-signature": "1.0.3" @@ -794,17 +796,17 @@ "cookie-signature": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.3.tgz", - "integrity": "sha1-kc2ZfMUftkFZVzjGnNoCAyj1D/k=" + "integrity": "sha512-/KzKzsm0OlguYov01OlOpTkX5MhBKUmfL/KMum7R80rPKheb9AwUzr78TwtBt1OdbnWrt4X+wxbTfcQ3noZqHw==" }, "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" }, "cryptiles": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz", - "integrity": "sha1-7ZH/HxetE9N0gohZT4pIoNJvMlw=", + "integrity": "sha512-gvWSbgqP+569DdslUiCelxIv3IYK5Lgmq1UrRnk+s1WxQOQ16j3GPDcjdtgL5Au65DU/xQi6q3xPtf5Kta+3IQ==", "requires": { "boom": "0.4.x" } @@ -812,7 +814,7 @@ "csrf-tokens": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/csrf-tokens/-/csrf-tokens-2.0.0.tgz", - "integrity": "sha1-yCEAP7i2rRe8l31v0ahL7cPtYZs=", + "integrity": "sha512-IzcrVVxQJvHoeNSSA9zc9LqIBUPM3OdRUzJ/4ooSbROhvJOSAi6qve2J6XEhmltcECmf/UiR/pgzkHXY5x1mGA==", "requires": { "base64-url": "1", "rndm": "1", @@ -823,7 +825,7 @@ "csurf": { "version": "1.2.2", 
"resolved": "https://registry.npmjs.org/csurf/-/csurf-1.2.2.tgz", - "integrity": "sha1-Lqny0/LWex4iUykOZ2tiGV3Ld1Y=", + "integrity": "sha512-wCwNMBSAzrj4918iemazALbLZBYQRVN4BpnSpVBcE5Cx5cx/5HPkkpidpL0k+Tbk3Dlg0pDSY3mUvh7Gqc5H7w==", "requires": { "csrf-tokens": "~2.0.0" } @@ -831,12 +833,12 @@ "ctype": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz", - "integrity": "sha1-gsGMJGH3QRTvFsE1IkrQuRRMoS8=" + "integrity": "sha512-T6CEkoSV4q50zW3TlTHMbzy1E5+zlnNcY+yb7tWVYlTwPhx9LpnfAkd4wecpWknDyptp4k97LUZeInlf6jdzBg==" }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", "requires": { "assert-plus": "^1.0.0" }, @@ -844,7 +846,7 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" } } }, @@ -859,7 +861,7 @@ "deep-eql": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", - "integrity": "sha1-71WKyrjeJSBs1xOQbXTlaTDrafI=", + "integrity": "sha512-6sEotTRGBFiNcqVoeHwnfopbSpi5NbH1VWJmYCVkmxMmaVTT0bUTrNaGyBwhgP4MZL012W/mkzIn3Da+iDYweg==", "dev": true, "requires": { "type-detect": "0.1.1" @@ -868,7 +870,7 @@ "type-detect": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", - "integrity": "sha1-C6XsKohWQORw6k6FBZcZANrFiCI=", + "integrity": "sha512-5rqszGVwYgBoDkIm2oUtvkfZMQ0vk29iDMU0W2qCa3rG0vPDNczCMT4hV/bLBgLg8k8ri6+u3Zbt+S/14eMzlA==", "dev": true } } @@ -881,7 +883,7 @@ "delayed-stream": { "version": "0.0.5", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", - "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" + "integrity": "sha512-v+7uBd1pqe5YtgPacIIbZ8HuHeLFVNe4mUEyFDXL6KiqzEykjbw+5mXZXpGFgNVasdL4jWKgaKIXrEHiynN1LA==" }, "denque": { "version": "1.4.1", @@ -891,7 +893,7 @@ "depd": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/depd/-/depd-0.3.0.tgz", - "integrity": "sha1-Ecm8KOQlMl+9iziUC+/2n6UyaIM=" + "integrity": "sha512-Uyx3FgdvEYlpA3W4lf37Ide++2qOsjLlJ7dap0tbM63j/BxTCcxmyIOO6PXbKbOuNSko+fsDHzzx1DUeo1+3fA==" }, "diff": { "version": "3.5.0", @@ -902,7 +904,7 @@ "dtrace-provider": { "version": "0.2.8", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.2.8.tgz", - "integrity": "sha1-4kPxkhmqlfvw2PL/sH9b1k6U/iA=", + "integrity": "sha512-wufYnYt4ISHnT9MEiRgQ3trXuolt7mICTa/ckT+KYHR667K9H82lmI8KM7zKUJ8l5I343A34wJnvL++1TJn1iA==", "dev": true, "optional": true }, @@ -920,12 +922,12 @@ "each-series": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz", - "integrity": "sha1-+Ibmxm39sl7x/nNWQUbuXLR4r8s=" + "integrity": "sha512-4MQloCGGCmT5GJZK5ibgJSvTK1c1QSrNlDvLk6fEyRxjZnXjl+NNFfzhfXpmnWh33Owc9D9klrdzCUi7yc9r4Q==" }, "ecc-jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", - "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", + "integrity": "sha512-8Pvg9QY16SYajEL9W1Lk+9yM7XCK/MOq2wibslLZYAAEEkbAIO6mLkW+GFYbvvw8qTuDFzFMg40rS9IxkNCWPg==", "requires": { "jsbn": "~0.1.0" } @@ -941,7 +943,7 @@ "ee-first": { "version": "1.0.3", "resolved": 
"https://registry.npmjs.org/ee-first/-/ee-first-1.0.3.tgz", - "integrity": "sha1-bJjECJq+y1p7hcGsRJqmA9Oz2r4=" + "integrity": "sha512-1q/3kz+ZwmrrWpJcCCrBZ3JnBzB1BMA5EVW9nxnIP1LxDZ16Cqs9VdolqLWlExet1vU+bar3WSkAa4/YrA9bIw==" }, "emitter-listener": { "version": "1.1.2", @@ -962,12 +964,12 @@ "ent": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", - "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" + "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" }, "errorhandler": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/errorhandler/-/errorhandler-1.1.0.tgz", - "integrity": "sha1-JzsOuFCtED6abWOpmB2G8bmhIC4=", + "integrity": "sha512-Xn9tbC+zOP5uc95i72z2b1G1HEF2TuIwZNOamHZLxxumtBXgSxRDeTB5HTNB6URwtfC0fRbwHBdSGtpntjlgcg==", "requires": { "accepts": "1.0.3", "escape-html": "1.0.1" @@ -981,7 +983,7 @@ "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", + "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", "requires": { "es6-promise": "^4.0.3" } @@ -989,12 +991,12 @@ "escape-html": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.1.tgz", - "integrity": "sha1-GBoobq05ejmpKFfPsdQwUuNWv/A=" + "integrity": "sha512-z6kAnok8fqVTra7Yu77dZF2Y6ETJlxH58wN38wNyuNQLm8xXdKnfNrlSmfXsTePWP03rRVUKHubtUwanwUi7+g==" }, "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true }, "event-target-shim": { @@ -1005,7 +1007,7 @@ "express": { "version": "3.11.0", "resolved": "https://registry.npmjs.org/express/-/express-3.11.0.tgz", - "integrity": "sha1-8cjhyZGkRN164zG/t/GkVX/P0u4=", + "integrity": "sha512-/647bsD/48HoC+myehc3S93C6KUBpncWSjxEImmRajSlnI7McA9F9QFb6gc6Vxp9KfO/S7OiETvoT2xU0nDfVw==", "requires": { "buffer-crc32": "0.2.3", "commander": "1.3.2", @@ -1029,7 +1031,7 @@ "debug": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", "requires": { "ms": "0.6.2" } @@ -1037,7 +1039,7 @@ "mkdirp": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.0.tgz", - "integrity": "sha1-HXMHam35hs2TROFecfzAWkyavxI=", + "integrity": "sha512-xjjNGy+ry1lhtIKcr2PT6ok3aszhQfgrUDp4OZLHacgRgFmF6XR9XCOJVcXlVGQonIqXcK1DvqgKKQOPWYGSfw==", "requires": { "minimist": "0.0.8" } @@ -1045,14 +1047,14 @@ "ms": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" } } }, "express-session": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.4.0.tgz", - "integrity": "sha1-kL+Kk5ocjcAS5KEeTC/DYp98+JQ=", + "integrity": "sha512-W/4B7cE/+IMX+zrmY+A3xUKIsvKc85O/75i+sAUVztqO6cwgNhjMidy1UVr6jn75NUmrce3sBQeHgMZuNk2XCw==", "requires": { "buffer-crc32": "0.2.3", "cookie": "0.1.2", @@ -1066,7 +1068,7 @@ "debug": { 
"version": "1.0.2", "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", "requires": { "ms": "0.6.2" } @@ -1074,7 +1076,7 @@ "ms": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" } } }, @@ -1086,17 +1088,17 @@ "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" }, "fast-deep-equal": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz", - "integrity": "sha1-liVqO8l1WV6zbYLpkp0GDYk0Of8=" + "integrity": "sha512-46+Jxk9Yj/nQY+3a1KTnpbBTemcAbPySTKya8iM9D7EsiONpSWbvzesalcCJ6tmJrCUITT2fmAQfNHFG+OHM6Q==" }, "fast-json-stable-stringify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" + "integrity": "sha512-eIgZvM9C3P05kg0qxfqaVU6Tma4QedCPIByQOcemV0vju8ot3cS2DpHi4m2G2JvbSMI152rjfLX0p1pkSdyPlQ==" }, "fast-text-encoding": { "version": "1.0.0", @@ -1111,7 +1113,7 @@ "finalhandler": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-0.0.2.tgz", - "integrity": "sha1-BgPYde6H1WeiZmkoFcyK1E/M7to=", + "integrity": "sha512-SbpQfvWVwWEBlPTQyaM9gs0D5404ENTC0x2jzbb7t+P+EOD/cBlWjAAvfozIQYtOepUuNkxoLNLCK9/kS29f4w==", "requires": { "debug": "1.0.2", "escape-html": "1.0.1" @@ -1120,7 +1122,7 @@ "debug": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", "requires": { "ms": "0.6.2" } @@ -1128,19 +1130,19 @@ "ms": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" } } }, "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" + "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" }, "finished": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/finished/-/finished-1.2.2.tgz", - "integrity": "sha1-QWCOr639ZWg7RqEiC8Sx7D2u3Ng=", + "integrity": "sha512-HPJ8x7Gn1pmTS1zWyMoXmQ1yxHkYHRoFsBI66ONq4PS9iWBJy1iHYXOSqMWNp3ksMXfrBpenkSwBhl9WG4zr4Q==", "requires": { "ee-first": "1.0.3" } @@ -1156,12 +1158,12 @@ "forever-agent": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.5.2.tgz", - "integrity": "sha1-bQ4JxJIflKJ/Y9O0nF/v8epMUTA=" + "integrity": "sha512-PDG5Ef0Dob/JsZUxUltJOhm/Y9mlteAE+46y3M9RBz/Rd3QVENJ75aGRhN56yekTUboaBIkd8KVWX2NjF6+91A==" }, "form-data": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz", - "integrity": "sha1-kavXiKupcCsaq/qLwBAxoqyeOxI=", + 
"integrity": "sha512-x8eE+nzFtAMA0YYlSxf/Qhq6vP1f8wSoZ7Aw1GuctBcmudCNuTUmmx45TfEplyb6cjsZO/jvh6+1VpZn24ez+w==", "requires": { "async": "~0.9.0", "combined-stream": "~0.0.4", @@ -1171,19 +1173,19 @@ "async": { "version": "0.9.2", "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", - "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=" + "integrity": "sha512-l6ToIJIotphWahxxHyzK9bnLR6kM4jJIIgLShZeqLY7iboHoGkdgFl7W2/Ivi4SkMJYGKqW8vSuk0uKUj6qsSw==" } } }, "fresh": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.2.tgz", - "integrity": "sha1-lzHc9WeMf660T7kDxPct9VGH+nc=" + "integrity": "sha512-ZGGi8GROK//ijm2gB33sUuN9TjN1tC/dvG4Bt4j6IWrVGpMmudUBCxx+Ir7qePsdREfkpQC4FL8W0jeSOsgv1w==" }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, "gaxios": { @@ -1209,7 +1211,7 @@ "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", "requires": { "assert-plus": "^1.0.0" }, @@ -1217,14 +1219,14 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" } } }, "glob": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", "optional": true, "requires": { "inflight": "^1.0.4", @@ -1281,12 +1283,12 @@ "har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==" }, "har-validator": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", - "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", + "integrity": "sha512-r7LZkP7Z6WMxj5zARzB9dSpIKu/sp0NfHIgtj6kmQXhEArNctjB5FEv/L2XfLdWqIocPT2QVt0LFOlEUioTBtQ==", "requires": { "ajv": "^5.1.0", "har-schema": "^2.0.0" @@ -1295,7 +1297,7 @@ "hawk": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.1.1.tgz", - "integrity": "sha1-h81JH5tG5OKurKM1QWdmiF0tHtk=", + "integrity": "sha512-am8sVA2bCJIw8fuuVcKvmmNnGFUGW8spTkVtj2fXTEZVkfN42bwFZFtDem57eFi+NSxurJB8EQ7Jd3uCHLn8Vw==", "requires": { "boom": "0.4.x", "cryptiles": "0.2.x", @@ -1306,7 +1308,7 @@ "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", + "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==", "dev": true }, "hex2dec": { @@ -1317,12 +1319,12 @@ "hoek": { "version": "0.9.1", "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz", - "integrity": "sha1-PTIkYrrfB3Fup+uFuviAec3c5QU=" + "integrity": 
"sha512-ZZ6eGyzGjyMTmpSPYVECXy9uNfqBR7x5CavhUaLOeD6W0vWK1mp/b7O3f86XE0Mtfo9rZ6Bh3fnuw9Xr8MF9zA==" }, "http-signature": { "version": "0.10.1", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.1.tgz", - "integrity": "sha1-T72sEyVZqoMjEh5UB3nAoBKyfmY=", + "integrity": "sha512-coK8uR5rq2IMj+Hen+sKPA5ldgbCc1/spPdKCL1Fw6h+D0s/2LzMcRK0Cqufs1h0ryx/niwBHGFu8HC3hwU+lA==", "requires": { "asn1": "0.1.11", "assert-plus": "^0.1.5", @@ -1341,12 +1343,12 @@ "iconv-lite": { "version": "0.4.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.3.tgz", - "integrity": "sha1-nniHeTt2nMaV6yLSVGpP0tebeh4=" + "integrity": "sha512-fBUZHWVujxJd0hOJLaN4Zj4h1LeOn+qi5qyts4HFFa0jaOo/0E6DO1UsJReZV0qwiIzeaqm/1LhYBbvvGjQkNg==" }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "requires": { "once": "^1.3.0", "wrappy": "1" @@ -1355,7 +1357,7 @@ "inherits": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, "ioredis": { "version": "4.14.1", @@ -1396,7 +1398,7 @@ "ipaddr.js": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-0.1.2.tgz", - "integrity": "sha1-ah/T2FT1ACllw017vNm0qNSwRn4=" + "integrity": "sha512-MGrEjHz4Hk5UVpJXZQ2tHB+bp6xgdRKCAEWdrgFsoAmXCgKAPtj8LqMxgvlWEAj9aN+PpTcvE051uZU3K3kLSQ==" }, "is": { "version": "3.3.0", @@ -1411,27 +1413,27 @@ "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" }, "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" }, "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" }, "json-bigint": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", - "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "integrity": "sha512-u+c/u/F+JNPUekHCFyGVycRPyh9UHD5iUhSyIAn10kxbDTJxijwAbT6XHaONEOXuGGfmWUSroheXgHcml4gLgg==", "requires": { "bignumber.js": "^7.0.0" } @@ -1439,22 +1441,22 @@ "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "integrity": "sha512-a3xHnILGMtk+hDOqNwHzF6e2fNbiMrXZvxKQiEv2MlgQP+pjIOzqAmKYD2mDpXYE/44M7g+n9p2bKkYWDUcXCQ==" }, "json-schema-traverse": { "version": "0.3.1", "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=" + "integrity": "sha512-4JD/Ivzg7PoW8NzdrBSr3UFwC9mHgvI7Z6z3QGBsSHgKaRTUDmyZAAKJo2UbG1kUVfS9WS8bi36N49U1xw43DA==" }, "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "integrity": "sha512-4Dj8Rf+fQ+/Pn7C5qeEX02op1WfOss3PKTE9Nsop3Dx+6UPxlm1dr/og7o2cRa5hNN07CACr4NFzRLtj/rjWog==", "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", @@ -1465,7 +1467,7 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" } } }, @@ -1491,7 +1493,7 @@ "keypress": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz", - "integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo=" + "integrity": "sha512-x0yf9PL/nx9Nw9oLL8ZVErFAk85/lslwEP7Vz7s5SI1ODXZIgit3C5qyWjw4DxOuO/3Hb4866SQh28a1V1d+WA==" }, "lodash": { "version": "4.17.13", @@ -1501,17 +1503,17 @@ "lodash.defaults": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", - "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" + "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==" }, "lodash.flatten": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", - "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=" + "integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==" }, "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" + "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" }, "logger-sharelatex": { "version": "1.7.0", @@ -1537,7 +1539,7 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" }, "aws4": { "version": "1.8.0", @@ -1547,7 +1549,7 @@ "bunyan": { "version": "1.8.12", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", + "integrity": "sha512-dmDUbGHeGcvCDLRFOscZkwx1ZO/aFz3bJOCi5nCgzdhFGPxwK+y5AcDBnqagNGlJZ7lje/l6JUEz9mQcutttdg==", "requires": { "dtrace-provider": "~0.8", "moment": "^2.10.6", @@ -1566,12 +1568,12 @@ "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, "dtrace-provider": { "version": "0.8.7", "resolved": 
"https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz", - "integrity": "sha1-3JObTT4GIM/gwc2APQ0tftBP/QQ=", + "integrity": "sha512-V+HIGbAdxCIxddHNDwzXi6cx8Cz5RRlQOVcsryHfsyVVebpBEnDwHSgqxpgKzqeU/6/0DWqRLAGUwkbg2ecN1Q==", "optional": true, "requires": { "nan": "^2.10.0" @@ -1580,12 +1582,12 @@ "fast-deep-equal": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + "integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==" }, "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==" }, "form-data": { "version": "2.3.3", @@ -1609,7 +1611,7 @@ "http-signature": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", "requires": { "assert-plus": "^1.0.0", "jsprim": "^1.2.2", @@ -1688,7 +1690,7 @@ "tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", "requires": { "safe-buffer": "^5.0.1" } @@ -1716,36 +1718,27 @@ "lsmod": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" - }, - "lynx": { - "version": "0.0.11", - "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.0.11.tgz", - "integrity": "sha1-LPoU5EP9LZKlm3efQVZ84cxpZaM=", - "requires": { - "mersenne": "~0.0.3", - "statsd-parser": "~0.0.4" - } + "integrity": "sha512-Y+6V75r+mGWzWEPr9h6PFmStielICu5JBHLUg18jCsD2VFmEfgHbq/EgnY4inElsUD9eKL9id1qp34w46rSIKQ==" }, "media-typer": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.2.0.tgz", - "integrity": "sha1-2KBlITrf6qLnYyGitt2jb/YzWYQ=" + "integrity": "sha512-TSggxYk75oP4tae7JkT8InpcFGUP4340zg1dOWjcu9qcphaDKtXEuNUv3OD4vJ+gVTvIDK797W0uYeNm8qqsDg==" }, "merge-descriptors": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.2.tgz", - "integrity": "sha1-w2pSp4FDdRPFcnXzndnTF1FKyMc=" + "integrity": "sha512-dYBT4Ep+t/qnPeJcnMymmhTdd4g8/hn48ciaDqLAkfRf8abzLPS6Rb6EBdz5CZCL8tzZuI5ps9MhGQGxk+EuKg==" }, "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" + "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" }, "method-override": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/method-override/-/method-override-2.0.2.tgz", - "integrity": "sha1-AFMSeMeXiWQL8n6X4mo6Wh98ynM=", + "integrity": "sha512-VdXhehVbkQcJD4MJisBqFjCGLlCQ5bhVkJqT9VpSgXyCccskmEYn/MA52pnDlqqffmkFazjGbFEwZFKwOIAKXg==", "requires": { "methods": "1.0.1", "parseurl": "1.0.1", @@ -1755,7 +1748,7 @@ "methods": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/methods/-/methods-1.0.1.tgz", - "integrity": "sha1-dbyRlD3/19oDfPPusO1zoAN80Us=" + 
"integrity": "sha512-2403MfnVypWSNIEpmQ26/ObZ5kSUx37E8NHRvriw0+I8Sne7k0HGuLGCk0OrCqURh4UIygD0cSsYq+Ll+kzNqA==" }, "metrics-sharelatex": { "version": "2.2.0", @@ -1774,12 +1767,12 @@ "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" + "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" }, "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", + "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", "requires": { "mersenne": "~0.0.3", "statsd-parser": "~0.0.4" @@ -1790,17 +1783,17 @@ "mime": { "version": "1.2.11", "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" + "integrity": "sha512-Ysa2F/nqTNGHhhm9MV8ure4+Hc+Y8AWiqUdHxsO7xu8zc92ND9f3kpALHjaP026Ft17UfxrMt95c50PLUeynBw==" }, "mime-db": { "version": "1.30.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz", - "integrity": "sha1-dMZD2i3Z1qRTmZY0ZbJtXKfXHwE=" + "integrity": "sha512-SUaL89ROHF5P6cwrhLxE1Xmk60cFcctcJl3zwMeQWcoQzt0Al/X8qxUz2gi19NECqYspzbYpAJryIRnLcjp20g==" }, "mime-types": { "version": "2.1.17", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz", - "integrity": "sha1-Cdejk/A+mVp5+K+Fe3Cp4KsWVXo=", + "integrity": "sha512-rOFZoFAbaupSpzARUe5CU1P9mwfX+lIFAuj0soNsEZEnrHu6LZNyV7/FClEB/oF9A1o5KStlumRjW6D4Q2FRCA==", "requires": { "mime-db": "~1.30.0" } @@ -1816,12 +1809,12 @@ "minimist": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + "integrity": "sha512-miQKw5Hv4NS1Psg2517mV4e4dYNaO3++hjAvLOAzKqZ61rH8NS1SK+vbfBWZ5PY/Me/bEWhUwqMghEW5Fb9T7Q==" }, "mkdirp": { "version": "0.3.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", - "integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=" + "integrity": "sha512-8OCq0De/h9ZxseqzCH8Kw/Filf5pF/vMI6+BH7Lu0jXz2pqYCjTAQRolSxRIi+Ax+oCCjlxoJMP0YQ4XlrQNHg==" }, "mocha": { "version": "5.2.0", @@ -1871,13 +1864,13 @@ "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", "dev": true }, "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", "dev": true, "requires": { "minimist": "0.0.8" @@ -1897,7 +1890,7 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" + "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" }, "moment": { "version": "2.24.0", @@ -1918,17 +1911,17 @@ "es6-promise": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.2.1.tgz", - "integrity": "sha1-7FYjOGgDKQkgcXDDlEjiREndH8Q=" + "integrity": 
"sha512-oj4jOSXvWglTsc3wrw86iom3LDPOx1nbipQk+jaG3dy+sMRM6ReSgVr/VlmBuF6lXUrflN9DCcQHeSbAwGUl4g==" }, "process-nextick-args": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", - "integrity": "sha1-FQ4gt1ZZCtP5EJPyWk8q2L/zC6M=" + "integrity": "sha512-yN0WQmuCX63LP/TMvAg31nvT6m4vDqJEiiv2CAZqWOGNWutc9DfDk1NPYYmKUFmaVM2UwDowH4u5AHWYP/jxKw==" }, "readable-stream": { "version": "2.2.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.7.tgz", - "integrity": "sha1-BwV6y+JGeyIELTb5jFrVBwVOlbE=", + "integrity": "sha512-a6ibcfWFhgihuTw/chl+u3fB5ykBZFmnvpyZHebY0MCQE4vvYcsCLpCeaQ1BkH7HdJYavNSqF0WDLeo4IPHQaQ==", "requires": { "buffer-shims": "~1.0.0", "core-util-is": "~1.0.0", @@ -1976,7 +1969,7 @@ "morgan": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.1.1.tgz", - "integrity": "sha1-zeRdLoB+vMQ5dFhG6oA5LmkJgUY=", + "integrity": "sha512-Jx1pZHnbZ43TFAeY0NVuLqpeXX0O2aL7todwFModvpjZCGR+vBTKH0wOKQjwK1wgO/cERhFISIf4roSj1fx5Jg==", "requires": { "bytes": "1.0.0" } @@ -1984,12 +1977,12 @@ "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "multiparty": { "version": "3.2.9", "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-3.2.9.tgz", - "integrity": "sha1-xzNz6pwBLnd2zlvEDJNiZLa6LB4=", + "integrity": "sha512-zkG0d0TVz4yw8qDgsP7zZepl9GNOV2kN/CwBpiSXbOP41P824Eu0xrQ+6DnOgni8e+2DNeBZrVI8mg2ppVoWtg==", "requires": { "readable-stream": "~1.1.9", "stream-counter": "~0.2.0" @@ -1998,12 +1991,12 @@ "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" }, "readable-stream": { "version": "1.1.14", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", - "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.1", @@ -2014,14 +2007,14 @@ "string_decoder": { "version": "0.10.31", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" } } }, "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", + "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==", "optional": true, "requires": { "mkdirp": "~0.5.1", @@ -2032,7 +2025,7 @@ "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", "optional": true, "requires": { "minimist": "0.0.8" @@ -2048,18 +2041,18 @@ "native-or-bluebird": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/native-or-bluebird/-/native-or-bluebird-1.1.2.tgz", - "integrity": "sha1-OSHhECMtHreQ89rGG7NwUxx9NW4=" + 
"integrity": "sha512-Bgn5FHNkd+lPTjIzq1NVU/VZTvPKFvhdIDEyYjxrKNrScSXbVvNVzOKwoleysun0/HoN7R+TXmK9mCtEs84osA==" }, "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", + "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==", "optional": true }, "negotiator": { "version": "0.4.6", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.4.6.tgz", - "integrity": "sha1-9F+vn6gz7TylElDqmn3fxCZ6RLM=" + "integrity": "sha512-nkhZDoiMZOCbMRPfDAilhyb8sETDhHP+zDCUv+JD26OSPOrYG+/76uooeqz3WTVh7BvQE41VV0YMTGKUgn9GQg==" }, "node-fetch": { "version": "2.6.0", @@ -2074,22 +2067,22 @@ "node-uuid": { "version": "1.4.8", "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", - "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" + "integrity": "sha512-TkCET/3rr9mUuRp+CpO7qfgT++aAxfDRaalQhwPFzI9BY/2rCDn6OfpZOVggi1AXfTPpfkTrg5f5WQx5G1uLxA==" }, "oauth-sign": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.4.0.tgz", - "integrity": "sha1-8ilW8x6nFRqCHl8vsywRPK2Ln2k=" + "integrity": "sha512-vF36cbrUyfy7Yr6kTIzrj3RsuaPYeJKU3IUOC6MglfNTyiGT6leGvEVOa3UsSsgwBzfVfRnvMiMVyUnpXNqN8w==" }, "on-headers": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-0.0.0.tgz", - "integrity": "sha1-7igX+DRDJXhc2cLfKyQrvBfK9MQ=" + "integrity": "sha512-sd6W+EIQTNDbMndkGZqf1q6x3PlMxAIoufoNhcfpvzrXhtN+IWVyM2sjdsZ3p+TVddtTG5u0lujTglZ+R1VGvQ==" }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "requires": { "wrappy": "1" } @@ -2110,12 +2103,12 @@ "parse-duration": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz", - "integrity": "sha1-ExFN3JiRwezSgANiRFVN5DZHoiY=" + "integrity": "sha512-MPkERaX8suJ97HH2TtTN81ASYdFWouJqcnfVYSFHvWCI13vN4NzbvKsBOYN/7o8cTSoaNVMz4H8wG7GjTj0q6g==" }, "parse-mongo-url": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz", - "integrity": "sha1-ZiON9fjnwMjKTNlw1KtqE3PrdbU=" + "integrity": "sha512-7bZUusQIrFLwvsLHBnCz2WKYQ5LKO/LwKPnvQxbMIh9gDx8H5ZsknRmLjZdn6GVdrgVOwqDrZKsY0qDLNmRgcw==" }, "parse-ms": { "version": "2.1.0", @@ -2125,12 +2118,12 @@ "parseurl": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.0.1.tgz", - "integrity": "sha1-Llfc5u/dN8NRhwEDCUTCK/OIt7Q=" + "integrity": "sha512-6W9+0+9Ihayqwjgp4OaLLqZ3KDtqPY2PtUPz8YNiy4PamjJv+7x6J9GO93O9rUZOLgaanTPxsKTasxqKkO1iSw==" }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" }, "path-parse": { "version": "1.0.6", @@ -2140,12 +2133,12 @@ "pause": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", - "integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=" + "integrity": "sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg==" }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - 
"integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, "pify": { "version": "4.0.1", @@ -2203,7 +2196,7 @@ "proxy-addr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-1.0.1.tgz", - "integrity": "sha1-x8Vm1etOP61n7rnHfFVYzMObiKg=", + "integrity": "sha512-rIUGzBlSfkJMWWCgsd4N5wvVSNAcJZg//UwPZumDIbScHRUzuSOjBmIdyICiKkB9yArv+er9qC6RA/NL3AWc6A==", "requires": { "ipaddr.js": "0.1.2" } @@ -2216,32 +2209,32 @@ "punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" }, "q": { "version": "0.9.2", "resolved": "https://registry.npmjs.org/q/-/q-0.9.2.tgz", - "integrity": "sha1-I8BsRsgTKGFqrhaNPuI6Vr1D2vY=" + "integrity": "sha512-ZOxMuWPMJnsUdYhuQ9glpZwKhB4cm8ubYFy1nNCY8TkSAuZun5fd8jCDTlf2ykWnK8x9HGn1stNtLeG179DebQ==" }, "qs": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/qs/-/qs-2.3.3.tgz", - "integrity": "sha1-6eha2+ddoLvkyOBHaghikPhjtAQ=" + "integrity": "sha512-f5M0HQqZWkzU8GELTY8LyMrGkr3bPjKoFtTkwUEqJQbcljbeK8M7mliP9Ia2xoOI6oMerp+QPS7oYJtpGmWe/A==" }, "rand-token": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/rand-token/-/rand-token-0.2.1.tgz", - "integrity": "sha1-3GfIEjMGyRInstw/W+pz0wE3YiY=" + "integrity": "sha512-yEiCpxsNXZ78N3oEsCZdvv2xAWXUmWCSIetJfMSbvhTlENOozW7ax0lPhonieVe1HCwa/I82Djgy79caeVMysw==" }, "range-parser": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.0.0.tgz", - "integrity": "sha1-pLJkz+C+XONqvjdlrJwqJIdG28A=" + "integrity": "sha512-wOH5LIH2ZHo0P7/bwkR+aNbJ+kv3CHVX4B8qs9GqbtY29fi1bGPV5xczrutN20G+Z4XhRqRMTW3q0S4iyJJPfw==" }, "raven": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", + "integrity": "sha512-RYov4wAaflZasWiCrZuizd3jNXxCOkW1WrXgWsGVb8kRpdHNZ+vPY27R6RhVtqzWp+DG9a5l6iP0QUPK4EgzaQ==", "requires": { "cookie": "0.3.1", "json-stringify-safe": "5.0.1", @@ -2253,14 +2246,14 @@ "cookie": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + "integrity": "sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==" } } }, "raw-body": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.2.2.tgz", - "integrity": "sha1-DGjh7ijP7X26SCIjSuxgeEYcvB8=", + "integrity": "sha512-52kUCLQKKfbzsJtWdlQmrWwhR8WPc8zsCmIDMEygfiEgT3E/AApymJo8eza+zgaLnDxbNRq+U/UXR79s4uX1qw==", "requires": { "bytes": "1", "iconv-lite": "0.4.3" @@ -2283,7 +2276,7 @@ "redis": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz", - "integrity": "sha1-/cAdSrTL5LO7LLKByP5WnDhX9XE=" + "integrity": "sha512-wkgzIZ9HuxJ6Sul1IW/6FG13Ecv6q8kmdHb5xo09Hu6bgWzz5qsnM06SVMpDxFNbyApaRjy8CwnmVaRMMhAMWg==" }, "redis-commands": { "version": "1.5.0", @@ -2293,12 +2286,12 @@ "redis-errors": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", - "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60=" + "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==" }, "redis-parser": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", - "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=", + "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", "requires": { "redis-errors": "^1.0.0" } @@ -2306,7 +2299,7 @@ "redis-sentinel": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz", - "integrity": "sha1-Vj3TQduZMgMfSX+v3Td+hkj/s+U=", + "integrity": "sha512-cKtLSUzDsKmsB50J1eIV/SH11DSMiHgsm/gDPRCU5lXz5OyTSuLKWg9oc8d5n74kZwtAyRkfJP0x8vYXvlPjFQ==", "requires": { "q": "0.9.2", "redis": "0.11.x" @@ -2327,7 +2320,7 @@ "coffee-script": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz", - "integrity": "sha1-nJ8dK0pSoADe0Vtll5FwNkgmPB0=", + "integrity": "sha512-EvLTMcu9vR6G1yfnz75yrISvhq1eBPC+pZbQhHzTiC5vXgpYIrArxQc5tB+SYfBi3souVdSZ4AZzYxI72oLXUw==", "requires": { "mkdirp": "~0.3.5" } @@ -2335,14 +2328,14 @@ "underscore": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", - "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=" + "integrity": "sha512-cp0oQQyZhUM1kpJDLdGO1jPZHgS/MpzoWYfe9+CM2h/QGDZlqwT2T3YGukuBdaNJ/CAPoeyAZRRHz8JFo176vA==" } } }, "request": { "version": "2.47.0", "resolved": "https://registry.npmjs.org/request/-/request-2.47.0.tgz", - "integrity": "sha1-Cen9Gk/tZZOoBe+CArIPDF7LSF8=", + "integrity": "sha512-7HDodfmCGAgxZWJddewFP3t3dKGFyMfb/tz9uWkyA3VbR79Wb/ydZ+OihNgOIj1IliYYbqohqox5evZgBCv5aw==", "requires": { "aws-sign2": "~0.5.0", "bl": "~0.9.0", @@ -2365,17 +2358,17 @@ "aws-sign2": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz", - "integrity": "sha1-xXED96F/wDfwLXwuZLYC6iI/fWM=" + "integrity": "sha512-oqUX0DM5j7aPWPCnpWebiyNIj2wiNI87ZxnOMoGv0aE4TGlBy2N+5iWc6dQ/NOKZaBD2W6PVz8jtOGkWzSC5EA==" }, "caseless": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.6.0.tgz", - "integrity": "sha1-gWfBq4OX+1u5X5bSjlqBxQ8kesQ=" + "integrity": "sha512-/X9C8oGbZJ95LwJyK4XvN9GSBgw/rqBnUg6mejGhf/GNfJukt5tzOXP+CJicXdWSqAX0ETaufLDxXuN2m4/mDg==" }, "mime-types": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.2.tgz", - "integrity": "sha1-mVrhOSq4r/y/yyZB3QVOlDwNXc4=" + "integrity": "sha512-echfutj/t5SoTL4WZpqjA1DCud1XO0WQF3/GJ48YBmc4ZMhCK77QA6Z/w6VTQERLKuJ4drze3kw2TUT8xZXVNw==" } } }, @@ -2393,12 +2386,12 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" }, "boom": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", - "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=", + "integrity": "sha512-FA8ZqcHBLjyFCPns8EsFTWxARi8iKzLfl3vXS1n1O6mlUpZvjXg9E+0Ys8mh7k/s8mHVpROgeoUmz4HadhPhAQ==", "requires": { "hoek": "4.x.x" } @@ -2406,7 +2399,7 @@ "combined-stream": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", - "integrity": "sha1-k4NwpXtKUd6ix3wV1cX9+JUWQAk=", + "integrity": "sha512-JgSRe4l4UzPwpJuxfcPWEK1SCrL4dxNjp1uqrQLMop3QZUVo+hDU8w9BJKA4JPbulTWI+UzrI2UA3tK12yQ6bg==", "requires": { "delayed-stream": "~1.0.0" } @@ -2414,7 +2407,7 @@ "cryptiles": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", - 
"integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=", + "integrity": "sha512-XBDBQo47RxVcR5GNxNOI38Cac044AdfU60sJBGZsmWw5TDuG5BAby8W/soTp4SIKeCl37sCGwkbh9wGLEd0cLw==", "requires": { "boom": "5.x.x" }, @@ -2432,17 +2425,17 @@ "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==" }, "form-data": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.1.tgz", - "integrity": "sha1-b7lPvXGIUwbXPRXMSX/kzE7NRL8=", + "integrity": "sha512-ZznzvgkNMfVvSHP0rlg09OeW/g7ib4+NpwNGxLFJOrwUcjN0O8OUASn5cvnpnWve9ZlzW6GUa6NhhlCdb6DqCw==", "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.5", @@ -2468,7 +2461,7 @@ "http-signature": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", "requires": { "assert-plus": "^1.0.0", "jsprim": "^1.2.2", @@ -2478,7 +2471,7 @@ "oauth-sign": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", - "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=" + "integrity": "sha512-VlF07iu3VV3+BTXj43Nmp6Irt/G7j/NgEctUS6IweH1RGhURjjCc2NWtzXFPXXWWfc7hgbXQdtiQu2LGp6MxUg==" }, "qs": { "version": "6.5.1", @@ -2525,7 +2518,7 @@ "tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", "requires": { "safe-buffer": "^5.0.1" } @@ -2565,7 +2558,8 @@ "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=" + "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", + "dev": true }, "require_optional": { "version": "1.0.1", @@ -2587,12 +2581,12 @@ "resolve-from": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", - "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" + "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==" }, "response-time": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/response-time/-/response-time-2.0.0.tgz", - "integrity": "sha1-Zcs5/VDeL0/9vdKF8YVZZr1vyzY=", + "integrity": "sha512-1PeD/WjcPWgv4c1Lpfh+whxgOxauMckWZMWBJNVBXg4Sz/MR1bvtA2V0KOr4gYObkp1GW2NyyiNsJkNMtTOt3w==", "requires": { "on-headers": "0.0.0" } @@ -2613,7 +2607,7 @@ "rimraf": { "version": "2.4.5", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", + "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==", "optional": true, "requires": { "glob": "^6.0.1" @@ -2622,7 +2616,7 @@ "rndm": { 
"version": "1.2.0", "resolved": "https://registry.npmjs.org/rndm/-/rndm-1.2.0.tgz", - "integrity": "sha1-8z/pz7Urv9UgqhgyO8ZdsRCht2w=" + "integrity": "sha512-fJhQQI5tLrQvYIYFpOnFinzv9dwmR7hRnUz1XqP3OJ1jIweTNOd6aTO4jwQSgcBSFUB+/KHJxuGneime+FdzOw==" }, "safe-buffer": { "version": "5.1.1", @@ -2643,7 +2637,8 @@ "sandboxed-module": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", - "integrity": "sha1-bL3sghOAx31FdcjIeDi5ET5kulA=", + "integrity": "sha512-1QAd90eCdAnqVn2sLkRCCeFphH/TKLfoTcdyI6h9h2E+YEY+aKovggwzWWWi5IMObafl0W1wr+dQ5F6LFmjpzA==", + "dev": true, "requires": { "require-like": "0.1.2", "stack-trace": "0.0.6" @@ -2652,14 +2647,15 @@ "stack-trace": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", - "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=" + "integrity": "sha512-5/6uZt7RYjjAl8z2j1mXWAewz+I4Hk2/L/3n6NRLIQ31+uQ7nMd9O6G69QCdrrufHv0QGRRHl/jwUEGTqhelTA==", + "dev": true } } }, "scmp": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/scmp/-/scmp-0.0.3.tgz", - "integrity": "sha1-NkjfLXKUZB5/eGc//CloHZutkHM=" + "integrity": "sha512-ya4sPuUOfcrJnfC+OUqTFgFVBEMOXMS1Xopn0wwIhxKwD4eveTwJoIUN9u1QHJ47nL29/m545dV8KqI92MlHPw==" }, "semver": { "version": "5.6.0", @@ -2669,7 +2665,7 @@ "send": { "version": "0.4.3", "resolved": "https://registry.npmjs.org/send/-/send-0.4.3.tgz", - "integrity": "sha1-lieyO3cH+/Y3ODHKxXkzMLWUtkA=", + "integrity": "sha512-Tl3/iKtlp1WM0hDyackntOVwx5kc8GET/zgEj9AOYRX5ideM/33FeRYk4L19IqioGxCkxHSyq1PThVs6PVvk+w==", "requires": { "debug": "1.0.2", "escape-html": "1.0.1", @@ -2682,7 +2678,7 @@ "debug": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha1-OElZHBDM5khHbDx8Li40FttZY8Q=", + "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", "requires": { "ms": "0.6.2" } @@ -2690,14 +2686,14 @@ "ms": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" + "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" } } }, "serve-favicon": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/serve-favicon/-/serve-favicon-2.0.1.tgz", - "integrity": "sha1-SCaXXZ8XPKOkFY6WmBYfdd7Hr+w=", + "integrity": "sha512-ER7Nk+que+Og6kDJpADjLMkTkllBKWz9FPef5A+uELiYAODTjaMJMszKhzUzsNcvqXM5+mzAdpv/6FaxRlJUng==", "requires": { "fresh": "0.2.2" } @@ -2705,7 +2701,7 @@ "serve-index": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.1.2.tgz", - "integrity": "sha1-B0K0gJmCV1OcLSrMbKH0qvJn+XI=", + "integrity": "sha512-hGLXKYyzxrFTDEtrNELQ61nLk5jw3Mfm6piJsefpb6fMQDlmW+kunRe09s68DHfSilh1UN5pU7ZWNFn7WmDbTQ==", "requires": { "accepts": "1.0.3", "batch": "0.5.1" @@ -2714,7 +2710,7 @@ "serve-static": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.2.3.tgz", - "integrity": "sha1-k87Lw0Dweey4WJKB0dwxwmwM0Vg=", + "integrity": "sha512-xaOEJYYnhmT2iVnDHcPullns+dFGC18BHseW1ZzkddtPWe4Ot/ZdifPFYk14r+tdWpVNWtXClRRENQ9ODd1Eeg==", "requires": { "escape-html": "1.0.1", "parseurl": "1.0.1", @@ -2732,7 +2728,7 @@ "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" + "integrity": 
"sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" } } }, @@ -2744,7 +2740,8 @@ "sinon": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/sinon/-/sinon-1.5.2.tgz", - "integrity": "sha1-nKvGx4vfRF1/gxHVSWhi+VRoxPg=", + "integrity": "sha512-4I5YC02+PBQpCCPUydFuUpH4X4+t4IpFmKbP1gHthoFiD7yyLPx179im5jgUPw/O2BytFYnl6NLL4ijh585uiA==", + "dev": true, "requires": { "buster-format": "~0.5" } @@ -2752,7 +2749,7 @@ "sntp": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz", - "integrity": "sha1-+4hfGLDzqtGJ+CSGJTa87ux1CQA=", + "integrity": "sha512-bDLrKa/ywz65gCl+LmOiIhteP1bhEsAAzhfMedPoiHP3dyYnAevlaJshdqb9Yu0sRifyP/fRqSt8t+5qGIWlGQ==", "requires": { "hoek": "0.9.x" } @@ -2797,14 +2794,14 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" } } }, "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" + "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==" }, "standard-as-callback": { "version": "2.0.1", @@ -2814,12 +2811,12 @@ "statsd-parser": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" + "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" }, "stream-counter": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz", - "integrity": "sha1-3tJmVWMZyLDiIoErnPOyb6fZR94=", + "integrity": "sha512-GjA2zKc2iXUUKRcOxXQmhEx0Ev3XHJ6c8yWGqhQjWwhGrqNwSsvq9YlRLgoGtZ5Kx2Ln94IedaqJ5GUG6aBbxA==", "requires": { "readable-stream": "~1.1.8" }, @@ -2827,12 +2824,12 @@ "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" }, "readable-stream": { "version": "1.1.14", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", - "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.1", @@ -2843,14 +2840,14 @@ "string_decoder": { "version": "0.10.31", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" } } }, "stream-shift": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", - "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" + "integrity": "sha512-Afuc4BKirbx0fwm9bKOehZPG01DJkm/4qbklw4lo9nMPqd2x0kZTLcgwQUXdGiPPY489l3w8cQ5xEEAGbg8ACQ==" }, "string_decoder": { "version": "1.1.1", @@ -2868,7 +2865,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", + "integrity": 
"sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", "requires": { "bintrees": "1.0.1" } @@ -2893,7 +2890,7 @@ "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" }, "through2": { "version": "2.0.5", @@ -2918,12 +2915,12 @@ "to-mongodb-core": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz", - "integrity": "sha1-NZbsdhOsmtO5ioncua77pWnNJ+s=" + "integrity": "sha512-vfXXcGYFP8+0L5IPOtUzzVIvPE/G3GN0TKa/PRBlzPqYyhm+UxhPmvv634EQgO4Ot8dHbBFihOslMJQclY8Z9A==" }, "tough-cookie": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.3.tgz", - "integrity": "sha1-C2GKVWW23qkL80JdBNVe3EdadWE=", + "integrity": "sha512-WR9pjSY3qO0z3yC6g33CRcVt2Wbevh0gP1XiSFql0/xRioi9qbDs3C+g4Nv2N8jmv/BloIi/SYoy/mfw5vus2A==", "requires": { "punycode": "^1.4.1" } @@ -2931,17 +2928,17 @@ "tunnel-agent": { "version": "0.4.3", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", - "integrity": "sha1-Y3PbdpCf5XDgjXNYM2Xtgop07us=" + "integrity": "sha512-e0IoVDWx8SDHc/hwFTqJDQ7CCDTEeGhmcT9jkWJjoGQSpgBz20nAMr80E3Tpk7PatJ1b37DQDgJR3CNSzcMOZQ==" }, "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, "type-is": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.3.1.tgz", - "integrity": "sha1-pnibWlITgomt4e+PbZ8odP/XC2s=", + "integrity": "sha512-PLks4DIqAA9z7zHH0VuUv0aZ36t6cq8/K0y0OdHJtTkfSbGHhNvKh3pw1PPakXkjlAskC4apJlxeYcGpKZWvkA==", "requires": { "media-typer": "0.2.0", "mime-types": "1.0.0" @@ -2950,14 +2947,14 @@ "mime-types": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.0.tgz", - "integrity": "sha1-antKavLn2S+Xr+A/BHx4AejwAdI=" + "integrity": "sha512-aP3BmIq4ZAPJt6KywU5HbiG0UwCTHZA2JWHO9aLaxyr8OhPOiK4RPSZcS6TDS7zNzGDC3AACnq/XTuEsd/M1Kg==" } } }, "uid-safe": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-1.1.0.tgz", - "integrity": "sha1-WNbF2r+N+9jVKDSDmAbAP9YUMjI=", + "integrity": "sha512-7+QtWs9zioL/iQX61G+4h3EPyr3H+tINIp0IAV4EL32vdf7qmFyuW0BgRqWl7p5oZOsEQrlL0bY7m5D8tp7b1w==", "requires": { "base64-url": "1.2.1", "native-or-bluebird": "~1.1.2" @@ -2966,14 +2963,14 @@ "base64-url": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/base64-url/-/base64-url-1.2.1.tgz", - "integrity": "sha1-GZ/WYXAqDnt9yubgaYuwicUvbXg=" + "integrity": "sha512-V8E0l1jyyeSSS9R+J9oljx5eq2rqzClInuwaPcyuv0Mm3ViI/3/rcc4rCEO8i4eQ4I0O0FAGYDA2i5xWHHPhzg==" } } }, "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" + "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" }, "uri-js": { "version": "4.2.2", @@ -2993,27 +2990,27 @@ "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "integrity": 
"sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "utils-merge": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.0.tgz", - "integrity": "sha1-ApT7kiu5N1FTVBxPcJYjHyh8ivg=" + "integrity": "sha512-HwU9SLQEtyo+0uoKXd1nkLqigUWLB+QuNQR4OcmB73eWqksM5ovuqcycks2x043W8XVb75rG1HQ0h93TMXkzQQ==" }, "uuid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" + "integrity": "sha512-rqE1LoOVLv3QrZMjb4NkF5UWlkurCfPyItVnFPNKDDGkHw4dQUdE4zMcLqx28+0Kcf3+bnUk4PisaiRJT4aiaQ==" }, "vary": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/vary/-/vary-0.1.0.tgz", - "integrity": "sha1-3wlFiZ6TwMxb0YzIMh2dIedPYXY=" + "integrity": "sha512-tyyeG46NQdwyVP/RsWLSrT78ouwEuvwk9gK8vQK4jdXmqoXtTXW+vsCfNcnqRhigF8olV34QVZarmAi6wBV2Mw==" }, "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", @@ -3023,29 +3020,29 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" } } }, "vhost": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/vhost/-/vhost-2.0.0.tgz", - "integrity": "sha1-HiZ3C9D86GxAlFWR5vKExokXkeI=" + "integrity": "sha512-TSExWM12MVtvIuBLMPyBuWBQLbHnmDZ3zfsoZwcUmKxzPX8l/cHKl5vVfbo8/KZ56UBAc/tTYXbaDGVDaIcrWw==" }, "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", - "integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I=" + "integrity": "sha512-5cZ7mecD3eYcMiCH4wtRPA5iFJZ50BJYDfckI5RRpQiktMiYTcn0ccLTZOvcbBume+1304fQztxeNzNS9Gvrnw==" }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "xtend": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", - "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=" + "integrity": "sha512-iTwvhNBRetXWe81+VcIw5YeadVSWyze7uA7nVnpP13ulrpnJ3UfQm5ApGnrkmxDJFdrblRdZs0EvaTCIfei5oQ==" }, "yallist": { "version": "3.0.3", From d9138c91f47823a1036ee6144e3f7723192097b8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 17 Feb 2020 14:05:59 +0000 Subject: [PATCH 567/769] upgrade request to latest version --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index c0c03bfd80..ea4c27cb67 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -28,7 +28,7 @@ "metrics-sharelatex": "^2.2.0", "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", - "request": "2.47.0", + "request": "^2.47.0", "requestretry": "^1.12.0", "settings-sharelatex": "^1.1.0" }, From 3aa997a0bc598433aa335782efabd04dadfba6eb Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 17 Feb 2020 14:06:13 +0000 Subject: [PATCH 568/769] update package-lock.json --- 
services/document-updater/package-lock.json | 2332 +++++++++++-------- 1 file changed, 1360 insertions(+), 972 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 687dbe463c..61f5f68388 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -5,21 +5,19 @@ "requires": true, "dependencies": { "@google-cloud/common": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", - "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.3.0.tgz", + "integrity": "sha512-nmIyi3q/FL2j6ZJ61xK/863DoJEZayI2/W/iCgwrCYUYsem277XO45MBTAimjgiKBCA0c9InmQyfT48h/IK4jg==", "requires": { - "@google-cloud/projectify": "^0.3.3", - "@google-cloud/promisify": "^0.4.0", - "@types/request": "^2.48.1", + "@google-cloud/projectify": "^1.0.0", + "@google-cloud/promisify": "^1.0.0", "arrify": "^2.0.0", "duplexify": "^3.6.0", "ent": "^2.2.0", "extend": "^3.0.2", - "google-auth-library": "^3.1.1", - "pify": "^4.0.1", + "google-auth-library": "^5.5.0", "retry-request": "^4.0.0", - "teeny-request": "^3.11.3" + "teeny-request": "^6.0.0" } }, "@google-cloud/debug-agent": { @@ -43,18 +41,218 @@ "split": "^1.0.0" }, "dependencies": { - "coffeescript": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz", - "integrity": "sha512-34GV1aHrsMpTaO3KfMJL40ZNuvKDR/g98THHnE9bQj8HjMaZvSrLik99WWqyMhRtbe8V5hpx5iLgdcSvM/S2wg==" + "@google-cloud/common": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", + "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", + "requires": { + "@google-cloud/projectify": "^0.3.3", + "@google-cloud/promisify": "^0.4.0", + "@types/request": "^2.48.1", + "arrify": "^2.0.0", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^3.1.1", + "pify": "^4.0.1", + "retry-request": "^4.0.0", + "teeny-request": "^3.11.3" + } }, - "semver": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz", - "integrity": "sha512-rWYq2e5iYW+fFe/oPPtYJxYgjBm8sC4rmoGdUOgBB7VnwKt6HrL793l2voH1UlsyYZpJ4g0wfjnTEO1s1NP2eQ==" + "@google-cloud/projectify": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", + "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" + }, + "@google-cloud/promisify": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", + "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" + }, + "agent-base": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "requires": { + "es6-promisify": "^5.0.0" + } + }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, + "debug": { + 
"version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "gaxios": { + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", + "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", + "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", + "requires": { + "gaxios": "^1.0.2", + "json-bigint": "^0.3.0" + } + }, + "google-auth-library": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", + "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", + "requires": { + "base64-js": "^1.3.0", + "fast-text-encoding": "^1.0.0", + "gaxios": "^1.2.1", + "gcp-metadata": "^1.0.0", + "gtoken": "^2.3.2", + "https-proxy-agent": "^2.2.1", + "jws": "^3.1.5", + "lru-cache": "^5.0.0", + "semver": "^5.5.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "google-p12-pem": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", + "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", + "requires": { + "node-forge": "^0.8.0", + "pify": "^4.0.0" + } + }, + "gtoken": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", + "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", + "requires": { + "gaxios": "^1.0.4", + "google-p12-pem": "^1.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0", + "pify": "^4.0.0" + } + }, + "https-proxy-agent": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", + "requires": { + "agent-base": "^4.3.0", + "debug": "^3.1.0" + } + }, + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + }, + "ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", + "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" + }, + "teeny-request": { + "version": "3.11.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", + "requires": { + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.2.0", + "uuid": "^3.3.2" + } } } }, + "@google-cloud/logging": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.1.0.tgz", + "integrity": "sha512-0E2ywYYcR/6aZdaPjjX4qe6EN4DciZQMhtAeSZJl3tJZ+L/BUZajodhadwXm3AJ0syEzsqmsOeEBNEBLEtD8XQ==", + "requires": { + "@google-cloud/common": "^2.2.2", + "@google-cloud/paginator": "^2.0.0", + "@google-cloud/projectify": "^1.0.0", + "@google-cloud/promisify": "^1.0.0", + "@opencensus/propagation-stackdriver": "0.0.19", + "arrify": "^2.0.0", + "dot-prop": "^5.1.0", + "eventid": "^1.0.0", + "extend": "^3.0.2", + "gcp-metadata": "^3.1.0", + "google-auth-library": "^5.2.2", + "google-gax": "^1.11.0", + "is": "^3.3.0", + "on-finished": "^2.3.0", + "pumpify": "^2.0.0", + "snakecase-keys": "^3.0.0", + "stream-events": "^1.0.4", + "through2": "^3.0.0", + "type-fest": "^0.9.0" + } + }, + "@google-cloud/logging-bunyan": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz", + "integrity": "sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==", + "requires": { + "@google-cloud/logging": "^7.0.0", + "google-auth-library": "^5.0.0" + } + }, + "@google-cloud/paginator": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.3.tgz", + "integrity": "sha512-kp/pkb2p/p0d8/SKUu4mOq8+HGwF8NPzHWkj+VKrIPQPyMRw8deZtrO/OcSiy9C/7bpfU5Txah5ltUNfPkgEXg==", + "requires": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + } + }, "@google-cloud/profiler": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", @@ -96,16 +294,48 @@ "through2": "^3.0.0" } }, + "@google-cloud/projectify": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", + "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" + }, "@google-cloud/promisify": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", "integrity": "sha512-QzB0/IMvB0eFxFK7Eqh+bfC8NLv3E9ScjWQrPOk6GgfNroxcVITdTlT8NRsRrcp5+QQJVPLkRqKG0PUdaWXmHw==" }, + "agent-base": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "requires": { + "es6-promisify": "^5.0.0" + } + }, "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" }, + "debug": { + "version": "3.2.6", + "resolved": 
"https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "gaxios": { + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", + "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.3.0" + } + }, "gcp-metadata": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", @@ -141,25 +371,96 @@ } } }, - "through2": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", - "integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==", + "google-p12-pem": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", + "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", "requires": { - "readable-stream": "2 || 3" + "node-forge": "^0.8.0", + "pify": "^4.0.0" + } + }, + "gtoken": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", + "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", + "requires": { + "gaxios": "^1.0.4", + "google-p12-pem": "^1.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0", + "pify": "^4.0.0" + } + }, + "https-proxy-agent": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", + "requires": { + "agent-base": "^4.3.0", + "debug": "^3.1.0" + } + }, + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", + "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + }, + "teeny-request": { + "version": "3.11.3", + "resolved": 
"https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", + "requires": { + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.2.0", + "uuid": "^3.3.2" } } } }, "@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", + "integrity": "sha512-ZdzQUN02eRsmTKfBj9FDL0KNDIFNjBn/d6tHQmA/+FImH5DO6ZV8E7FzxMgAUiVAUq41RFAkb25p1oHOZ8psfg==" }, "@google-cloud/promisify": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", - "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", + "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, "@google-cloud/trace-agent": { "version": "3.6.1", @@ -181,23 +482,218 @@ "uuid": "^3.0.1" }, "dependencies": { + "@google-cloud/common": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", + "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", + "requires": { + "@google-cloud/projectify": "^0.3.3", + "@google-cloud/promisify": "^0.4.0", + "@types/request": "^2.48.1", + "arrify": "^2.0.0", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^3.1.1", + "pify": "^4.0.1", + "retry-request": "^4.0.0", + "teeny-request": "^3.11.3" + } + }, + "@google-cloud/projectify": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", + "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" + }, + "@google-cloud/promisify": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", + "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" + }, + "agent-base": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "requires": { + "es6-promisify": "^5.0.0" + } + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "gaxios": { + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", + "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", + "integrity": 
"sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", + "requires": { + "gaxios": "^1.0.2", + "json-bigint": "^0.3.0" + } + }, + "google-auth-library": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", + "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", + "requires": { + "base64-js": "^1.3.0", + "fast-text-encoding": "^1.0.0", + "gaxios": "^1.2.1", + "gcp-metadata": "^1.0.0", + "gtoken": "^2.3.2", + "https-proxy-agent": "^2.2.1", + "jws": "^3.1.5", + "lru-cache": "^5.0.0", + "semver": "^5.5.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "google-p12-pem": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", + "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", + "requires": { + "node-forge": "^0.8.0", + "pify": "^4.0.0" + } + }, + "gtoken": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", + "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", + "requires": { + "gaxios": "^1.0.4", + "google-p12-pem": "^1.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0", + "pify": "^4.0.0" + } + }, + "https-proxy-agent": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", + "requires": { + "agent-base": "^4.3.0", + "debug": "^3.1.0" + } + }, + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, - "semver": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz", - "integrity": "sha512-rWYq2e5iYW+fFe/oPPtYJxYgjBm8sC4rmoGdUOgBB7VnwKt6HrL793l2voH1UlsyYZpJ4g0wfjnTEO1s1NP2eQ==" + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" }, - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", + "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" + }, + "teeny-request": { + "version": "3.11.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", + "requires": { + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.2.0", + "uuid": "^3.3.2" + } } } }, + "@grpc/grpc-js": { + "version": "0.6.16", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-0.6.16.tgz", + "integrity": "sha512-TckwrK2duWTeqE/fYQ5JaLMDwqLuun0B/yswf8Bb9Pb7vb5xGd3iulmcnnaA2RDVd/abQTHnkSAjfRibYU24eQ==", + "requires": { + "semver": "^6.2.0" + } + }, + "@grpc/proto-loader": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.3.tgz", + "integrity": "sha512-8qvUtGg77G2ZT2HqdqYoM/OY97gQd/0crSG34xNmZ4ZOsv3aQT/FQV9QfZPazTGna6MIoyUd+u6AxsoZjJ/VMQ==", + "requires": { + "lodash.camelcase": "^4.3.0", + "protobufjs": "^6.8.6" + } + }, + "@opencensus/core": { + "version": "0.0.19", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.19.tgz", + "integrity": "sha512-Y5QXa7vggMU0+jveLcworfX9jNnztix7x1NraAV0uGkTp4y46HrFl0DnNcnNxUDvBu/cYeWRwlmhiWlr9+adOQ==", + "requires": { + "continuation-local-storage": "^3.2.1", + "log-driver": "^1.2.7", + "semver": "^6.0.0", + "shimmer": "^1.2.0", + "uuid": "^3.2.1" + } + }, + "@opencensus/propagation-stackdriver": { + "version": "0.0.19", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.19.tgz", + "integrity": "sha512-TTL9KIOkvTpd+DT2gj3R3JP7XqOAf69ab/wzxIwpBlFqfRiIFBkOALyC/Gy4pKooAe5DemDhXZuRtIa0PgfoZQ==", + "requires": { + "@opencensus/core": "^0.0.19", + "hex2dec": "^1.0.1", + "uuid": "^3.2.1" + } + }, + "@overleaf/o-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", + "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" + }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -257,6 +753,11 @@ "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", "integrity": "sha512-lu8BpxjAtRCAo5ifytTpCPCj99LF7o/2Myn+NXyNCBqvPYn7Pjd76AMmUB5l7XF1U6t0hcWrlEM5ESufW7wAeA==" }, + "@tootallnate/once": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.0.0.tgz", + "integrity": "sha512-KYyTT/T6ALPkIRd2Ge080X/BsXvy9O0hcWTtMWkPvwAwF99+vn6Dv4GzrFT/Nn1LePr+FFDbRXXlqmsy9lw2zA==" + }, "@types/caseless": { "version": "0.12.2", "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", @@ -275,33 +776,53 @@ "@types/node": "*" } }, - "@types/form-data": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz", - "integrity": "sha512-JAMFhOaHIciYVh8fb5/83nmuO/AHwmto+Hq7a9y8FzLDcC1KCU344XDOMEmahnrTFlHjgh4L0WJFczNIX2GxnQ==", + "@types/fs-extra": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.0.1.tgz", + "integrity": 
"sha512-J00cVDALmi/hJOYsunyT52Hva5TnJeKP5yd1r+mH/ZU0mbYZflR0Z5kw5kITtKTRYMhm1JMClOFYdHnQszEvqw==", "requires": { "@types/node": "*" } }, "@types/long": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz", - "integrity": "sha512-1w52Nyx4Gq47uuu0EVcsHBxZFJgurQ+rTKS3qMHxR1GY2T8c2AJYd6vZoZ9q1rupaDjU0yT+Jc2XTyXkjeMA+Q==" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz", + "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==" }, "@types/node": { - "version": "12.0.8", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz", - "integrity": "sha512-b8bbUOTwzIY3V5vDTY1fIJ+ePKDUBqt2hC2woVGotdQQhG/2Sh62HOKHrT7ab+VerXAcPyAiTEipPu/FsreUtg==" + "version": "10.17.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.15.tgz", + "integrity": "sha512-daFGV9GSs6USfPgxceDA8nlSe48XrVCJfDeYm7eokxq/ye7iuOH87hKXgMtEAVLFapkczbZsx868PMDT1Y0a6A==" }, "@types/request": { - "version": "2.48.1", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.1.tgz", - "integrity": "sha512-ZgEZ1TiD+KGA9LiAAPPJL68Id2UWfeSO62ijSXZjFJArVV+2pKcsVHmrcu+1oiE3q6eDGiFiSolRc4JHoerBBg==", + "version": "2.48.4", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.4.tgz", + "integrity": "sha512-W1t1MTKYR8PxICH+A4HgEIPuAC3sbljoEVfyZbeFJJDbr30guDspJri2XOaM2E+Un7ZjrihaDi7cf6fPa2tbgw==", "requires": { "@types/caseless": "*", - "@types/form-data": "*", "@types/node": "*", - "@types/tough-cookie": "*" + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + }, + "dependencies": { + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "mime-types": { + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", + "requires": { + "mime-db": "1.43.0" + } + } } }, "@types/semver": { @@ -310,9 +831,9 @@ "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, "@types/tough-cookie": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz", - "integrity": "sha512-SCcK7mvGi3+ZNz833RRjFIxrn4gI1PPR3NtuIS+6vMkvmsGjosqTJwRt5bAEFLRz+wtJMWv8+uOnZf2hi2QXTg==" + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.6.tgz", + "integrity": "sha512-wHNBMnkoEBiRAd3s8KTKwIuO9biFtTf0LehITzBhSco+HQI0xkXZbLOD55SW3Aqw3oUkHstkm5SPv58yaAdFPQ==" }, "abort-controller": { "version": "3.0.0", @@ -332,27 +853,42 @@ } }, "acorn": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz", - "integrity": "sha512-jPTiwtOxaHNaAPg/dmrJ/beuzLRnXtB0kQPQ8JpotKJgTB6rX6c8mlf315941pyjBSaPg8NHXS9fhP4u17DpGA==" + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.0.tgz", + "integrity": "sha512-gac8OEcQ2Li1dxIEWGZzsp2BitJxwkwcOm0zHAJLcPJaVvm58FRnk6RkuLRpU1EujipU2ZFODv2P9DLMfnV8mw==" }, "agent-base": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", - "integrity": 
"sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.0.tgz", + "integrity": "sha512-j1Q7cSCqN+AwrmDd+pzgqc0/NpC655x2bUf5ZjRIO77DcNBFmh+OgRNzF6OKdCC9RSCb19fGd99+bhXFdkRNqw==", "requires": { - "es6-promisify": "^5.0.0" + "debug": "4" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } } }, "ajv": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "integrity": "sha512-Ajr4IcMXq/2QmMkEmSvxqfLN5zGmJ92gHXAeOXq1OekoH2rfDNsgdDoL2f7QaRCy7G/E6TpxBVdRuNraMztGHw==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.11.0.tgz", + "integrity": "sha512-nCprB/0syFYy9fVYU1ox1l2KN8S9I+tziH8D4zdZuLT3N6RMlGSGt5FSTpAiHB/Whv8Qs1cWHma1aMKZyaHRKA==", "requires": { - "co": "^4.6.0", - "fast-deep-equal": "^1.0.0", + "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.3.0" + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" } }, "arrify": { @@ -361,14 +897,17 @@ "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" }, "asn1": { - "version": "0.1.11", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz", - "integrity": "sha512-Fh9zh3G2mZ8qM/kwsiKwL2U2FmXxVsboP4x1mXjnhKHv3SmzaBZoYvxEQJz/YS2gnCgd8xlAVWcZnQyC9qZBsA==" + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "requires": { + "safer-buffer": "~2.1.0" + } }, "assert-plus": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz", - "integrity": "sha512-brU24g7ryhRwGCI2y+1dGQmQXiZF7TtIj583S96y0jjdajIe6wn8BuXyELYhvD22dtIxDQVFk04YTJwwdwOYJw==" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" }, "assertion-error": { "version": "1.1.0", @@ -377,11 +916,11 @@ "dev": true }, "async": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.0.tgz", - "integrity": "sha512-xAfGg1/NTLBBKlHFmnd7PlmUW9KhVQIUuSrYem9xzFUZy13ScvtyGGejaae9iAVRiRq9+Cx7DPFaAAhCpyxyPw==", + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", "requires": { - "lodash": "^4.14.0" + "lodash": "^4.17.14" } }, "async-listener": { @@ -391,6 +930,13 @@ "requires": { "semver": "^5.3.0", "shimmer": "^1.1.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } } }, "asynckit": { @@ -404,9 +950,9 @@ "integrity": 
"sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==" }, "aws4": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", - "integrity": "sha512-tkleq4Df8UWu/7xf/tfbo7t2vDa07bcONGnKhl0QXKQsh3fJ0yJ1M5wzpy8BtBSENQw/9VTsthMhLG+yXHfStQ==" + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz", + "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug==" }, "axios": { "version": "0.18.1", @@ -423,9 +969,9 @@ "integrity": "sha512-9Y0g0Q8rmSt+H33DfKv7FOc3v+iRI+o1lbzt8jGcIosYW37IIW/2XVYq5NPdmaD5NQ59Nk26Kl/vZbwW9Fr8vg==" }, "base64-js": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", - "integrity": "sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw==" + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", + "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" }, "base64-url": { "version": "1.3.3", @@ -443,9 +989,9 @@ "integrity": "sha512-OXRjc65VJvFtb7JD5HszSI1WWwsI6YnJS7Qmlx1CaDQrZ5urNIeRjtTyBe1YapNXyoWzrcc4yqg4rNe8YMyong==" }, "bcrypt-pbkdf": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", - "integrity": "sha512-vY4sOrSlpwNZXsinfJ0HpbSkFft4nhSVLeUrQ4j2ydGmBOiVY83aMJStJATBy0C3+XdaYa990kIA1qkC2mUq6g==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", "requires": { "tweetnacl": "^0.14.3" } @@ -468,37 +1014,6 @@ "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, - "bl": { - "version": "0.9.5", - "resolved": "https://registry.npmjs.org/bl/-/bl-0.9.5.tgz", - "integrity": "sha512-njlCs8XLBIK7LCChTWfzWuIAxkpmmLXcL7/igCofFT1B039Sz0IPnAmosN5QaO22lU4qr8LcUz2ojUlE6pLkRQ==", - "requires": { - "readable-stream": "~1.0.26" - }, - "dependencies": { - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" - }, - "readable-stream": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", - "integrity": "sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" - } - }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" - } - } - }, "body-parser": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.4.3.tgz", @@ -511,27 +1026,12 @@ "qs": "0.6.6", "raw-body": "1.2.2", "type-is": "1.3.1" - }, - "dependencies": { - "qs": { - "version": "0.6.6", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", - "integrity": 
"sha512-kN+yNdAf29Jgp+AYHUmC7X4QdJPR8czuMWLNLc0aRxkQ7tB3vJQEONKKT9ou/rW7EbqVec11srC9q9BiVbcnHA==" - } - } - }, - "boom": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz", - "integrity": "sha512-OvfN8y1oAxxphzkl2SnCS+ztV/uVKTATtgLjWYg/7KwcNyf3rzpHxNQJZCKtsZd4+MteKczhWbSjtEX4bGgU9g==", - "requires": { - "hoek": "0.9.x" } }, "brace-expansion": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", - "integrity": "sha512-Dnfc9ROAPrkkeLIUweEbh7LFT9Mc53tO/bbM044rKjhgAEyIGKvKXg97PM/kRizZIfUHaROZIoeEaWao+Unzfw==", + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -576,6 +1076,15 @@ "requires": { "dtrace-provider": "0.2.8", "mv": "~2" + }, + "dependencies": { + "dtrace-provider": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.2.8.tgz", + "integrity": "sha512-wufYnYt4ISHnT9MEiRgQ3trXuolt7mICTa/ckT+KYHR667K9H82lmI8KM7zKUJ8l5I343A34wJnvL++1TJn1iA==", + "dev": true, + "optional": true + } } }, "buster-core": { @@ -612,14 +1121,6 @@ "assertion-error": "^1.0.1", "deep-eql": "^0.1.3", "type-detect": "^1.0.0" - }, - "dependencies": { - "type-detect": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-1.0.0.tgz", - "integrity": "sha512-f9Uv6ezcpvCQjJU0Zqbg+65qdcszv3qUQsZfjdRbWiZ7AMenrX1u0lNk9EoWWX6e1F+NULyg27mtdeZ5WhpljA==", - "dev": true - } } }, "chai-spies": { @@ -629,15 +1130,9 @@ "dev": true }, "cluster-key-slot": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.8.tgz", - "integrity": "sha512-OF/xJE08NvIL6Fbi8XZpAlt6p55mE7SXyBWXMp9TNo4s4XnRO6kIQ8JLwQ77luFXpoB+G9zi/Ks8OWHBg0vvVg==", - "dev": true - }, - "co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", + "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==" }, "coffee-script": { "version": "1.7.1", @@ -648,11 +1143,11 @@ } }, "combined-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", - "integrity": "sha512-qfexlmLp9MyrkajQVyjEDb0Vj+KhRgR/rxLiVhaihlT+ZkX0lReqtH6Ack40CvMDERR4b5eFp3CreskpBs1Pig==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", "requires": { - "delayed-stream": "0.0.5" + "delayed-stream": "~1.0.0" } }, "commander": { @@ -719,26 +1214,6 @@ "serve-static": "1.2.3", "type-is": "1.3.1", "vhost": "2.0.0" - }, - "dependencies": { - "debug": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", - "requires": { - "ms": "0.6.2" - } - }, - "ms": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": 
"sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" - }, - "qs": { - "version": "0.6.6", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", - "integrity": "sha512-kN+yNdAf29Jgp+AYHUmC7X4QdJPR8czuMWLNLc0aRxkQ7tB3vJQEONKKT9ou/rW7EbqVec11srC9q9BiVbcnHA==" - } } }, "connect-timeout": { @@ -748,21 +1223,6 @@ "requires": { "debug": "1.0.2", "on-headers": "0.0.0" - }, - "dependencies": { - "debug": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", - "requires": { - "ms": "0.6.2" - } - }, - "ms": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" - } } }, "console-log-level": { @@ -803,14 +1263,6 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" }, - "cryptiles": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz", - "integrity": "sha512-gvWSbgqP+569DdslUiCelxIv3IYK5Lgmq1UrRnk+s1WxQOQ16j3GPDcjdtgL5Au65DU/xQi6q3xPtf5Kta+3IQ==", - "requires": { - "boom": "0.4.x" - } - }, "csrf-tokens": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/csrf-tokens/-/csrf-tokens-2.0.0.tgz", @@ -830,10 +1282,10 @@ "csrf-tokens": "~2.0.0" } }, - "ctype": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz", - "integrity": "sha512-T6CEkoSV4q50zW3TlTHMbzy1E5+zlnNcY+yb7tWVYlTwPhx9LpnfAkd4wecpWknDyptp4k97LUZeInlf6jdzBg==" + "d64": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", + "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, "dashdash": { "version": "1.14.1", @@ -841,21 +1293,14 @@ "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", "requires": { "assert-plus": "^1.0.0" - }, - "dependencies": { - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" - } } }, "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", + "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", "requires": { - "ms": "2.0.0" + "ms": "0.6.2" } }, "deep-eql": { @@ -881,9 +1326,9 @@ "integrity": "sha512-Lwaf3zVFDMBop1yDuFZ19F9WyGcZcGacsbdlZtWjQmM50tOcMntm1njF/Nb/Vjij3KaSvCF+sEYGKrrjObu2NA==" }, "delayed-stream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", - "integrity": "sha512-v+7uBd1pqe5YtgPacIIbZ8HuHeLFVNe4mUEyFDXL6KiqzEykjbw+5mXZXpGFgNVasdL4jWKgaKIXrEHiynN1LA==" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": 
"sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, "denque": { "version": "1.4.1", @@ -901,12 +1346,22 @@ "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", "dev": true }, + "dot-prop": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz", + "integrity": "sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==", + "requires": { + "is-obj": "^2.0.0" + } + }, "dtrace-provider": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.2.8.tgz", - "integrity": "sha512-wufYnYt4ISHnT9MEiRgQ3trXuolt7mICTa/ckT+KYHR667K9H82lmI8KM7zKUJ8l5I343A34wJnvL++1TJn1iA==", - "dev": true, - "optional": true + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.8.tgz", + "integrity": "sha512-b7Z7cNtHPhH9EJhNNbbeqTcXB8LGFFZhq1PGgEvpeHlzd36bhbdTWoE/Ba/YguqpBSlAPKnARWhVlhunCMwfxg==", + "optional": true, + "requires": { + "nan": "^2.14.0" + } }, "duplexify": { "version": "3.7.1", @@ -917,6 +1372,35 @@ "inherits": "^2.0.1", "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" + }, + "dependencies": { + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "each-series": { @@ -925,11 +1409,12 @@ "integrity": "sha512-4MQloCGGCmT5GJZK5ibgJSvTK1c1QSrNlDvLk6fEyRxjZnXjl+NNFfzhfXpmnWh33Owc9D9klrdzCUi7yc9r4Q==" }, "ecc-jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", - "integrity": "sha512-8Pvg9QY16SYajEL9W1Lk+9yM7XCK/MOq2wibslLZYAAEEkbAIO6mLkW+GFYbvvw8qTuDFzFMg40rS9IxkNCWPg==", + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", "requires": { - "jsbn": "~0.1.0" + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" } }, "ecdsa-sig-formatter": { @@ -954,9 +1439,9 @@ } }, "end-of-stream": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", - "integrity": "sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==", + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "requires": { "once": "^1.4.0" } @@ -1004,6 +1489,15 @@ "resolved": 
"https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" }, + "eventid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/eventid/-/eventid-1.0.0.tgz", + "integrity": "sha512-4upSDsvpxhWPsmw4fsJCp0zj8S7I0qh1lCDTmZXP8V3TtryQKDI8CgQPN+e5JakbWwzaAX3lrdp2b3KSoMSUpw==", + "requires": { + "d64": "^1.0.0", + "uuid": "^3.0.1" + } + }, "express": { "version": "3.11.0", "resolved": "https://registry.npmjs.org/express/-/express-3.11.0.tgz", @@ -1028,14 +1522,6 @@ "vary": "0.1.0" }, "dependencies": { - "debug": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", - "requires": { - "ms": "0.6.2" - } - }, "mkdirp": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.0.tgz", @@ -1043,11 +1529,6 @@ "requires": { "minimist": "0.0.8" } - }, - "ms": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" } } }, @@ -1063,21 +1544,6 @@ "on-headers": "0.0.0", "rand-token": "0.2.1", "utils-merge": "1.0.0" - }, - "dependencies": { - "debug": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", - "requires": { - "ms": "0.6.2" - } - }, - "ms": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" - } } }, "extend": { @@ -1091,14 +1557,14 @@ "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" }, "fast-deep-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz", - "integrity": "sha512-46+Jxk9Yj/nQY+3a1KTnpbBTemcAbPySTKya8iM9D7EsiONpSWbvzesalcCJ6tmJrCUITT2fmAQfNHFG+OHM6Q==" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==" }, "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha512-eIgZvM9C3P05kg0qxfqaVU6Tma4QedCPIByQOcemV0vju8ot3cS2DpHi4m2G2JvbSMI152rjfLX0p1pkSdyPlQ==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, "fast-text-encoding": { "version": "1.0.0", @@ -1117,21 +1583,6 @@ "requires": { "debug": "1.0.2", "escape-html": "1.0.1" - }, - "dependencies": { - "debug": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", - "requires": { - "ms": "0.6.2" - } - }, - "ms": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": 
"sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" - } } }, "findit2": { @@ -1153,27 +1604,45 @@ "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", "requires": { "debug": "=3.1.0" + }, + "dependencies": { + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + } } }, "forever-agent": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.5.2.tgz", - "integrity": "sha512-PDG5Ef0Dob/JsZUxUltJOhm/Y9mlteAE+46y3M9RBz/Rd3QVENJ75aGRhN56yekTUboaBIkd8KVWX2NjF6+91A==" + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==" }, "form-data": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz", - "integrity": "sha512-x8eE+nzFtAMA0YYlSxf/Qhq6vP1f8wSoZ7Aw1GuctBcmudCNuTUmmx45TfEplyb6cjsZO/jvh6+1VpZn24ez+w==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", "requires": { - "async": "~0.9.0", - "combined-stream": "~0.0.4", - "mime": "~1.2.11" + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" }, "dependencies": { - "async": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", - "integrity": "sha512-l6ToIJIotphWahxxHyzK9bnLR6kM4jJIIgLShZeqLY7iboHoGkdgFl7W2/Ivi4SkMJYGKqW8vSuk0uKUj6qsSw==" + "mime-types": { + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", + "requires": { + "mime-db": "1.43.0" + } } } }, @@ -1189,22 +1658,23 @@ "dev": true }, "gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", - "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.1.tgz", + "integrity": "sha512-DQOesWEx59/bm63lTX0uHDDXpGTW9oKqNsoigwCoRe2lOb5rFqxzHjLTa6aqEBecLcz69dHLw7rbS068z1fvIQ==", "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", "node-fetch": "^2.3.0" } }, "gcp-metadata": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", - "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.3.1.tgz", + "integrity": "sha512-RrASg1HaVAxoB9Q/8sYfJ++v9PMiiqIgOrOxZeagMgS4osZtICT1lKBx2uvzYgwetxj8i6K99Z0iuKMg7WraTg==", "requires": { - "gaxios": "^1.0.2", + "gaxios": "^2.1.0", "json-bigint": "^0.3.0" 
} }, @@ -1214,13 +1684,6 @@ "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", "requires": { "assert-plus": "^1.0.0" - }, - "dependencies": { - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" - } } }, "glob": { @@ -1237,40 +1700,65 @@ } }, "google-auth-library": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", - "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.9.2.tgz", + "integrity": "sha512-rBE1YTOZ3/Hu6Mojkr+UUmbdc/F28hyMGYEGxjyfVA9ZFmq12oqS3AeftX4h9XpdVIcxPooSo8hECYGT6B9XqQ==", "requires": { + "arrify": "^2.0.0", "base64-js": "^1.3.0", "fast-text-encoding": "^1.0.0", - "gaxios": "^1.2.1", - "gcp-metadata": "^1.0.0", - "gtoken": "^2.3.2", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" + "gaxios": "^2.1.0", + "gcp-metadata": "^3.3.0", + "gtoken": "^4.1.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + }, + "google-gax": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.14.1.tgz", + "integrity": "sha512-lAvILUMnXL+BVSSlbzwpGzs3ZP2r+b1l44zeDTRWceejDgyZORKdPEEhtUw49x9CVwxpPx02+v0yktqnRhUD1A==", + "requires": { + "@grpc/grpc-js": "^0.6.12", + "@grpc/proto-loader": "^0.5.1", + "@types/fs-extra": "^8.0.1", + "@types/long": "^4.0.0", + "abort-controller": "^3.0.0", + "duplexify": "^3.6.0", + "google-auth-library": "^5.0.0", + "is-stream-ended": "^0.1.4", + "lodash.at": "^4.6.0", + "lodash.has": "^4.5.2", + "node-fetch": "^2.6.0", + "protobufjs": "^6.8.8", + "retry-request": "^4.0.0", + "semver": "^6.0.0", + "walkdir": "^0.4.0" } }, "google-p12-pem": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-2.0.4.tgz", + "integrity": "sha512-S4blHBQWZRnEW44OcR7TL9WR+QCqByRvhNDZ/uuQfpxywfupikf/miba8js1jZi6ZOGv5slgSuoshCWh6EMDzg==", "requires": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" + "node-forge": "^0.9.0" } }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, "gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-4.1.4.tgz", + "integrity": "sha512-VxirzD0SWoFUo5p8RDP8Jt2AGyOmyYcT/pOUgDKJCK+iSw0TMqwrVfY37RXTNmoKwrzmDHSk0GMT9FsgVmnVSA==", "requires": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" + "gaxios": "^2.1.0", + "google-p12-pem": "^2.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" }, "dependencies": { "mime": { @@ -1286,24 +1774,19 @@ "integrity": 
"sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==" }, "har-validator": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", - "integrity": "sha512-r7LZkP7Z6WMxj5zARzB9dSpIKu/sp0NfHIgtj6kmQXhEArNctjB5FEv/L2XfLdWqIocPT2QVt0LFOlEUioTBtQ==", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", + "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", "requires": { - "ajv": "^5.1.0", + "ajv": "^6.5.5", "har-schema": "^2.0.0" } }, - "hawk": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.1.1.tgz", - "integrity": "sha512-am8sVA2bCJIw8fuuVcKvmmNnGFUGW8spTkVtj2fXTEZVkfN42bwFZFtDem57eFi+NSxurJB8EQ7Jd3uCHLn8Vw==", - "requires": { - "boom": "0.4.x", - "cryptiles": "0.2.x", - "hoek": "0.9.x", - "sntp": "0.2.x" - } + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true }, "he": { "version": "1.1.1", @@ -1316,28 +1799,63 @@ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, - "hoek": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz", - "integrity": "sha512-ZZ6eGyzGjyMTmpSPYVECXy9uNfqBR7x5CavhUaLOeD6W0vWK1mp/b7O3f86XE0Mtfo9rZ6Bh3fnuw9Xr8MF9zA==" + "http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "requires": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } }, "http-signature": { - "version": "0.10.1", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.1.tgz", - "integrity": "sha512-coK8uR5rq2IMj+Hen+sKPA5ldgbCc1/spPdKCL1Fw6h+D0s/2LzMcRK0Cqufs1h0ryx/niwBHGFu8HC3hwU+lA==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", "requires": { - "asn1": "0.1.11", - "assert-plus": "^0.1.5", - "ctype": "0.5.3" + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" } }, "https-proxy-agent": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", - "integrity": "sha512-HPCTS1LW51bcyMYbxUIOO4HEOlQ1/1qRaFWcyxvwaqUS9TY88aoEuHUY33kuAh1YhVVaDQhLZsnPd+XNARWZlQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": 
"sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", "requires": { - "agent-base": "^4.1.0", - "debug": "^3.1.0" + "agent-base": "6", + "debug": "4" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } } }, "iconv-lite": { @@ -1355,9 +1873,9 @@ } }, "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "ioredis": { "version": "4.14.1", @@ -1375,11 +1893,6 @@ "standard-as-callback": "^2.0.1" }, "dependencies": { - "cluster-key-slot": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", - "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==" - }, "debug": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", @@ -1406,9 +1919,24 @@ "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==" }, "is-buffer": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz", - "integrity": "sha512-U15Q7MXTuZlrbymiz95PJpZxu8IlipAp4dtS3wOdgPXx3mqBnslrWU14kxfHB+Py/+2PVKSr37dMAgM2A4uArw==" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", + "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" + }, + "is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" + }, + "is-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==" + }, + "is-stream-ended": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz", + "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" }, "is-typedarray": { "version": "1.0.0", @@ -1416,9 +1944,9 @@ "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" }, "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" }, "isstream": { "version": 
"0.1.2", @@ -1444,9 +1972,9 @@ "integrity": "sha512-a3xHnILGMtk+hDOqNwHzF6e2fNbiMrXZvxKQiEv2MlgQP+pjIOzqAmKYD2mDpXYE/44M7g+n9p2bKkYWDUcXCQ==" }, "json-schema-traverse": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha512-4JD/Ivzg7PoW8NzdrBSr3UFwC9mHgvI7Z6z3QGBsSHgKaRTUDmyZAAKJo2UbG1kUVfS9WS8bi36N49U1xw43DA==" + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, "json-stringify-safe": { "version": "5.0.1", @@ -1462,19 +1990,12 @@ "extsprintf": "1.3.0", "json-schema": "0.2.3", "verror": "1.10.0" - }, - "dependencies": { - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" - } } }, "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", "requires": { "buffer-equal-constant-time": "1.0.1", "ecdsa-sig-formatter": "1.0.11", @@ -1482,11 +2003,11 @@ } }, "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", "requires": { - "jwa": "^1.4.1", + "jwa": "^2.0.0", "safe-buffer": "^5.0.1" } }, @@ -1496,9 +2017,19 @@ "integrity": "sha512-x0yf9PL/nx9Nw9oLL8ZVErFAk85/lslwEP7Vz7s5SI1ODXZIgit3C5qyWjw4DxOuO/3Hb4866SQh28a1V1d+WA==" }, "lodash": { - "version": "4.17.13", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.13.tgz", - "integrity": "sha512-vm3/XWXfWtRua0FkUyEHBZy8kCPjErNBT9fJx8Zvs+U6zjqPbTUOpkaoum3O5uiA8sm+yNMHXfYkTUHFoMxFNA==" + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + }, + "lodash.at": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", + "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" + }, + "lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, "lodash.defaults": { "version": "4.2.0", @@ -1510,42 +2041,34 @@ "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", "integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==" }, + "lodash.has": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", + "integrity": 
"sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" + }, "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" }, + "log-driver": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", + "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" + }, "logger-sharelatex": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.7.0.tgz", - "integrity": "sha512-9sxDGPSphOMDqUqGpOu/KxFAVcpydKggWv60g9D7++FDCxGkhLLn0kmBkDdgB00d1PadgX1CBMWKzIBpptDU/Q==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.0.tgz", + "integrity": "sha512-yVTuha82047IiMOQLgQHCZGKkJo6I2+2KtiFKpgkIooR2yZaoTEvAeoMwBesSDSpGUpvUJ/+9UI+PmRyc+PQKQ==", "requires": { + "@google-cloud/logging-bunyan": "^2.0.0", + "@overleaf/o-error": "^2.0.0", "bunyan": "1.8.12", "raven": "1.1.3", - "request": "2.88.0" + "request": "2.88.0", + "yn": "^3.1.1" }, "dependencies": { - "ajv": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz", - "integrity": "sha512-nffhOpkymDECQyR0mnsUtoCE8RlX38G0rYP+wgLWFyZuUyuuojSSvi/+euOiQBIn63whYwYVIIH1TvE3tu4OEg==", - "requires": { - "fast-deep-equal": "^2.0.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" - }, - "aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==" - }, "bunyan": { "version": "1.8.12", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", @@ -1557,90 +2080,14 @@ "safe-json-stringify": "~1" } }, - "combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" - }, - "dtrace-provider": { - "version": "0.8.7", - "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz", - "integrity": "sha512-V+HIGbAdxCIxddHNDwzXi6cx8Cz5RRlQOVcsryHfsyVVebpBEnDwHSgqxpgKzqeU/6/0DWqRLAGUwkbg2ecN1Q==", - "optional": true, - "requires": { - "nan": "^2.10.0" - } - }, - "fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==" - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": 
"sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==" - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "har-validator": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", - "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", - "requires": { - "ajv": "^6.5.5", - "har-schema": "^2.0.0" - } - }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - } - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" - }, - "mime-db": { - "version": "1.40.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", - "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==" - }, "mime-types": { - "version": "2.1.24", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", - "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", "requires": { - "mime-db": "1.40.0" + "mime-db": "1.43.0" } }, - "oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" - }, "qs": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", @@ -1672,33 +2119,6 @@ "tunnel-agent": "^0.6.0", "uuid": "^3.3.2" } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", - "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - } - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -1720,6 
+2140,20 @@ "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", "integrity": "sha512-Y+6V75r+mGWzWEPr9h6PFmStielICu5JBHLUg18jCsD2VFmEfgHbq/EgnY4inElsUD9eKL9id1qp34w46rSIKQ==" }, + "lynx": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", + "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", + "requires": { + "mersenne": "~0.0.3", + "statsd-parser": "~0.0.4" + } + }, + "map-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", + "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" + }, "media-typer": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.2.0.tgz", @@ -1751,9 +2185,9 @@ "integrity": "sha512-2403MfnVypWSNIEpmQ26/ObZ5kSUx37E8NHRvriw0+I8Sne7k0HGuLGCk0OrCqURh4UIygD0cSsYq+Ll+kzNqA==" }, "metrics-sharelatex": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.2.0.tgz", - "integrity": "sha512-kjj3EdkrOJrENLFW/QHiPqBr5AbGEHeti90nMbw6sjKO2TOcuPJHT2Y66m8tqgotnMPKw+kXToRs8Rc9+0xuMQ==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.4.0.tgz", + "integrity": "sha512-FbIRRhReVCEM4ETzh+qVMm3lP33zSSAdrHfSTtegkcB7GGi1kYs+Qt1/dXFawUA8pIZRQTtsfxiS1nZamiSwHg==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", @@ -1761,22 +2195,14 @@ "coffee-script": "1.6.0", "lynx": "~0.1.1", "prom-client": "^11.1.3", - "underscore": "~1.6.0" + "underscore": "~1.6.0", + "yn": "^3.1.1" }, "dependencies": { "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" - }, - "lynx": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", - "requires": { - "mersenne": "~0.0.3", - "statsd-parser": "~0.0.4" - } } } }, @@ -1786,17 +2212,14 @@ "integrity": "sha512-Ysa2F/nqTNGHhhm9MV8ure4+Hc+Y8AWiqUdHxsO7xu8zc92ND9f3kpALHjaP026Ft17UfxrMt95c50PLUeynBw==" }, "mime-db": { - "version": "1.30.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz", - "integrity": "sha512-SUaL89ROHF5P6cwrhLxE1Xmk60cFcctcJl3zwMeQWcoQzt0Al/X8qxUz2gi19NECqYspzbYpAJryIRnLcjp20g==" + "version": "1.43.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", + "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==" }, "mime-types": { - "version": "2.1.17", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz", - "integrity": "sha512-rOFZoFAbaupSpzARUe5CU1P9mwfX+lIFAuj0soNsEZEnrHu6LZNyV7/FClEB/oF9A1o5KStlumRjW6D4Q2FRCA==", - "requires": { - "mime-db": "~1.30.0" - } + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.0.tgz", + "integrity": "sha512-aP3BmIq4ZAPJt6KywU5HbiG0UwCTHZA2JWHO9aLaxyr8OhPOiK4RPSZcS6TDS7zNzGDC3AACnq/XTuEsd/M1Kg==" }, "minimatch": { "version": "3.0.4", @@ -1841,6 +2264,15 @@ "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", "dev": true }, + "debug": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, "glob": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", @@ -1855,18 +2287,6 @@ "path-is-absolute": "^1.0.0" } }, - "growl": { - "version": "1.10.5", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", - "dev": true - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true - }, "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", @@ -1876,14 +2296,11 @@ "minimist": "0.0.8" } }, - "supports-color": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", - "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true } } }, @@ -1913,6 +2330,11 @@ "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.2.1.tgz", "integrity": "sha512-oj4jOSXvWglTsc3wrw86iom3LDPOx1nbipQk+jaG3dy+sMRM6ReSgVr/VlmBuF6lXUrflN9DCcQHeSbAwGUl4g==" }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, "process-nextick-args": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", @@ -1964,6 +2386,35 @@ "thunky": "^1.0.2", "to-mongodb-core": "^2.0.0", "xtend": "^4.0.1" + }, + "dependencies": { + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "morgan": { @@ -1975,9 +2426,9 @@ } }, "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + "version": "0.6.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" }, "multiparty": { "version": "3.2.9", @@ -1986,29 +2437,6 @@ "requires": { "readable-stream": "~1.1.9", "stream-counter": "~0.2.0" - }, - "dependencies": { - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" - }, - "readable-stream": { - "version": "1.1.14", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", - "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" - } - }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" - } } }, "mv": { @@ -2034,9 +2462,9 @@ } }, "nan": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz", - "integrity": "sha512-JY7V6lRkStKcKTvHO5NVSQRv+RV+FIL5pvDoLiAtSL9pKlC5x9PKQcZDsq7m4FO4d57mkhC6Z+QhAh3Jdk5JFw==" + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, "native-or-bluebird": { "version": "1.1.2", @@ -2060,19 +2488,29 @@ "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" }, "node-forge": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz", - "integrity": "sha512-UOfdpxivIYY4g5tqp5FNRNgROVNxRACUxxJREntJLFaJr1E0UEqFtUIk0F/jYx/E+Y6sVXd0KDi/m5My0yGCVw==" - }, - "node-uuid": { - "version": "1.4.8", - "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", - "integrity": "sha512-TkCET/3rr9mUuRp+CpO7qfgT++aAxfDRaalQhwPFzI9BY/2rCDn6OfpZOVggi1AXfTPpfkTrg5f5WQx5G1uLxA==" + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.1.tgz", + "integrity": "sha512-G6RlQt5Sb4GMBzXvhfkeFmbqR6MzhtnT7VTHuLadjkii3rdYHNdw0m8zA4BTxVIh68FicCQ2NSUANpsqkr9jvQ==" }, "oauth-sign": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.4.0.tgz", - "integrity": "sha512-vF36cbrUyfy7Yr6kTIzrj3RsuaPYeJKU3IUOC6MglfNTyiGT6leGvEVOa3UsSsgwBzfVfRnvMiMVyUnpXNqN8w==" + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "requires": { + "ee-first": "1.1.1" + }, + "dependencies": { + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + } + } }, "on-headers": { "version": "0.0.0", @@ -2088,9 +2526,9 @@ } }, "p-limit": { - 
"version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", - "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", + "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", "requires": { "p-try": "^2.0.0" } @@ -2101,9 +2539,9 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, "parse-duration": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz", - "integrity": "sha512-MPkERaX8suJ97HH2TtTN81ASYdFWouJqcnfVYSFHvWCI13vN4NzbvKsBOYN/7o8cTSoaNVMz4H8wG7GjTj0q6g==" + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.2.tgz", + "integrity": "sha512-0qfMZyjOUFBeEIvJ5EayfXJqaEXxQ+Oj2b7tWJM3hvEXvXsYCk05EDVI23oYnEw2NaFYUWdABEVPBvBMh8L/pA==" }, "parse-mongo-url": { "version": "1.1.1", @@ -2154,14 +2592,14 @@ } }, "process-nextick-args": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", - "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "prom-client": { - "version": "11.5.1", - "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz", - "integrity": "sha512-AcFuxVgzoA/4nlpeg9SkM2HkDjNU3V7g2LCLwpudXSbcSLiFpRMVfsCoCY5RYeR/d9jkQng1mCmVKj1mPHvP0Q==", + "version": "11.5.3", + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.3.tgz", + "integrity": "sha512-iz22FmTbtkyL2vt0MdDFY+kWof+S9UB/NACxSn2aJcewtw+EERsen0urSkZ2WrHseNdydsvcxCTAnPcSMZZv4Q==", "requires": { "tdigest": "^0.1.1" } @@ -2184,13 +2622,6 @@ "@types/long": "^4.0.0", "@types/node": "^10.1.0", "long": "^4.0.0" - }, - "dependencies": { - "@types/node": { - "version": "10.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz", - "integrity": "sha512-NelG/dSahlXYtSoVPErrp06tYFrvzj8XLWmKA+X8x0W//4MqbUyZu++giUG/v0bjAT6/Qxa8IjodrfdACyb0Fg==" - } } }, "proxy-addr": { @@ -2202,14 +2633,69 @@ } }, "psl": { - "version": "1.1.32", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz", - "integrity": "sha512-MHACAkHpihU/REGGPLj4sEfc/XKW2bheigvHO1dUqjaKigMp1C8+WLQYRGgeKFMsw5PMfegZcaN8IDXK/cD0+g==" + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz", + "integrity": "sha512-5NsSEDv8zY70ScRnOTn7bK7eanl2MvFrOrS/R6x+dBt5g1ghnj9Zv90kO8GwT8gxcu2ANyFprnFYB85IogIJOQ==" + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "pumpify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz", + "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", + "requires": { + "duplexify": "^4.1.1", + "inherits": "^2.0.3", + "pump": "^3.0.0" + }, + "dependencies": { + "duplexify": { 
+ "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + } + } }, "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" }, "q": { "version": "0.9.2", @@ -2217,9 +2703,9 @@ "integrity": "sha512-ZOxMuWPMJnsUdYhuQ9glpZwKhB4cm8ubYFy1nNCY8TkSAuZun5fd8jCDTlf2ykWnK8x9HGn1stNtLeG179DebQ==" }, "qs": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-2.3.3.tgz", - "integrity": "sha512-f5M0HQqZWkzU8GELTY8LyMrGkr3bPjKoFtTkwUEqJQbcljbeK8M7mliP9Ia2xoOI6oMerp+QPS7oYJtpGmWe/A==" + "version": "0.6.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", + "integrity": "sha512-kN+yNdAf29Jgp+AYHUmC7X4QdJPR8czuMWLNLc0aRxkQ7tB3vJQEONKKT9ou/rW7EbqVec11srC9q9BiVbcnHA==" }, "rand-token": { "version": "0.2.1", @@ -2247,6 +2733,11 @@ "version": "0.3.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", "integrity": "sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==" + }, + "uuid": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", + "integrity": "sha512-rqE1LoOVLv3QrZMjb4NkF5UWlkurCfPyItVnFPNKDDGkHw4dQUdE4zMcLqx28+0Kcf3+bnUk4PisaiRJT4aiaQ==" } } }, @@ -2260,17 +2751,14 @@ } }, "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", "requires": { "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" } }, "redis": { @@ -2333,211 +2821,75 @@ 
} }, "request": { - "version": "2.47.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.47.0.tgz", - "integrity": "sha512-7HDodfmCGAgxZWJddewFP3t3dKGFyMfb/tz9uWkyA3VbR79Wb/ydZ+OihNgOIj1IliYYbqohqox5evZgBCv5aw==", + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", "requires": { - "aws-sign2": "~0.5.0", - "bl": "~0.9.0", - "caseless": "~0.6.0", - "combined-stream": "~0.0.5", - "forever-agent": "~0.5.0", - "form-data": "~0.1.0", - "hawk": "1.1.1", - "http-signature": "~0.10.0", - "json-stringify-safe": "~5.0.0", - "mime-types": "~1.0.1", - "node-uuid": "~1.4.0", - "oauth-sign": "~0.4.0", - "qs": "~2.3.1", - "stringstream": "~0.0.4", - "tough-cookie": ">=0.12.0", - "tunnel-agent": "~0.4.0" + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" }, "dependencies": { - "aws-sign2": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz", - "integrity": "sha512-oqUX0DM5j7aPWPCnpWebiyNIj2wiNI87ZxnOMoGv0aE4TGlBy2N+5iWc6dQ/NOKZaBD2W6PVz8jtOGkWzSC5EA==" - }, - "caseless": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.6.0.tgz", - "integrity": "sha512-/X9C8oGbZJ95LwJyK4XvN9GSBgw/rqBnUg6mejGhf/GNfJukt5tzOXP+CJicXdWSqAX0ETaufLDxXuN2m4/mDg==" - }, "mime-types": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.2.tgz", - "integrity": "sha512-echfutj/t5SoTL4WZpqjA1DCud1XO0WQF3/GJ48YBmc4ZMhCK77QA6Z/w6VTQERLKuJ4drze3kw2TUT8xZXVNw==" + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", + "requires": { + "mime-db": "1.43.0" + } + }, + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + }, + "tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "requires": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + } } } }, "requestretry": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-1.12.2.tgz", - "integrity": "sha512-wDYnH4imurLs5upu31WoPaOFfEu31qhFlF7KgpYbBsmBagFmreZZo8E/XpoQ3erCP5za+72t8k8QI4wlrtwVXw==", + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-1.13.0.tgz", + "integrity": "sha512-Lmh9qMvnQXADGAQxsXHP4rbgO6pffCfuR8XUBdP9aitJcLQJxhp7YZK4xAVYXnPJ5E52mwrfiKQtKonPL8xsmg==", "requires": { "extend": "^3.0.0", "lodash": "^4.15.0", "request": "^2.74.0", "when": "^3.7.7" - }, - "dependencies": { - "assert-plus": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" - }, - "boom": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", - "integrity": "sha512-FA8ZqcHBLjyFCPns8EsFTWxARi8iKzLfl3vXS1n1O6mlUpZvjXg9E+0Ys8mh7k/s8mHVpROgeoUmz4HadhPhAQ==", - "requires": { - "hoek": "4.x.x" - } - }, - "combined-stream": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", - "integrity": "sha512-JgSRe4l4UzPwpJuxfcPWEK1SCrL4dxNjp1uqrQLMop3QZUVo+hDU8w9BJKA4JPbulTWI+UzrI2UA3tK12yQ6bg==", - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "cryptiles": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", - "integrity": "sha512-XBDBQo47RxVcR5GNxNOI38Cac044AdfU60sJBGZsmWw5TDuG5BAby8W/soTp4SIKeCl37sCGwkbh9wGLEd0cLw==", - "requires": { - "boom": "5.x.x" - }, - "dependencies": { - "boom": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz", - "integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==", - "requires": { - "hoek": "4.x.x" - } - } - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==" - }, - "form-data": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.1.tgz", - "integrity": "sha512-ZznzvgkNMfVvSHP0rlg09OeW/g7ib4+NpwNGxLFJOrwUcjN0O8OUASn5cvnpnWve9ZlzW6GUa6NhhlCdb6DqCw==", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.5", - "mime-types": "^2.1.12" - } - }, - "hawk": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz", - "integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==", - "requires": { - "boom": "4.x.x", - "cryptiles": "3.x.x", - "hoek": "4.x.x", - "sntp": "2.x.x" - } - }, - "hoek": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.0.tgz", - "integrity": "sha512-v0XCLxICi9nPfYrS9RL8HbYnXi9obYAeLbSP00BmnZwCK9+Ih9WOjoZ8YoHCoav2csqn4FOz4Orldsy2dmDwmQ==" - }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - } - }, - "oauth-sign": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", - "integrity": "sha512-VlF07iu3VV3+BTXj43Nmp6Irt/G7j/NgEctUS6IweH1RGhURjjCc2NWtzXFPXXWWfc7hgbXQdtiQu2LGp6MxUg==" - }, - "qs": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", - "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" - }, - "request": { - "version": "2.83.0", - "resolved": 
"https://registry.npmjs.org/request/-/request-2.83.0.tgz", - "integrity": "sha512-lR3gD69osqm6EYLk9wB/G1W/laGWjzH90t1vEa2xuxHD5KUrSzp9pUSfTm+YC5Nxt2T8nMPEvKlhbQayU7bgFw==", - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.6.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.5", - "extend": "~3.0.1", - "forever-agent": "~0.6.1", - "form-data": "~2.3.1", - "har-validator": "~5.0.3", - "hawk": "~6.0.2", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.17", - "oauth-sign": "~0.8.2", - "performance-now": "^2.1.0", - "qs": "~6.5.1", - "safe-buffer": "^5.1.1", - "stringstream": "~0.0.5", - "tough-cookie": "~2.3.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.1.0" - } - }, - "sntp": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz", - "integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==", - "requires": { - "hoek": "4.x.x" - } - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "uuid": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", - "integrity": "sha512-DIWtzUkw04M4k3bf1IcpS2tngXEL26YUD2M0tMDUpnUrz2hgzUBlD55a4FjdLGPvfHxS6uluGWvaVEqgBcVa+g==" - } } }, "require-in-the-middle": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.0.tgz", - "integrity": "sha512-GX12iFhCUzzNuIqvei0dTLUbBEjZ420KTY/MmDxe2GQKPDGyH/wgfGMWFABpnM/M6sLwC3IaSg8A95U6gIb+HQ==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.1.tgz", + "integrity": "sha512-EfkM2zANyGkrfIExsECMeNn/uzjvHrE9h36yLXSavmrDiH4tgDNvltAmEKnt4PNLbqKPHZz+uszW2wTKrLUX0w==", "requires": { "debug": "^4.1.1", "module-details-from-path": "^1.0.3", - "resolve": "^1.10.0" + "resolve": "^1.12.0" }, "dependencies": { "debug": { @@ -2568,12 +2920,19 @@ "requires": { "resolve-from": "^2.0.0", "semver": "^5.1.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } } }, "resolve": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz", - "integrity": "sha512-WL2pBDjqT6pGUNSUzMw00o4T7If+z4H2x3Gz893WoUQ5KW8Vr9txp00ykiP16VBaZF5+j/OcXJHZ9+PCvdiDKw==", + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", "requires": { "path-parse": "^1.0.6" } @@ -2597,11 +2956,27 @@ "integrity": "sha512-jp4YlI0qyDFfXiXGhkCOliBN1G7fRH03Nqy8YdShzGqbY5/9S2x/IR6C88ls2DFkbWuL3ASkP7QD3pVrNpPgwQ==" }, "retry-request": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.0.0.tgz", - "integrity": "sha512-S4HNLaWcMP6r8E4TMH52Y7/pM8uNayOcTDDQNBwsCccL1uI+Ol2TljxRDPzaNfbhOB30+XWP5NnZkB3LiJxi1w==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz", + "integrity": 
"sha512-BINDzVtLI2BDukjWmjAIRZ0oglnCAkpP2vQjM3jdLhmT62h0xnQgciPwBRDAvHqpkPT2Wo1XuUyLyn6nbGrZQQ==", "requires": { - "through2": "^2.0.0" + "debug": "^4.1.1", + "through2": "^3.0.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } } }, "rimraf": { @@ -2619,9 +2994,9 @@ "integrity": "sha512-fJhQQI5tLrQvYIYFpOnFinzv9dwmR7hRnUz1XqP3OJ1jIweTNOd6aTO4jwQSgcBSFUB+/KHJxuGneime+FdzOw==" }, "safe-buffer": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "safe-json-stringify": { "version": "1.2.0", @@ -2658,9 +3033,9 @@ "integrity": "sha512-ya4sPuUOfcrJnfC+OUqTFgFVBEMOXMS1Xopn0wwIhxKwD4eveTwJoIUN9u1QHJ47nL29/m545dV8KqI92MlHPw==" }, "semver": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz", - "integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" }, "send": { "version": "0.4.3", @@ -2673,21 +3048,6 @@ "fresh": "0.2.2", "mime": "1.2.11", "range-parser": "~1.0.0" - }, - "dependencies": { - "debug": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", - "requires": { - "ms": "0.6.2" - } - }, - "ms": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" - } } }, "serve-favicon": { @@ -2746,12 +3106,13 @@ "buster-format": "~0.5" } }, - "sntp": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz", - "integrity": "sha512-bDLrKa/ywz65gCl+LmOiIhteP1bhEsAAzhfMedPoiHP3dyYnAevlaJshdqb9Yu0sRifyP/fRqSt8t+5qGIWlGQ==", + "snakecase-keys": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.1.2.tgz", + "integrity": "sha512-NrzHj8ctStnd1LYx3+L4buS7yildFum7WAbQQxkhPCNi3Qeqv7hoBne2c9n++HWxDG9Nv23pNEyyLCITZTv24Q==", "requires": { - "hoek": "0.9.x" + "map-obj": "^4.0.0", + "to-snake-case": "^1.0.0" } }, "source-map": { @@ -2781,21 +3142,6 @@ "jsbn": "~0.1.0", "safer-buffer": "^2.0.2", "tweetnacl": "~0.14.0" - }, - "dependencies": { - "asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", - "requires": { - "safer-buffer": "~2.1.0" - } - }, - "assert-plus": { - "version": "1.0.0", - 
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" - } } }, "stack-trace": { @@ -2819,48 +3165,39 @@ "integrity": "sha512-GjA2zKc2iXUUKRcOxXQmhEx0Ev3XHJ6c8yWGqhQjWwhGrqNwSsvq9YlRLgoGtZ5Kx2Ln94IedaqJ5GUG6aBbxA==", "requires": { "readable-stream": "~1.1.8" - }, - "dependencies": { - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" - }, - "readable-stream": { - "version": "1.1.14", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", - "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" - } - }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" - } + } + }, + "stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "requires": { + "stubs": "^3.0.0" } }, "stream-shift": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", - "integrity": "sha512-Afuc4BKirbx0fwm9bKOehZPG01DJkm/4qbklw4lo9nMPqd2x0kZTLcgwQUXdGiPPY489l3w8cQ5xEEAGbg8ACQ==" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" }, "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" }, - "stringstream": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.6.tgz", - "integrity": "sha512-87GEBAkegbBcweToUrdzf3eLhWNg06FJTebl4BVJz/JgWy8CvEr9dRtX5qWphiynMSQlxxi+QqN0z5T32SLlhA==" + "stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + }, + "supports-color": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", + "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } }, "tdigest": { "version": "0.1.1", @@ -2871,20 +3208,15 @@ } }, "teeny-request": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", - "integrity": 
"sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.2.tgz", + "integrity": "sha512-B6fxA0fSnY/bul06NggdN1nywtr5U5Uvt96pHfTi8pi4MNe6++VUWcAAFBrcMeha94s+gULwA5WvagoSZ+AcYg==", "requires": { - "https-proxy-agent": "^2.2.1", + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", "node-fetch": "^2.2.0", + "stream-events": "^1.0.5", "uuid": "^3.3.2" - }, - "dependencies": { - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" - } } }, "through": { @@ -2893,23 +3225,47 @@ "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" }, "through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", + "integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==", "requires": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" + "readable-stream": "2 || 3" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + } } }, "thunky": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.0.3.tgz", - "integrity": "sha512-YwT8pjmNcAXBZqrubu22P4FYsh2D4dxRmnWBOL8Jk8bUcRUtc5326kx32tuTmFDAZtLOGEVNl8POAR8j896Iow==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", + "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, "timekeeper": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.0.0.tgz", - "integrity": "sha512-DVH+iEKcVwU3JkZK0Z86qFx8osIG05U1H/F6lAE+iPfvElioM9HPVd2ZKmoI4zS0AWsDogOXl/BuKWXNadI/fw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", + "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==", "dev": true }, "to-mongodb-core": { @@ -2917,24 +3273,67 @@ "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz", "integrity": "sha512-vfXXcGYFP8+0L5IPOtUzzVIvPE/G3GN0TKa/PRBlzPqYyhm+UxhPmvv634EQgO4Ot8dHbBFihOslMJQclY8Z9A==" }, - "tough-cookie": { - "version": "2.3.3", - "resolved": 
"https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.3.tgz", - "integrity": "sha512-WR9pjSY3qO0z3yC6g33CRcVt2Wbevh0gP1XiSFql0/xRioi9qbDs3C+g4Nv2N8jmv/BloIi/SYoy/mfw5vus2A==", + "to-no-case": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", + "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + }, + "to-snake-case": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", + "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", "requires": { + "to-space-case": "^1.0.0" + } + }, + "to-space-case": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", + "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", + "requires": { + "to-no-case": "^1.0.0" + } + }, + "tough-cookie": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "requires": { + "psl": "^1.1.24", "punycode": "^1.4.1" + }, + "dependencies": { + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" + } } }, "tunnel-agent": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", - "integrity": "sha512-e0IoVDWx8SDHc/hwFTqJDQ7CCDTEeGhmcT9jkWJjoGQSpgBz20nAMr80E3Tpk7PatJ1b37DQDgJR3CNSzcMOZQ==" + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "requires": { + "safe-buffer": "^5.0.1" + } }, "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, + "type-detect": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-1.0.0.tgz", + "integrity": "sha512-f9Uv6ezcpvCQjJU0Zqbg+65qdcszv3qUQsZfjdRbWiZ7AMenrX1u0lNk9EoWWX6e1F+NULyg27mtdeZ5WhpljA==", + "dev": true + }, + "type-fest": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.9.0.tgz", + "integrity": "sha512-j55pzONIdg7rdtJTRZPKIbV0FosUqYdhHK1aAYJIrUvejv1VVyBokrILE8KQDT4emW/1Ev9tx+yZG+AxuSBMmA==" + }, "type-is": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.3.1.tgz", @@ -2942,13 +3341,6 @@ "requires": { "media-typer": "0.2.0", "mime-types": "1.0.0" - }, - "dependencies": { - "mime-types": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.0.tgz", - "integrity": "sha512-aP3BmIq4ZAPJt6KywU5HbiG0UwCTHZA2JWHO9aLaxyr8OhPOiK4RPSZcS6TDS7zNzGDC3AACnq/XTuEsd/M1Kg==" - } } }, "uid-safe": { @@ -2978,13 +3370,6 @@ "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", "requires": { "punycode": "^2.1.0" - }, - "dependencies": { - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - 
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" - } } }, "util-deprecate": { @@ -2998,9 +3383,9 @@ "integrity": "sha512-HwU9SLQEtyo+0uoKXd1nkLqigUWLB+QuNQR4OcmB73eWqksM5ovuqcycks2x043W8XVb75rG1HQ0h93TMXkzQQ==" }, "uuid": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha512-rqE1LoOVLv3QrZMjb4NkF5UWlkurCfPyItVnFPNKDDGkHw4dQUdE4zMcLqx28+0Kcf3+bnUk4PisaiRJT4aiaQ==" + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, "vary": { "version": "0.1.0", @@ -3015,13 +3400,6 @@ "assert-plus": "^1.0.0", "core-util-is": "1.0.2", "extsprintf": "^1.2.0" - }, - "dependencies": { - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" - } } }, "vhost": { @@ -3029,6 +3407,11 @@ "resolved": "https://registry.npmjs.org/vhost/-/vhost-2.0.0.tgz", "integrity": "sha512-TSExWM12MVtvIuBLMPyBuWBQLbHnmDZ3zfsoZwcUmKxzPX8l/cHKl5vVfbo8/KZ56UBAc/tTYXbaDGVDaIcrWw==" }, + "walkdir": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", + "integrity": "sha512-3eBwRyEln6E1MSzcxcVpQIhRG8Q1jLvEqRmCZqS3dsfXEDR/AhOF4d+jHg1qvDCpYaVRZjENPQyrVxAkQqxPgQ==" + }, "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", @@ -3040,14 +3423,19 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "xtend": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", - "integrity": "sha512-iTwvhNBRetXWe81+VcIw5YeadVSWyze7uA7nVnpP13ulrpnJ3UfQm5ApGnrkmxDJFdrblRdZs0EvaTCIfei5oQ==" + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "yallist": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", - "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==" } } } From 4e7c5c904667a7952a6872aa4da2a45ef714e485 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 18 Feb 2020 09:21:30 +0000 Subject: [PATCH 569/769] update requestretry --- services/document-updater/package-lock.json | 11 +++++------ services/document-updater/package.json | 2 +- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 61f5f68388..2301dc7728 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -2872,13 +2872,12 @@ } }, "requestretry": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-1.13.0.tgz", - 
"integrity": "sha512-Lmh9qMvnQXADGAQxsXHP4rbgO6pffCfuR8XUBdP9aitJcLQJxhp7YZK4xAVYXnPJ5E52mwrfiKQtKonPL8xsmg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-4.1.0.tgz", + "integrity": "sha512-q3IT2vz5vkcMT6xgwB/BWzsmnu7N/27l9fW86U48gt9Mwrce5rSEyFvpAW7Il1/B78/NBUlYBvcCY1RzWUWy7w==", "requires": { - "extend": "^3.0.0", - "lodash": "^4.15.0", - "request": "^2.74.0", + "extend": "^3.0.2", + "lodash": "^4.17.10", "when": "^3.7.7" } }, diff --git a/services/document-updater/package.json b/services/document-updater/package.json index ea4c27cb67..b827df1544 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -29,7 +29,7 @@ "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", "request": "^2.47.0", - "requestretry": "^1.12.0", + "requestretry": "^4.1.0", "settings-sharelatex": "^1.1.0" }, "devDependencies": { From 412eabc3063d37457c1f79e49e52f8848db1d9bc Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 19 Feb 2020 09:26:42 +0000 Subject: [PATCH 570/769] Revert "add tls settings" This reverts commit 72a4994cebab2731f99f0ada7a094c8a0acb3293. --- .../config/settings.defaults.coffee | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index a14a6d29fb..6724aa6a9a 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -1,7 +1,6 @@ Path = require('path') http = require('http') http.globalAgent.maxSockets = 300 -fs = require('fs') module.exports = internal: @@ -45,12 +44,6 @@ module.exports = key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" - tls: if process.env['REDIS_CA_CERT'] && process.env['REDIS_CLIENT_CERT'] && process.env['REDIS_CLIENT_KEY'] - ca: fs.readFileSync(process.env['REDIS_CA_CERT']), - cert: fs.readFileSync( - process.env['REDIS_CLIENT_CERT'] - ), - key: fs.readFileSync(process.env['REDIS_CLIENT_KEY']) new_project_history: port: process.env["NEW_HISTORY_REDIS_PORT"] or "6379" @@ -61,12 +54,6 @@ module.exports = projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" projectHistoryMigrationKey: ({project_id}) -> "ProjectHistory:MigrationKey:{#{project_id}}" migration_phase: process.env["PROJECT_HISTORY_MIGRATION_PHASE"] or "prepare" - tls: if process.env['NEW_HISTORY_REDIS_CA_CERT'] && process.env['NEW_HISTORY_REDIS_CLIENT_CERT'] && process.env['NEW_HISTORY_REDIS_CLIENT_KEY'] - ca: fs.readFileSync(process.env['NEW_HISTORY_REDIS_CA_CERT']), - cert: fs.readFileSync( - process.env['NEW_HISTORY_REDIS_CLIENT_CERT'] - ), - key: fs.readFileSync(process.env['NEW_HISTORY_REDIS_CLIENT_KEY']) redisOptions: keepAlive: 100 From 922f237c393e441b0124c44e4beabcc31d3bc9fe Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 19 Feb 2020 09:26:59 +0000 Subject: [PATCH 571/769] Revert "add redislabs ca cert to repository" This reverts commit eb7419b0f45992228393086caf7ed6f66296801f. 
--- services/document-updater/Dockerfile | 1 - services/document-updater/install_deps.sh | 2 - services/document-updater/redislabs_ca.pem | 77 ---------------------- 3 files changed, 80 deletions(-) delete mode 100644 services/document-updater/install_deps.sh delete mode 100644 services/document-updater/redislabs_ca.pem diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 2845544ae6..59f5e61889 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -17,7 +17,6 @@ FROM node:6.9.5 COPY --from=app /app /app WORKDIR /app -RUN chmod 0755 ./install_deps.sh && ./install_deps.sh USER node CMD ["node", "--expose-gc", "app.js"] diff --git a/services/document-updater/install_deps.sh b/services/document-updater/install_deps.sh deleted file mode 100644 index 8016ec6c85..0000000000 --- a/services/document-updater/install_deps.sh +++ /dev/null @@ -1,2 +0,0 @@ -cp redislabs_ca.pem /usr/local/share/ca-certificates/redislabs_ca.crt -update-ca-certificates diff --git a/services/document-updater/redislabs_ca.pem b/services/document-updater/redislabs_ca.pem deleted file mode 100644 index a4af612d25..0000000000 --- a/services/document-updater/redislabs_ca.pem +++ /dev/null @@ -1,77 +0,0 @@ -Certificate: - Data: - Version: 3 (0x2) - Serial Number: 11859567854091286320 (0xa495a620ecc0b730) - Signature Algorithm: sha1WithRSAEncryption - Issuer: O=Garantia Data, CN=SSL Certification Authority - Validity - Not Before: Oct 1 12:14:55 2013 GMT - Not After : Sep 29 12:14:55 2023 GMT - Subject: O=Garantia Data, CN=SSL Certification Authority - Subject Public Key Info: - Public Key Algorithm: rsaEncryption - Public-Key: (2048 bit) - Modulus: - 00:b6:6a:92:1f:c3:73:35:8f:26:7c:67:1c:b4:3b: - 40:bd:13:e0:1e:02:0c:a5:81:28:27:22:b2:b8:86: - 6c:0e:99:78:f5:95:36:8e:21:7c:a4:02:e8:9a:f3: - 7d:1f:b4:f3:53:5e:0f:a5:5c:59:48:b3:ae:67:7e: - 8e:d3:e1:21:8e:1c:f9:65:50:62:6e:4f:29:a3:7a: - 0d:3d:62:99:87:71:43:0e:da:a8:ee:63:d8:a5:02: - 12:1f:dc:ce:7a:4b:c5:e4:87:a1:3c:65:47:7e:04: - 43:01:76:f1:69:77:7a:0d:af:73:97:2d:f0:b8:d4: - dd:ea:33:59:59:37:81:be:da:97:1f:66:48:0d:92: - 82:6b:97:e6:51:10:6b:09:7e:fa:b4:a3:b0:14:ad: - 7a:66:36:04:3c:0e:a4:03:17:22:b7:44:c8:ff:dc: - 56:7f:26:92:f8:bf:04:3b:39:33:91:be:d3:d8:f4: - 81:f8:72:0b:34:56:31:0e:c7:9f:bd:6e:d5:ea:25: - 47:1c:15:c6:08:b7:4c:c9:fe:fe:f4:da:15:2a:b1: - 2a:38:1c:93:ac:ee:01:88:c1:44:f6:87:7b:ba:8b: - c4:73:6b:d5:2a:3f:31:cf:67:3f:2f:b7:c0:77:9b: - 17:06:c8:72:75:28:8f:06:e9:e2:77:2d:91:66:e3: - 6f:67 - Exponent: 65537 (0x10001) - X509v3 extensions: - X509v3 Subject Key Identifier: - FD:70:86:D7:2B:C9:D9:96:DD:92:5E:B9:2A:0A:64:82:A3:CD:ED:F0 - X509v3 Authority Key Identifier: - keyid:FD:70:86:D7:2B:C9:D9:96:DD:92:5E:B9:2A:0A:64:82:A3:CD:ED:F0 - - X509v3 Basic Constraints: - CA:TRUE - Signature Algorithm: sha1WithRSAEncryption - 6d:9e:ad:78:70:44:06:bb:f9:93:81:b3:40:7a:5f:9e:c7:c3: - 27:75:47:89:1f:99:77:2c:d2:bb:5a:95:b3:e9:be:05:0b:4a: - 20:7e:4c:26:df:dc:46:e1:26:71:c6:ca:f7:42:63:5b:6f:95: - f7:cb:8d:d0:3b:1c:9d:0f:08:e9:fe:61:82:c1:03:4a:53:53: - f7:72:be:b3:7a:4a:ef:0d:b9:2e:72:b9:b9:ed:f6:66:f5:de: - 70:c6:62:8d:6b:9e:dd:18:45:fc:4d:fb:c0:cc:dd:f5:c8:56: - bd:37:f0:0d:f4:52:53:d7:d8:eb:b5:13:11:49:4f:43:19:b8: - 52:98:e9:9b:cb:74:8e:bf:d5:c6:e0:9a:0b:8c:94:08:4c:f8: - 38:4a:c9:5e:92:af:9e:bd:f4:b3:37:ce:a7:88:f3:5e:a9:66: - 69:51:10:44:d8:90:6a:fd:d6:ae:e4:06:95:c9:bb:f7:6d:1d: - a1:b1:83:56:46:bb:ac:3f:3c:2b:18:19:47:04:09:61:0d:60: - 
3e:15:40:f7:7c:37:7d:89:8c:e7:ee:ea:f1:20:a0:40:30:7c: - f3:fe:de:81:a9:67:89:b7:7b:00:02:71:63:80:7a:7a:9f:95: - bf:9c:41:80:b8:3e:c1:7b:a9:b5:c3:99:16:96:ad:b2:a7:b4: - e9:59:de:7d ------BEGIN CERTIFICATE----- -MIIDTzCCAjegAwIBAgIJAKSVpiDswLcwMA0GCSqGSIb3DQEBBQUAMD4xFjAUBgNV -BAoMDUdhcmFudGlhIERhdGExJDAiBgNVBAMMG1NTTCBDZXJ0aWZpY2F0aW9uIEF1 -dGhvcml0eTAeFw0xMzEwMDExMjE0NTVaFw0yMzA5MjkxMjE0NTVaMD4xFjAUBgNV -BAoMDUdhcmFudGlhIERhdGExJDAiBgNVBAMMG1NTTCBDZXJ0aWZpY2F0aW9uIEF1 -dGhvcml0eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALZqkh/DczWP -JnxnHLQ7QL0T4B4CDKWBKCcisriGbA6ZePWVNo4hfKQC6JrzfR+081NeD6VcWUiz -rmd+jtPhIY4c+WVQYm5PKaN6DT1imYdxQw7aqO5j2KUCEh/cznpLxeSHoTxlR34E -QwF28Wl3eg2vc5ct8LjU3eozWVk3gb7alx9mSA2SgmuX5lEQawl++rSjsBStemY2 -BDwOpAMXIrdEyP/cVn8mkvi/BDs5M5G+09j0gfhyCzRWMQ7Hn71u1eolRxwVxgi3 -TMn+/vTaFSqxKjgck6zuAYjBRPaHe7qLxHNr1So/Mc9nPy+3wHebFwbIcnUojwbp -4nctkWbjb2cCAwEAAaNQME4wHQYDVR0OBBYEFP1whtcrydmW3ZJeuSoKZIKjze3w -MB8GA1UdIwQYMBaAFP1whtcrydmW3ZJeuSoKZIKjze3wMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAG2erXhwRAa7+ZOBs0B6X57Hwyd1R4kfmXcs0rta -lbPpvgULSiB+TCbf3EbhJnHGyvdCY1tvlffLjdA7HJ0PCOn+YYLBA0pTU/dyvrN6 -Su8NuS5yubnt9mb13nDGYo1rnt0YRfxN+8DM3fXIVr038A30UlPX2Ou1ExFJT0MZ -uFKY6ZvLdI6/1cbgmguMlAhM+DhKyV6Sr5699LM3zqeI816pZmlREETYkGr91q7k -BpXJu/dtHaGxg1ZGu6w/PCsYGUcECWENYD4VQPd8N32JjOfu6vEgoEAwfPP+3oGp -Z4m3ewACcWOAenqflb+cQYC4PsF7qbXDmRaWrbKntOlZ3n0= ------END CERTIFICATE----- From 2e178b0e2d48e26935af69848baa3c4c80d72c3f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 21 Feb 2020 14:06:58 +0000 Subject: [PATCH 572/769] resolve merge conflicts --- services/document-updater/Dockerfile | 13 ++++++--- .../document-updater/docker-compose.ci.yml | 19 +++++++------ services/document-updater/docker-compose.yml | 27 ++++++++----------- 3 files changed, 29 insertions(+), 30 deletions(-) diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 59f5e61889..e538fb48d9 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -1,7 +1,14 @@ -FROM node:6.9.5 as app +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.3.5 + +FROM node:10.19.0 as base WORKDIR /app +FROM base as app + #wildcard as some files may not be in all repos COPY package*.json npm-shrink*.json /app/ @@ -12,11 +19,9 @@ COPY . /app RUN npm run compile:all -FROM node:6.9.5 +FROM base COPY --from=app /app /app - -WORKDIR /app USER node CMD ["node", "--expose-gc", "app.js"] diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index d65f97b913..56f3e1c42e 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,9 +1,9 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.24 +# Version: 1.3.5 -version: "2" +version: "2.3" services: test_unit: @@ -13,7 +13,6 @@ services: environment: NODE_ENV: test - test_acceptance: build: . image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER @@ -26,14 +25,15 @@ services: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test depends_on: - - mongo - - redis - - new_redis + mongo: + condition: service_healthy + redis: + condition: service_healthy + new_redis: + condition: service_healthy user: node command: npm run test:acceptance:_run - - tar: build: . 
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER @@ -41,7 +41,6 @@ services: - ./:/tmp/build/ command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root - redis: image: redis @@ -49,4 +48,4 @@ services: image: redis mongo: - image: mongo:3.4 + image: mongo:3.6 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 31869acb50..805e3b0d06 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,13 +1,13 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.24 +# Version: 1.3.5 -version: "2" +version: "2.3" services: test_unit: - image: node:6.9.5 + image: node:10.19.0 volumes: - .:/app working_dir: /app @@ -18,7 +18,7 @@ services: user: node test_acceptance: - build: . + image: node:10.19.0 volumes: - .:/app working_dir: /app @@ -33,19 +33,14 @@ services: NODE_ENV: test user: node depends_on: - - mongo - - redis - - new_redis + mongo: + condition: service_healthy + redis: + condition: service_healthy + new_redis: + condition: service_healthy command: npm run test:acceptance - tar: - build: . - image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER - volumes: - - ./:/tmp/build/ - command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . - user: root - redis: image: redis @@ -53,4 +48,4 @@ services: image: redis mongo: - image: mongo:3.4 + image: mongo:3.6 From 852ae5fbaee1c5f8622e46d0c12bfe651b137f7e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 26 Feb 2020 09:47:38 +0000 Subject: [PATCH 573/769] move bunyan out of devDependencies (needed by @google-cloud/logging-bunyan) --- services/document-updater/package-lock.json | 2 -- services/document-updater/package.json | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 2301dc7728..f66bcc3fd4 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1072,7 +1072,6 @@ "version": "0.22.3", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-0.22.3.tgz", "integrity": "sha512-v9dd5qmd6nJHEi7fiNo1fR2pMpE8AiB47Ap984p4iJKj+dEA69jSccmq6grFQn6pxIh0evvKpC5XO1SKfiaRoQ==", - "dev": true, "requires": { "dtrace-provider": "0.2.8", "mv": "~2" @@ -1082,7 +1081,6 @@ "version": "0.2.8", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.2.8.tgz", "integrity": "sha512-wufYnYt4ISHnT9MEiRgQ3trXuolt7mICTa/ckT+KYHR667K9H82lmI8KM7zKUJ8l5I343A34wJnvL++1TJn1iA==", - "dev": true, "optional": true } } diff --git a/services/document-updater/package.json b/services/document-updater/package.json index b827df1544..6051cca512 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,6 +21,7 @@ }, "dependencies": { "async": "^2.5.0", + "bunyan": "~0.22.1", "coffee-script": "~1.7.0", "express": "3.11.0", "lodash": "^4.17.13", @@ -33,7 +34,6 @@ "settings-sharelatex": "^1.1.0" }, "devDependencies": { - "bunyan": "~0.22.1", "chai": "^3.5.0", "chai-spies": "^0.7.1", "cluster-key-slot": "^1.0.5", From ffd8d0745db87ec6454fee79fee46e74dae226ca Mon Sep 17 00:00:00 2001 From: nate stemen Date: Fri, 6 Mar 2020 13:49:30 -0500 Subject: [PATCH 574/769] use empty object for ranges if it doesn't exist --- services/document-updater/app/coffee/DocumentManager.coffee | 
2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 59db98e97f..ad0cb0eecb 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -30,7 +30,7 @@ module.exports = DocumentManager = return callback(error) if error? RedisManager.setHistoryType doc_id, projectHistoryType, (error) -> return callback(error) if error? - callback null, lines, version, ranges, pathname, projectHistoryId, null, false + callback null, lines, version, ranges || {}, pathname, projectHistoryId, null, false else callback null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true From c09bc0e8685cf461500a9ffb7dac7796ba7fb1ef Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Sat, 7 Mar 2020 08:11:18 -0500 Subject: [PATCH 575/769] Add a "flush: false" option to the doc delete endpoint This will delete the document from Redis without flushing to web, docstore or history. To be used when something is broken. --- services/document-updater/app.coffee | 2 +- .../app/coffee/DocumentManager.coffee | 4 +++ .../app/coffee/HttpController.coffee | 28 ++++++++++++------- .../HttpController/HttpControllerTests.coffee | 23 +++++++++++---- 4 files changed, 41 insertions(+), 16 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index e3aee88bf7..96c7514f64 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -54,7 +54,7 @@ app.post '/project/:project_id/get_and_flush_if_old', HttpCont app.post '/project/:project_id/clearState', HttpController.clearProjectState app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded -app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc +app.delete '/project/:project_id/doc/:doc_id', HttpController.deleteDoc app.delete '/project/:project_id', HttpController.deleteProject app.delete '/project', HttpController.deleteMultipleProjects app.post '/project/:project_id', HttpController.updateProject diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 59db98e97f..1920e05777 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -222,6 +222,10 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, callback + deleteDocWithLock: (project_id, doc_id, callback) -> + UpdateManager = require "./UpdateManager" + UpdateManager.lockUpdatesAndDo RedisManager.removeDocFromMemory, project_id, doc_id, callback + acceptChangesWithLock: (project_id, doc_id, change_ids, callback = (error) ->) -> UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.acceptChanges, project_id, doc_id, change_ids, callback diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index e2e2e712bc..e1b1a57d7b 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -103,20 +103,28 @@ module.exports = HttpController = logger.log 
project_id: project_id, doc_id: doc_id, "flushed doc via http" res.send 204 # No Content - flushAndDeleteDoc: (req, res, next = (error) ->) -> + deleteDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id - logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http" + flush = req.body.flush ? true + logger.log project_id: project_id, doc_id: doc_id, flush: flush, "deleting doc via http" timer = new Metrics.Timer("http.deleteDoc") - DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) -> - timer.done() - # There is no harm in flushing project history if the previous call - # failed and sometimes it is required - HistoryManager.flushProjectChangesAsync project_id + if flush + DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) -> + timer.done() + # There is no harm in flushing project history if the previous call + # failed and sometimes it is required + HistoryManager.flushProjectChangesAsync project_id - return next(error) if error? - logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" - res.send 204 # No Content + return next(error) if error? + logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" + res.send 204 # No Content + else + DocumentManager.deleteDocWithLock project_id, doc_id, (error) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" + res.send 204 # No Content flushProject: (req, res, next = (error) ->) -> project_id = req.params.project_id diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index b8ace494f5..d816babead 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -264,18 +264,19 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true - - describe "flushAndDeleteDoc", -> + + describe "deleteDoc", -> beforeEach -> @req = params: project_id: @project_id doc_id: @doc_id + body: {} describe "successfully", -> beforeEach -> @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2) - @HttpController.flushAndDeleteDoc(@req, @res, @next) + @HttpController.deleteDoc(@req, @res, @next) it "should flush and delete the doc", -> @DocumentManager.flushAndDeleteDocWithLock @@ -294,16 +295,28 @@ describe "HttpController", -> it "should log the request", -> @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, "deleting doc via http") + .calledWith(doc_id: @doc_id, project_id: @project_id, flush: true, "deleting doc via http") .should.equal true it "should time the request", -> @Metrics.Timer::done.called.should.equal true + describe "without flush", -> + beforeEach -> + @req.body.flush = false + @DocumentManager.deleteDocWithLock = sinon.stub().yields() + @HttpController.deleteDoc(@req, @res, @next) + + it "should delete the doc", -> + @DocumentManager.deleteDocWithLock.calledWith(@project_id, @doc_id).should.equal true + + it "should return a successful No Content response", -> + @res.send.calledWith(204).should.equal true + describe "when an errors occurs", -> beforeEach -> @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2, new Error("oops")) - @HttpController.flushAndDeleteDoc(@req, @res, @next) + 
@HttpController.deleteDoc(@req, @res, @next) it "should flush project history", -> @HistoryManager.flushProjectChangesAsync From 9b70eb75b33ff2ad61b580bfb4d75a692ecb117f Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 9 Mar 2020 16:27:32 -0400 Subject: [PATCH 576/769] Rename flush param to skip_flush in delete doc Also move it to the query string instead of the body. --- .../app/coffee/HttpController.coffee | 27 +++++++++++-------- .../HttpController/HttpControllerTests.coffee | 6 ++--- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index e1b1a57d7b..7ed408cfcd 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -106,10 +106,21 @@ module.exports = HttpController = deleteDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id - flush = req.body.flush ? true - logger.log project_id: project_id, doc_id: doc_id, flush: flush, "deleting doc via http" + skip_flush = req.query.skip_flush == 'true' timer = new Metrics.Timer("http.deleteDoc") - if flush + if skip_flush + logger.log project_id: project_id, doc_id: doc_id, "deleting doc skipping flush via http (contents may be lost)" + + # Warning: This action is destructive. Skipping the flush will lose + # contents that have not been flushed yet. Use this to fix a document in a + # bad state that can't be flushed anyway. + DocumentManager.deleteDocWithLock project_id, doc_id, (error) -> + timer.done() + return next(error) if error? + logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" + res.send 204 # No Content + else + logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http" DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) -> timer.done() # There is no harm in flushing project history if the previous call @@ -119,12 +130,6 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" res.send 204 # No Content - else - DocumentManager.deleteDocWithLock project_id, doc_id, (error) -> - timer.done() - return next(error) if error? 
- logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" - res.send 204 # No Content flushProject: (req, res, next = (error) ->) -> project_id = req.params.project_id @@ -212,7 +217,7 @@ module.exports = HttpController = flushAllProjects: (req, res, next = (error)-> )-> res.setTimeout(5 * 60 * 1000) - options = + options = limit : req.query.limit || 1000 concurrency : req.query.concurrency || 5 dryRun : req.query.dryRun || false @@ -225,7 +230,7 @@ module.exports = HttpController = flushQueuedProjects: (req, res, next = (error) ->) -> res.setTimeout(10 * 60 * 1000) - options = + options = limit : req.query.limit || 1000 timeout: 5 * 60 * 1000 min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index d816babead..c182727ebc 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -271,7 +271,7 @@ describe "HttpController", -> params: project_id: @project_id doc_id: @doc_id - body: {} + query: {} describe "successfully", -> beforeEach -> @@ -295,7 +295,7 @@ describe "HttpController", -> it "should log the request", -> @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, flush: true, "deleting doc via http") + .calledWith(doc_id: @doc_id, project_id: @project_id, "deleting doc via http") .should.equal true it "should time the request", -> @@ -303,7 +303,7 @@ describe "HttpController", -> describe "without flush", -> beforeEach -> - @req.body.flush = false + @req.query.skip_flush = 'true' @DocumentManager.deleteDocWithLock = sinon.stub().yields() @HttpController.deleteDoc(@req, @res, @next) From d9caced0d60502068d697446851c35ae28228da5 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Tue, 10 Mar 2020 09:40:49 -0400 Subject: [PATCH 577/769] Change skip_flush option to ignore_flush_errors in delete doc Instead of skipping the flush, we'll still try to flush and proceed with the doc deletion, even when the flush fails. --- .../app/coffee/DocumentManager.coffee | 18 +++++------ .../app/coffee/HttpController.coffee | 32 ++++++------------- .../app/coffee/ProjectManager.coffee | 2 +- .../DocumentManagerTests.coffee | 27 +++++++++++++--- .../HttpController/HttpControllerTests.coffee | 16 ++++++---- .../flushAndDeleteProjectTests.coffee | 8 ++--- 6 files changed, 56 insertions(+), 47 deletions(-) diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.coffee index 1920e05777..7a00641a93 100644 --- a/services/document-updater/app/coffee/DocumentManager.coffee +++ b/services/document-updater/app/coffee/DocumentManager.coffee @@ -91,7 +91,7 @@ module.exports = DocumentManager = return callback(error) if error? callback null else - DocumentManager.flushAndDeleteDoc project_id, doc_id, (error) -> + DocumentManager.flushAndDeleteDoc project_id, doc_id, {}, (error) -> # There is no harm in flushing project history if the previous # call failed and sometimes it is required HistoryManager.flushProjectChangesAsync project_id @@ -115,14 +115,18 @@ module.exports = DocumentManager = return callback(error) if error? 
RedisManager.clearUnflushedTime doc_id, callback - flushAndDeleteDoc: (project_id, doc_id, _callback = (error) ->) -> + flushAndDeleteDoc: (project_id, doc_id, options, _callback) -> timer = new Metrics.Timer("docManager.flushAndDeleteDoc") callback = (args...) -> timer.done() _callback(args...) DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> - return callback(error) if error? + if error? + if options.ignoreFlushErrors + logger.warn {project_id: project_id, doc_id: doc_id, err: error}, "ignoring flush error while deleting document" + else + return callback(error) # Flush in the background since it requires a http request HistoryManager.flushDocChangesAsync project_id, doc_id @@ -218,13 +222,9 @@ module.exports = DocumentManager = UpdateManager = require "./UpdateManager" UpdateManager.lockUpdatesAndDo DocumentManager.flushDocIfLoaded, project_id, doc_id, callback - flushAndDeleteDocWithLock: (project_id, doc_id, callback = (error) ->) -> + flushAndDeleteDocWithLock: (project_id, doc_id, options, callback) -> UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, callback - - deleteDocWithLock: (project_id, doc_id, callback) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo RedisManager.removeDocFromMemory, project_id, doc_id, callback + UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, options, callback acceptChangesWithLock: (project_id, doc_id, change_ids, callback = (error) ->) -> UpdateManager = require "./UpdateManager" diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index 7ed408cfcd..b7d38343d4 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -106,30 +106,18 @@ module.exports = HttpController = deleteDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id project_id = req.params.project_id - skip_flush = req.query.skip_flush == 'true' + ignoreFlushErrors = req.query.ignore_flush_errors == 'true' timer = new Metrics.Timer("http.deleteDoc") - if skip_flush - logger.log project_id: project_id, doc_id: doc_id, "deleting doc skipping flush via http (contents may be lost)" + logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http" + DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, { ignoreFlushErrors: ignoreFlushErrors }, (error) -> + timer.done() + # There is no harm in flushing project history if the previous call + # failed and sometimes it is required + HistoryManager.flushProjectChangesAsync project_id - # Warning: This action is destructive. Skipping the flush will lose - # contents that have not been flushed yet. Use this to fix a document in a - # bad state that can't be flushed anyway. - DocumentManager.deleteDocWithLock project_id, doc_id, (error) -> - timer.done() - return next(error) if error? - logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" - res.send 204 # No Content - else - logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http" - DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) -> - timer.done() - # There is no harm in flushing project history if the previous call - # failed and sometimes it is required - HistoryManager.flushProjectChangesAsync project_id - - return next(error) if error? 
- logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" - res.send 204 # No Content + return next(error) if error? + logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" + res.send 204 # No Content flushProject: (req, res, next = (error) ->) -> project_id = req.params.project_id diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.coffee index 0d57687668..b60bb98d5e 100644 --- a/services/document-updater/app/coffee/ProjectManager.coffee +++ b/services/document-updater/app/coffee/ProjectManager.coffee @@ -52,7 +52,7 @@ module.exports = ProjectManager = for doc_id in (doc_ids or []) do (doc_id) -> jobs.push (callback) -> - DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) -> + DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, {}, (error) -> if error? logger.error err: error, project_id: project_id, doc_id: doc_id, "error deleting doc" errors.push(error) diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee index 76ad7f5af5..a8520f7fc1 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee @@ -16,7 +16,7 @@ describe "DocumentManager", -> "./HistoryManager": @HistoryManager = flushDocChangesAsync: sinon.stub() flushProjectChangesAsync: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub()} + "logger-sharelatex": @logger = {log: sinon.stub(), warn: sinon.stub()} "./DocOpsManager": @DocOpsManager = {} "./Metrics": @Metrics = Timer: class Timer @@ -47,7 +47,7 @@ describe "DocumentManager", -> beforeEach -> @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2) - @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback + @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, {}, @callback it "should flush the doc", -> @DocumentManager.flushDocIfLoaded @@ -70,6 +70,25 @@ describe "DocumentManager", -> .calledWithExactly(@project_id, @doc_id) .should.equal true + describe "when a flush error occurs", -> + beforeEach -> + @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2, new Error("boom!")) + @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) + + it "should not remove the doc from redis", (done) -> + @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, {}, (error) => + error.should.exist + @RedisManager.removeDocFromMemory.called.should.equal false + done() + + describe "when ignoring flush errors", -> + it "should remove the doc from redis", (done) -> + @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, { ignoreFlushErrors: true }, (error) => + if error? 
+ return done(error) + @RedisManager.removeDocFromMemory.called.should.equal true + done() + describe "flushDocIfLoaded", -> describe "when the doc is in Redis", -> beforeEach -> @@ -220,7 +239,7 @@ describe "DocumentManager", -> @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) - @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(2) + @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(3) describe "when already loaded", -> beforeEach -> @@ -276,7 +295,7 @@ describe "DocumentManager", -> it "should flush and delete the doc from the doc updater", -> @DocumentManager.flushAndDeleteDoc - .calledWith(@project_id, @doc_id) + .calledWith(@project_id, @doc_id, {}) .should.equal true it "should not flush the project history", -> diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index c182727ebc..00fd16c088 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -275,12 +275,12 @@ describe "HttpController", -> describe "successfully", -> beforeEach -> - @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2) + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3) @HttpController.deleteDoc(@req, @res, @next) it "should flush and delete the doc", -> @DocumentManager.flushAndDeleteDocWithLock - .calledWith(@project_id, @doc_id) + .calledWith(@project_id, @doc_id, { ignoreFlushErrors: false }) .should.equal true it "should flush project history", -> @@ -301,21 +301,23 @@ describe "HttpController", -> it "should time the request", -> @Metrics.Timer::done.called.should.equal true - describe "without flush", -> + describe "ignoring errors", -> beforeEach -> - @req.query.skip_flush = 'true' - @DocumentManager.deleteDocWithLock = sinon.stub().yields() + @req.query.ignore_flush_errors = 'true' + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields() @HttpController.deleteDoc(@req, @res, @next) it "should delete the doc", -> - @DocumentManager.deleteDocWithLock.calledWith(@project_id, @doc_id).should.equal true + @DocumentManager.flushAndDeleteDocWithLock + .calledWith(@project_id, @doc_id, { ignoreFlushErrors: true }) + .should.equal true it "should return a successful No Content response", -> @res.send.calledWith(204).should.equal true describe "when an errors occurs", -> beforeEach -> - @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2, new Error("oops")) + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3, new Error("oops")) @HttpController.deleteDoc(@req, @res, @next) it "should flush project history", -> diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee index 08fb6eab04..596d827726 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee @@ -23,7 +23,7 @@ describe "ProjectManager - flushAndDeleteProject", -> beforeEach (done) -> @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] 
@RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(2) + @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(3) @ProjectManager.flushAndDeleteProjectWithLocks @project_id, {}, (error) => @callback(error) done() @@ -36,7 +36,7 @@ describe "ProjectManager - flushAndDeleteProject", -> it "should delete each doc in the project", -> for doc_id in @doc_ids @DocumentManager.flushAndDeleteDocWithLock - .calledWith(@project_id, doc_id) + .calledWith(@project_id, doc_id, {}) .should.equal true it "should flush project history", -> @@ -54,7 +54,7 @@ describe "ProjectManager - flushAndDeleteProject", -> beforeEach (done) -> @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @DocumentManager.flushAndDeleteDocWithLock = sinon.spy (project_id, doc_id, callback = (error) ->) => + @DocumentManager.flushAndDeleteDocWithLock = sinon.spy (project_id, doc_id, options, callback) => if doc_id == "doc-id-1" callback(@error = new Error("oops, something went wrong")) else @@ -66,7 +66,7 @@ describe "ProjectManager - flushAndDeleteProject", -> it "should still flush each doc in the project", -> for doc_id in @doc_ids @DocumentManager.flushAndDeleteDocWithLock - .calledWith(@project_id, doc_id) + .calledWith(@project_id, doc_id, {}) .should.equal true it "should still flush project history", -> From e2564c9cdd18d8c18e9dc99fd0686d9b28381c9a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 11 Mar 2020 10:25:48 +0000 Subject: [PATCH 578/769] update metrics module to 2.5.1 reduce time window for summary metrics to 60s --- services/document-updater/package-lock.json | 6 +++--- services/document-updater/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index f66bcc3fd4..852c1670c1 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -2183,9 +2183,9 @@ "integrity": "sha512-2403MfnVypWSNIEpmQ26/ObZ5kSUx37E8NHRvriw0+I8Sne7k0HGuLGCk0OrCqURh4UIygD0cSsYq+Ll+kzNqA==" }, "metrics-sharelatex": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.4.0.tgz", - "integrity": "sha512-FbIRRhReVCEM4ETzh+qVMm3lP33zSSAdrHfSTtegkcB7GGi1kYs+Qt1/dXFawUA8pIZRQTtsfxiS1nZamiSwHg==", + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.5.1.tgz", + "integrity": "sha512-C2gmkl/tUnq3IlSX/x3dixGhdvfD6H9FR9mBf9lnkeyy2arafxhCU6u+1IQj6byjBM7vGpYHyjwWnmoi3Vb+ZQ==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 6051cca512..e01ebbbb8d 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,7 +26,7 @@ "express": "3.11.0", "lodash": "^4.17.13", "logger-sharelatex": "^1.7.0", - "metrics-sharelatex": "^2.2.0", + "metrics-sharelatex": "^2.5.1", "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", "request": "^2.47.0", From b631a741ac98009890ffc6ca99e282450e2b4552 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 19 Mar 2020 15:06:36 +0000 Subject: [PATCH 579/769] update build scripts --- services/document-updater/Dockerfile | 1 - services/document-updater/Makefile | 1 - 
services/document-updater/buildscript.txt | 10 +++++----- services/document-updater/docker-compose.ci.yml | 1 - services/document-updater/docker-compose.yml | 1 - 5 files changed, 5 insertions(+), 9 deletions(-) diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index e538fb48d9..eaa771000f 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -1,7 +1,6 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 FROM node:10.19.0 as base diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 64646d796f..64224ff99c 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -1,7 +1,6 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index b7928a7044..d27e04cef1 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -1,10 +1,10 @@ document-updater ---public-repo=True ---language=coffeescript ---env-add= ---node-version=10.19.0 --acceptance-creds=None --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops +--env-add= --env-pass-through= ---script-version=1.3.5 +--language=coffeescript +--node-version=10.19.0 +--public-repo=True +--script-version=2.0.0 diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index b99da9b18e..d609e7b5ec 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -1,7 +1,6 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 version: "2.3" diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 6a1bbb1005..161476b8d1 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -1,7 +1,6 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 version: "2.3" From aa324a19206ca39bc5cbc671a76a82e41c7ca8c4 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 19 Mar 2020 15:08:59 +0000 Subject: [PATCH 580/769] npm audit fix --- services/document-updater/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 852c1670c1..f90c8882b9 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -853,9 +853,9 @@ } }, "acorn": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.0.tgz", - "integrity": "sha512-gac8OEcQ2Li1dxIEWGZzsp2BitJxwkwcOm0zHAJLcPJaVvm58FRnk6RkuLRpU1EujipU2ZFODv2P9DLMfnV8mw==" + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==" }, "agent-base": { "version": "6.0.0", From f069a2515d83fd7f27e653998c4f39c3e15494f1 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 19 Mar 2020 15:20:32 +0000 Subject: [PATCH 581/769] upgrade express --- services/document-updater/app.coffee | 15 +- services/document-updater/package-lock.json | 690 +++++++----------- services/document-updater/package.json | 3 +- .../coffee/helpers/MockWebApi.coffee | 3 +- 4 files changed, 269 insertions(+), 442 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 96c7514f64..6ff06f2f2a 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -21,15 +21,14 @@ mongojs = require "./app/js/mongojs" async = require "async" Path = require "path" +bodyParser = require "body-parser" Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger) Metrics.event_loop.monitor(logger, 100) app = express() -app.configure -> - app.use(Metrics.http.monitor(logger)); - app.use express.bodyParser({limit: (Settings.max_doc_length + 64 * 1024)}) - app.use app.router +app.use(Metrics.http.monitor(logger)); +app.use bodyParser({limit: (Settings.max_doc_length + 64 * 1024)}) Metrics.injectMetricsRoute(app) DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) @@ -68,11 +67,11 @@ app.get '/flush_all_projects', HttpCont app.get '/flush_queued_projects', HttpController.flushQueuedProjects app.get '/total', (req, res)-> - timer = new Metrics.Timer("http.allDocList") + timer = new Metrics.Timer("http.allDocList") RedisManager.getCountOfDocsInMemory (err, count)-> timer.done() res.send {total:count} - + app.get '/status', (req, res)-> if Settings.shuttingDown res.send 503 # Service unavailable @@ -87,7 +86,7 @@ app.get "/health_check/redis", (req, res, next) -> res.send 500 else res.send 200 - + docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) app.get "/health_check/redis_cluster", (req, res, next) -> docUpdaterRedisClient.healthCheck (error) -> @@ -99,7 +98,7 @@ app.get "/health_check/redis_cluster", (req, res, next) -> app.get "/health_check", (req, res, next) -> async.series [ - (cb) -> + (cb) -> pubsubClient.healthCheck (error) -> if error? 
logger.err {err: error}, "failed redis health check" diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index f90c8882b9..2d6d7ac858 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -844,12 +844,12 @@ } }, "accepts": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.0.3.tgz", - "integrity": "sha512-cZqKqO3VXtuIZ5vQLVc8M6JDFVTZoVwZrlmTCA1nH9EoN5v6ZWWStKvd1A5RWpduRVXD55px3t75TvS7JdLfHA==", + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", + "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", "requires": { - "mime": "~1.2.11", - "negotiator": "0.4.6" + "mime-types": "~2.1.24", + "negotiator": "0.6.2" } }, "acorn": { @@ -891,6 +891,11 @@ "uri-js": "^4.2.2" } }, + "array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + }, "arrify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", @@ -973,21 +978,6 @@ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" }, - "base64-url": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/base64-url/-/base64-url-1.3.3.tgz", - "integrity": "sha512-UiVPRwO/m133KIQrOEIqO07D8jaYjFIx7/lYRWTRVR23tDSn00Ves6A+Bk0eLmhyz6IJGSFlNCKUuUBO2ssytA==" - }, - "basic-auth-connect": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/basic-auth-connect/-/basic-auth-connect-1.0.0.tgz", - "integrity": "sha512-kiV+/DTgVro4aZifY/hwRwALBISViL5NP4aReaR2EVJEObpbUBHIkdJh/YpcoEiYt7nBodZ6U2ajZeZvSxUCCg==" - }, - "batch": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.1.tgz", - "integrity": "sha512-OXRjc65VJvFtb7JD5HszSI1WWwsI6YnJS7Qmlx1CaDQrZ5urNIeRjtTyBe1YapNXyoWzrcc4yqg4rNe8YMyong==" - }, "bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", @@ -1015,17 +1005,20 @@ "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, "body-parser": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.4.3.tgz", - "integrity": "sha512-+/wGpsrfMR0d7nPNnmpKAPQVXg37cU3YVvR/hThORfbiJYvzmGHf+A/x0QWtE/s2XMdj2/UTQUweVqNPlkZlEw==", + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", + "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", "requires": { - "bytes": "1.0.0", - "depd": "0.3.0", - "iconv-lite": "0.4.3", - "media-typer": "0.2.0", - "qs": "0.6.6", - "raw-body": "1.2.2", - "type-is": "1.3.1" + "bytes": "3.1.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "on-finished": "~2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" } }, "brace-expansion": { @@ -1048,11 +1041,6 @@ "resolved": "https://registry.npmjs.org/bson/-/bson-1.0.9.tgz", "integrity": "sha512-IQX9/h7WdMBIW/q/++tGd+emQr0XMdeZ6icnT/74Xk9fnabWn+gZgpE+9V+gujL3hhJOoNrnDVY7tWdzc7NUTg==" }, - "buffer-crc32": { - "version": "0.2.3", - "resolved": 
"https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.3.tgz", - "integrity": "sha512-HLvoSqq1z8fJEcT1lUlJZ4OJaXJZ1wsWm0+fBxkz9Bdf/WphA4Da7FtGUguNNyEXL4WB0hNMTaWmdFRFPy8YOQ==" - }, "buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", @@ -1101,9 +1089,9 @@ } }, "bytes": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-1.0.0.tgz", - "integrity": "sha512-/x68VkHLeTl3/Ll8IvxdwzhrT+IyKc52e/oyHhA2RwqPqswSnjVbSddfPRwAsJtbilMAPSRWwAlpxdYsSWOTKQ==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" }, "caseless": { "version": "0.12.0", @@ -1148,86 +1136,29 @@ "delayed-stream": "~1.0.0" } }, - "commander": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", - "integrity": "sha512-uoVVA5dchmxZeTMv2Qsd0vhn/RebJYsWo4all1qtrUL3BBhQFn4AQDF4PL+ZvOeK7gczXKEZaSCyMDMwFBlpBg==", - "requires": { - "keypress": "0.1.x" - } - }, - "compressible": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/compressible/-/compressible-1.1.0.tgz", - "integrity": "sha512-rCwUIlpYk3MyJwPuNJUFY4GkusYq33phMUj0iuJxpmRa7FVyFyTy4O4S2DxheA8LBWZcd3ZiotCR9GZE2PLyzQ==" - }, - "compression": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.0.7.tgz", - "integrity": "sha512-358POVi/83+vOraY0hLNi1s/7G7e3MiZKVlrYiu422gWWjI1AKBXa4sQXnoYYLAyB29jUdo8bqFL7C4JX6kBnA==", - "requires": { - "accepts": "1.0.3", - "bytes": "1.0.0", - "compressible": "1.1.0", - "on-headers": "0.0.0", - "vary": "0.1.0" - } - }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, - "connect": { - "version": "2.20.2", - "resolved": "https://registry.npmjs.org/connect/-/connect-2.20.2.tgz", - "integrity": "sha512-D5vV4EMPQFc/mmOoUWm7sRtrWNMmKRHJR2NBkNMjzudrGPJfDNdMGsPWfxbThVv4GAiEp1O1EVW9IZe3Ovao+w==", - "requires": { - "basic-auth-connect": "1.0.0", - "body-parser": "1.4.3", - "bytes": "1.0.0", - "compression": "1.0.7", - "connect-timeout": "1.1.1", - "cookie": "0.1.2", - "cookie-parser": "1.3.1", - "cookie-signature": "1.0.3", - "csurf": "1.2.2", - "debug": "1.0.2", - "depd": "0.3.0", - "errorhandler": "1.1.0", - "express-session": "1.4.0", - "finalhandler": "0.0.2", - "fresh": "0.2.2", - "media-typer": "0.2.0", - "method-override": "2.0.2", - "morgan": "1.1.1", - "multiparty": "3.2.9", - "on-headers": "0.0.0", - "parseurl": "1.0.1", - "pause": "0.0.1", - "qs": "0.6.6", - "response-time": "2.0.0", - "serve-favicon": "2.0.1", - "serve-index": "1.1.2", - "serve-static": "1.2.3", - "type-is": "1.3.1", - "vhost": "2.0.0" - } - }, - "connect-timeout": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/connect-timeout/-/connect-timeout-1.1.1.tgz", - "integrity": "sha512-HS5OPZHc0cAJkzE1jgGjwL95rzF+Znk10Pq0vpUEm4ieDV+4HiAu4U/I71G5Epqs3b3YDeHkxBwE7lZtDRpNPQ==", - "requires": { - "debug": "1.0.2", - "on-headers": "0.0.0" - } - }, "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, + "content-disposition": { 
+ "version": "0.5.3", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", + "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "content-type": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + }, "continuation-local-storage": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz", @@ -1238,48 +1169,20 @@ } }, "cookie": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.2.tgz", - "integrity": "sha512-+mHmWbhevLwkiBf7QcbZXHr0v4ZQQ/OgHk3fsQHrsMMiGzuvAmU/YMUR+ZfrO/BLAGIWFfx2Z7Oyso0tZR/wiA==" - }, - "cookie-parser": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.3.1.tgz", - "integrity": "sha512-DExg4B+vImwqftLcz11Kxv5wGEOnfotGbnxwNUV7BpljO0nd+p9dIJ40p0eD2YKBq012vONSETBVO0aXAiY4WA==", - "requires": { - "cookie": "0.1.2", - "cookie-signature": "1.0.3" - } + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", + "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" }, "cookie-signature": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.3.tgz", - "integrity": "sha512-/KzKzsm0OlguYov01OlOpTkX5MhBKUmfL/KMum7R80rPKheb9AwUzr78TwtBt1OdbnWrt4X+wxbTfcQ3noZqHw==" + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" }, - "csrf-tokens": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/csrf-tokens/-/csrf-tokens-2.0.0.tgz", - "integrity": "sha512-IzcrVVxQJvHoeNSSA9zc9LqIBUPM3OdRUzJ/4ooSbROhvJOSAi6qve2J6XEhmltcECmf/UiR/pgzkHXY5x1mGA==", - "requires": { - "base64-url": "1", - "rndm": "1", - "scmp": "~0.0.3", - "uid-safe": "1" - } - }, - "csurf": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/csurf/-/csurf-1.2.2.tgz", - "integrity": "sha512-wCwNMBSAzrj4918iemazALbLZBYQRVN4BpnSpVBcE5Cx5cx/5HPkkpidpL0k+Tbk3Dlg0pDSY3mUvh7Gqc5H7w==", - "requires": { - "csrf-tokens": "~2.0.0" - } - }, "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", @@ -1294,11 +1197,11 @@ } }, "debug": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.2.tgz", - "integrity": "sha512-T9bufXIzQvCa4VrTIpLvvwdLhH+wuBtvIJJA3xgzVcaVETGmTIWMfEXQEd1K4p8BaRmQJPn6MPut38H7YQ+iIA==", + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { - "ms": "0.6.2" + "ms": "2.0.0" } }, "deep-eql": { @@ -1334,9 +1237,14 @@ "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==" }, "depd": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-0.3.0.tgz", - 
"integrity": "sha512-Uyx3FgdvEYlpA3W4lf37Ide++2qOsjLlJ7dap0tbM63j/BxTCcxmyIOO6PXbKbOuNSko+fsDHzzx1DUeo1+3fA==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + }, + "destroy": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" }, "diff": { "version": "3.5.0", @@ -1423,11 +1331,6 @@ "safe-buffer": "^5.0.1" } }, - "ee-first": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.0.3.tgz", - "integrity": "sha512-1q/3kz+ZwmrrWpJcCCrBZ3JnBzB1BMA5EVW9nxnIP1LxDZ16Cqs9VdolqLWlExet1vU+bar3WSkAa4/YrA9bIw==" - }, "emitter-listener": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", @@ -1436,6 +1339,11 @@ "shimmer": "^1.2.0" } }, + "encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + }, "end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -1449,15 +1357,6 @@ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" }, - "errorhandler": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/errorhandler/-/errorhandler-1.1.0.tgz", - "integrity": "sha512-Xn9tbC+zOP5uc95i72z2b1G1HEF2TuIwZNOamHZLxxumtBXgSxRDeTB5HTNB6URwtfC0fRbwHBdSGtpntjlgcg==", - "requires": { - "accepts": "1.0.3", - "escape-html": "1.0.1" - } - }, "es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", @@ -1472,9 +1371,9 @@ } }, "escape-html": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.1.tgz", - "integrity": "sha512-z6kAnok8fqVTra7Yu77dZF2Y6ETJlxH58wN38wNyuNQLm8xXdKnfNrlSmfXsTePWP03rRVUKHubtUwanwUi7+g==" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" }, "escape-string-regexp": { "version": "1.0.5", @@ -1482,6 +1381,11 @@ "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true }, + "etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + }, "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -1497,51 +1401,40 @@ } }, "express": { - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/express/-/express-3.11.0.tgz", - "integrity": "sha512-/647bsD/48HoC+myehc3S93C6KUBpncWSjxEImmRajSlnI7McA9F9QFb6gc6Vxp9KfO/S7OiETvoT2xU0nDfVw==", + "version": "4.17.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", + "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", "requires": { - "buffer-crc32": "0.2.3", - "commander": "1.3.2", - "connect": "2.20.2", - "cookie": "0.1.2", - "cookie-signature": "1.0.3", - "debug": "1.0.2", - "depd": "0.3.0", - "escape-html": "1.0.1", - "fresh": "0.2.2", - "merge-descriptors": "0.0.2", - "methods": "1.0.1", - "mkdirp": "0.5.0", - "parseurl": "1.0.1", - "proxy-addr": "1.0.1", - "range-parser": 
"1.0.0", - "send": "0.4.3", - "vary": "0.1.0" - }, - "dependencies": { - "mkdirp": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.0.tgz", - "integrity": "sha512-xjjNGy+ry1lhtIKcr2PT6ok3aszhQfgrUDp4OZLHacgRgFmF6XR9XCOJVcXlVGQonIqXcK1DvqgKKQOPWYGSfw==", - "requires": { - "minimist": "0.0.8" - } - } - } - }, - "express-session": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.4.0.tgz", - "integrity": "sha512-W/4B7cE/+IMX+zrmY+A3xUKIsvKc85O/75i+sAUVztqO6cwgNhjMidy1UVr6jn75NUmrce3sBQeHgMZuNk2XCw==", - "requires": { - "buffer-crc32": "0.2.3", - "cookie": "0.1.2", - "cookie-signature": "1.0.3", - "debug": "1.0.2", - "on-headers": "0.0.0", - "rand-token": "0.2.1", - "utils-merge": "1.0.0" + "accepts": "~1.3.7", + "array-flatten": "1.1.1", + "body-parser": "1.19.0", + "content-disposition": "0.5.3", + "content-type": "~1.0.4", + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~1.1.2", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "~1.1.2", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.5", + "qs": "6.7.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.1.2", + "send": "0.17.1", + "serve-static": "1.14.1", + "setprototypeof": "1.1.1", + "statuses": "~1.5.0", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" } }, "extend": { @@ -1575,12 +1468,17 @@ "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" }, "finalhandler": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-0.0.2.tgz", - "integrity": "sha512-SbpQfvWVwWEBlPTQyaM9gs0D5404ENTC0x2jzbb7t+P+EOD/cBlWjAAvfozIQYtOepUuNkxoLNLCK9/kS29f4w==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", + "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", "requires": { - "debug": "1.0.2", - "escape-html": "1.0.1" + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", + "unpipe": "~1.0.0" } }, "findit2": { @@ -1588,14 +1486,6 @@ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" }, - "finished": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/finished/-/finished-1.2.2.tgz", - "integrity": "sha512-HPJ8x7Gn1pmTS1zWyMoXmQ1yxHkYHRoFsBI66ONq4PS9iWBJy1iHYXOSqMWNp3ksMXfrBpenkSwBhl9WG4zr4Q==", - "requires": { - "ee-first": "1.0.3" - } - }, "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", @@ -1644,10 +1534,15 @@ } } }, + "forwarded": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + }, "fresh": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.2.tgz", - "integrity": "sha512-ZGGi8GROK//ijm2gB33sUuN9TjN1tC/dvG4Bt4j6IWrVGpMmudUBCxx+Ir7qePsdREfkpQC4FL8W0jeSOsgv1w==" + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": 
"sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" }, "fs.realpath": { "version": "1.0.0", @@ -1797,6 +1692,25 @@ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, + "http-errors": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", + "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + }, + "dependencies": { + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + } + } + }, "http-proxy-agent": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", @@ -1857,9 +1771,12 @@ } }, "iconv-lite": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.3.tgz", - "integrity": "sha512-fBUZHWVujxJd0hOJLaN4Zj4h1LeOn+qi5qyts4HFFa0jaOo/0E6DO1UsJReZV0qwiIzeaqm/1LhYBbvvGjQkNg==" + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } }, "inflight": { "version": "1.0.6", @@ -1907,9 +1824,9 @@ } }, "ipaddr.js": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-0.1.2.tgz", - "integrity": "sha512-MGrEjHz4Hk5UVpJXZQ2tHB+bp6xgdRKCAEWdrgFsoAmXCgKAPtj8LqMxgvlWEAj9aN+PpTcvE051uZU3K3kLSQ==" + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" }, "is": { "version": "3.3.0", @@ -1941,11 +1858,6 @@ "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" }, - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" - }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", @@ -2009,11 +1921,6 @@ "safe-buffer": "^5.0.1" } }, - "keypress": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz", - "integrity": "sha512-x0yf9PL/nx9Nw9oLL8ZVErFAk85/lslwEP7Vz7s5SI1ODXZIgit3C5qyWjw4DxOuO/3Hb4866SQh28a1V1d+WA==" - }, "lodash": { "version": "4.17.15", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", @@ -2153,34 +2060,24 @@ "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, "media-typer": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.2.0.tgz", - "integrity": "sha512-TSggxYk75oP4tae7JkT8InpcFGUP4340zg1dOWjcu9qcphaDKtXEuNUv3OD4vJ+gVTvIDK797W0uYeNm8qqsDg==" + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" }, "merge-descriptors": { - "version": 
"0.0.2", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.2.tgz", - "integrity": "sha512-dYBT4Ep+t/qnPeJcnMymmhTdd4g8/hn48ciaDqLAkfRf8abzLPS6Rb6EBdz5CZCL8tzZuI5ps9MhGQGxk+EuKg==" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" }, "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" }, - "method-override": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/method-override/-/method-override-2.0.2.tgz", - "integrity": "sha512-VdXhehVbkQcJD4MJisBqFjCGLlCQ5bhVkJqT9VpSgXyCccskmEYn/MA52pnDlqqffmkFazjGbFEwZFKwOIAKXg==", - "requires": { - "methods": "1.0.1", - "parseurl": "1.0.1", - "vary": "0.1.0" - } - }, "methods": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.0.1.tgz", - "integrity": "sha512-2403MfnVypWSNIEpmQ26/ObZ5kSUx37E8NHRvriw0+I8Sne7k0HGuLGCk0OrCqURh4UIygD0cSsYq+Ll+kzNqA==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, "metrics-sharelatex": { "version": "2.5.1", @@ -2205,9 +2102,9 @@ } }, "mime": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha512-Ysa2F/nqTNGHhhm9MV8ure4+Hc+Y8AWiqUdHxsO7xu8zc92ND9f3kpALHjaP026Ft17UfxrMt95c50PLUeynBw==" + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" }, "mime-db": { "version": "1.43.0", @@ -2215,9 +2112,12 @@ "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==" }, "mime-types": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.0.tgz", - "integrity": "sha512-aP3BmIq4ZAPJt6KywU5HbiG0UwCTHZA2JWHO9aLaxyr8OhPOiK4RPSZcS6TDS7zNzGDC3AACnq/XTuEsd/M1Kg==" + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", + "requires": { + "mime-db": "1.43.0" + } }, "minimatch": { "version": "3.0.4", @@ -2415,27 +2315,10 @@ } } }, - "morgan": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.1.1.tgz", - "integrity": "sha512-Jx1pZHnbZ43TFAeY0NVuLqpeXX0O2aL7todwFModvpjZCGR+vBTKH0wOKQjwK1wgO/cERhFISIf4roSj1fx5Jg==", - "requires": { - "bytes": "1.0.0" - } - }, "ms": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", - "integrity": "sha512-/pc3eh7TWorTtbvXg8je4GvrvEqCfH7PA3P7iW01yL2E53FKixzgMBaQi0NOPbMJqY34cBSvR0tZtmlTkdUG4A==" - }, - "multiparty": { - "version": "3.2.9", - "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-3.2.9.tgz", - "integrity": "sha512-zkG0d0TVz4yw8qDgsP7zZepl9GNOV2kN/CwBpiSXbOP41P824Eu0xrQ+6DnOgni8e+2DNeBZrVI8mg2ppVoWtg==", - "requires": { - "readable-stream": "~1.1.9", - "stream-counter": "~0.2.0" - } + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, "mv": { "version": "2.1.1", @@ -2464,11 +2347,6 @@ "resolved": 
"https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, - "native-or-bluebird": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/native-or-bluebird/-/native-or-bluebird-1.1.2.tgz", - "integrity": "sha512-Bgn5FHNkd+lPTjIzq1NVU/VZTvPKFvhdIDEyYjxrKNrScSXbVvNVzOKwoleysun0/HoN7R+TXmK9mCtEs84osA==" - }, "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", @@ -2476,9 +2354,9 @@ "optional": true }, "negotiator": { - "version": "0.4.6", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.4.6.tgz", - "integrity": "sha512-nkhZDoiMZOCbMRPfDAilhyb8sETDhHP+zDCUv+JD26OSPOrYG+/76uooeqz3WTVh7BvQE41VV0YMTGKUgn9GQg==" + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" }, "node-fetch": { "version": "2.6.0", @@ -2510,11 +2388,6 @@ } } }, - "on-headers": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-0.0.0.tgz", - "integrity": "sha512-sd6W+EIQTNDbMndkGZqf1q6x3PlMxAIoufoNhcfpvzrXhtN+IWVyM2sjdsZ3p+TVddtTG5u0lujTglZ+R1VGvQ==" - }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -2552,9 +2425,9 @@ "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" }, "parseurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.0.1.tgz", - "integrity": "sha512-6W9+0+9Ihayqwjgp4OaLLqZ3KDtqPY2PtUPz8YNiy4PamjJv+7x6J9GO93O9rUZOLgaanTPxsKTasxqKkO1iSw==" + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" }, "path-is-absolute": { "version": "1.0.1", @@ -2566,10 +2439,10 @@ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, - "pause": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", - "integrity": "sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg==" + "path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, "performance-now": { "version": "2.1.0", @@ -2623,11 +2496,12 @@ } }, "proxy-addr": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-1.0.1.tgz", - "integrity": "sha512-rIUGzBlSfkJMWWCgsd4N5wvVSNAcJZg//UwPZumDIbScHRUzuSOjBmIdyICiKkB9yArv+er9qC6RA/NL3AWc6A==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", + "integrity": "sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==", "requires": { - "ipaddr.js": "0.1.2" + "forwarded": "~0.1.2", + "ipaddr.js": "1.9.1" } }, "psl": { @@ -2701,19 +2575,14 @@ "integrity": "sha512-ZOxMuWPMJnsUdYhuQ9glpZwKhB4cm8ubYFy1nNCY8TkSAuZun5fd8jCDTlf2ykWnK8x9HGn1stNtLeG179DebQ==" }, "qs": { - "version": "0.6.6", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", - "integrity": 
"sha512-kN+yNdAf29Jgp+AYHUmC7X4QdJPR8czuMWLNLc0aRxkQ7tB3vJQEONKKT9ou/rW7EbqVec11srC9q9BiVbcnHA==" - }, - "rand-token": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/rand-token/-/rand-token-0.2.1.tgz", - "integrity": "sha512-yEiCpxsNXZ78N3oEsCZdvv2xAWXUmWCSIetJfMSbvhTlENOozW7ax0lPhonieVe1HCwa/I82Djgy79caeVMysw==" + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", + "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" }, "range-parser": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.0.0.tgz", - "integrity": "sha512-wOH5LIH2ZHo0P7/bwkR+aNbJ+kv3CHVX4B8qs9GqbtY29fi1bGPV5xczrutN20G+Z4XhRqRMTW3q0S4iyJJPfw==" + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raven": { "version": "1.1.3", @@ -2740,23 +2609,14 @@ } }, "raw-body": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.2.2.tgz", - "integrity": "sha512-52kUCLQKKfbzsJtWdlQmrWwhR8WPc8zsCmIDMEygfiEgT3E/AApymJo8eza+zgaLnDxbNRq+U/UXR79s4uX1qw==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", + "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", "requires": { - "bytes": "1", - "iconv-lite": "0.4.3" - } - }, - "readable-stream": { - "version": "1.1.14", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", - "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" + "bytes": "3.1.0", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" } }, "redis": { @@ -2939,14 +2799,6 @@ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==" }, - "response-time": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/response-time/-/response-time-2.0.0.tgz", - "integrity": "sha512-1PeD/WjcPWgv4c1Lpfh+whxgOxauMckWZMWBJNVBXg4Sz/MR1bvtA2V0KOr4gYObkp1GW2NyyiNsJkNMtTOt3w==", - "requires": { - "on-headers": "0.0.0" - } - }, "retry-axios": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", @@ -2985,11 +2837,6 @@ "glob": "^6.0.1" } }, - "rndm": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/rndm/-/rndm-1.2.0.tgz", - "integrity": "sha512-fJhQQI5tLrQvYIYFpOnFinzv9dwmR7hRnUz1XqP3OJ1jIweTNOd6aTO4jwQSgcBSFUB+/KHJxuGneime+FdzOw==" - }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", @@ -3024,56 +2871,54 @@ } } }, - "scmp": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/scmp/-/scmp-0.0.3.tgz", - "integrity": "sha512-ya4sPuUOfcrJnfC+OUqTFgFVBEMOXMS1Xopn0wwIhxKwD4eveTwJoIUN9u1QHJ47nL29/m545dV8KqI92MlHPw==" - }, "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" }, "send": { - "version": "0.4.3", - "resolved": 
"https://registry.npmjs.org/send/-/send-0.4.3.tgz", - "integrity": "sha512-Tl3/iKtlp1WM0hDyackntOVwx5kc8GET/zgEj9AOYRX5ideM/33FeRYk4L19IqioGxCkxHSyq1PThVs6PVvk+w==", + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", + "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", "requires": { - "debug": "1.0.2", - "escape-html": "1.0.1", - "finished": "1.2.2", - "fresh": "0.2.2", - "mime": "1.2.11", - "range-parser": "~1.0.0" - } - }, - "serve-favicon": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/serve-favicon/-/serve-favicon-2.0.1.tgz", - "integrity": "sha512-ER7Nk+que+Og6kDJpADjLMkTkllBKWz9FPef5A+uELiYAODTjaMJMszKhzUzsNcvqXM5+mzAdpv/6FaxRlJUng==", - "requires": { - "fresh": "0.2.2" - } - }, - "serve-index": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.1.2.tgz", - "integrity": "sha512-hGLXKYyzxrFTDEtrNELQ61nLk5jw3Mfm6piJsefpb6fMQDlmW+kunRe09s68DHfSilh1UN5pU7ZWNFn7WmDbTQ==", - "requires": { - "accepts": "1.0.3", - "batch": "0.5.1" + "debug": "2.6.9", + "depd": "~1.1.2", + "destroy": "~1.0.4", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "~1.7.2", + "mime": "1.6.0", + "ms": "2.1.1", + "on-finished": "~2.3.0", + "range-parser": "~1.2.1", + "statuses": "~1.5.0" + }, + "dependencies": { + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" + } } }, "serve-static": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.2.3.tgz", - "integrity": "sha512-xaOEJYYnhmT2iVnDHcPullns+dFGC18BHseW1ZzkddtPWe4Ot/ZdifPFYk14r+tdWpVNWtXClRRENQ9ODd1Eeg==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", + "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", "requires": { - "escape-html": "1.0.1", - "parseurl": "1.0.1", - "send": "0.4.3" + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.17.1" } }, + "setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" + }, "settings-sharelatex": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", @@ -3156,13 +3001,10 @@ "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" }, - "stream-counter": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz", - "integrity": "sha512-GjA2zKc2iXUUKRcOxXQmhEx0Ev3XHJ6c8yWGqhQjWwhGrqNwSsvq9YlRLgoGtZ5Kx2Ln94IedaqJ5GUG6aBbxA==", - "requires": { - "readable-stream": "~1.1.8" - } + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" }, "stream-events": { "version": "1.0.5", @@ -3177,11 +3019,6 @@ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", "integrity": 
"sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" - }, "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", @@ -3291,6 +3128,11 @@ "to-no-case": "^1.0.0" } }, + "toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" + }, "tough-cookie": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", @@ -3332,28 +3174,12 @@ "integrity": "sha512-j55pzONIdg7rdtJTRZPKIbV0FosUqYdhHK1aAYJIrUvejv1VVyBokrILE8KQDT4emW/1Ev9tx+yZG+AxuSBMmA==" }, "type-is": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.3.1.tgz", - "integrity": "sha512-PLks4DIqAA9z7zHH0VuUv0aZ36t6cq8/K0y0OdHJtTkfSbGHhNvKh3pw1PPakXkjlAskC4apJlxeYcGpKZWvkA==", + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "requires": { - "media-typer": "0.2.0", - "mime-types": "1.0.0" - } - }, - "uid-safe": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-1.1.0.tgz", - "integrity": "sha512-7+QtWs9zioL/iQX61G+4h3EPyr3H+tINIp0IAV4EL32vdf7qmFyuW0BgRqWl7p5oZOsEQrlL0bY7m5D8tp7b1w==", - "requires": { - "base64-url": "1.2.1", - "native-or-bluebird": "~1.1.2" - }, - "dependencies": { - "base64-url": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/base64-url/-/base64-url-1.2.1.tgz", - "integrity": "sha512-V8E0l1jyyeSSS9R+J9oljx5eq2rqzClInuwaPcyuv0Mm3ViI/3/rcc4rCEO8i4eQ4I0O0FAGYDA2i5xWHHPhzg==" - } + "media-typer": "0.3.0", + "mime-types": "~2.1.24" } }, "underscore": { @@ -3361,6 +3187,11 @@ "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" }, + "unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + }, "uri-js": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", @@ -3375,9 +3206,9 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "utils-merge": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.0.tgz", - "integrity": "sha512-HwU9SLQEtyo+0uoKXd1nkLqigUWLB+QuNQR4OcmB73eWqksM5ovuqcycks2x043W8XVb75rG1HQ0h93TMXkzQQ==" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" }, "uuid": { "version": "3.4.0", @@ -3385,9 +3216,9 @@ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, "vary": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/vary/-/vary-0.1.0.tgz", - "integrity": "sha512-tyyeG46NQdwyVP/RsWLSrT78ouwEuvwk9gK8vQK4jdXmqoXtTXW+vsCfNcnqRhigF8olV34QVZarmAi6wBV2Mw==" + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" }, "verror": { "version": "1.10.0", @@ -3399,11 +3230,6 @@ "extsprintf": "^1.2.0" } }, - "vhost": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/vhost/-/vhost-2.0.0.tgz", - "integrity": "sha512-TSExWM12MVtvIuBLMPyBuWBQLbHnmDZ3zfsoZwcUmKxzPX8l/cHKl5vVfbo8/KZ56UBAc/tTYXbaDGVDaIcrWw==" - }, "walkdir": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index e01ebbbb8d..2633eb5d39 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,9 +21,10 @@ }, "dependencies": { "async": "^2.5.0", + "body-parser": "^1.19.0", "bunyan": "~0.22.1", "coffee-script": "~1.7.0", - "express": "3.11.0", + "express": "4.17.1", "lodash": "^4.17.13", "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.5.1", diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index 4f73017a1d..daea1dcf40 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -1,4 +1,5 @@ express = require("express") +bodyParser = require("body-parser") app = express() MAX_REQUEST_SIZE = 2*(2*1024*1024 + 64*1024) @@ -36,7 +37,7 @@ module.exports = MockWebApi = else res.send 404 - app.post "/project/:project_id/doc/:doc_id", express.bodyParser({limit: MAX_REQUEST_SIZE}), (req, res, next) => + app.post "/project/:project_id/doc/:doc_id", bodyParser({limit: MAX_REQUEST_SIZE}), (req, res, next) => MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, (error) -> if error? 
res.send 500 From 21194e52ca69bdcb0dfee99e07d31129bfe5351d Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 19 Mar 2020 15:23:45 +0000 Subject: [PATCH 582/769] more upgrades --- services/document-updater/package-lock.json | 1229 ++++++++++++++++--- services/document-updater/package.json | 8 +- 2 files changed, 1085 insertions(+), 152 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 2d6d7ac858..84cd4fd6c9 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -4,6 +4,124 @@ "lockfileVersion": 1, "requires": true, "dependencies": { + "@babel/code-frame": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", + "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "requires": { + "@babel/highlight": "^7.8.3" + } + }, + "@babel/generator": { + "version": "7.8.8", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.8.8.tgz", + "integrity": "sha512-HKyUVu69cZoclptr8t8U5b6sx6zoWjh8jiUhnuj3MpZuKT2dJ8zPTuiy31luq32swhI0SpwItCIlU8XW7BZeJg==", + "requires": { + "@babel/types": "^7.8.7", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + }, + "dependencies": { + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" + } + } + }, + "@babel/helper-function-name": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz", + "integrity": "sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==", + "requires": { + "@babel/helper-get-function-arity": "^7.8.3", + "@babel/template": "^7.8.3", + "@babel/types": "^7.8.3" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz", + "integrity": "sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==", + "requires": { + "@babel/types": "^7.8.3" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", + "integrity": "sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", + "requires": { + "@babel/types": "^7.8.3" + } + }, + "@babel/highlight": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz", + "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==", + "requires": { + "chalk": "^2.0.0", + "esutils": "^2.0.2", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.8.8", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.8.8.tgz", + "integrity": "sha512-mO5GWzBPsPf6865iIbzNE0AvkKF3NE+2S3eRUpE+FE07BOAkXh6G+GW/Pj01hhXjve1WScbaIO4UlY1JKeqCcA==" + }, + "@babel/template": { + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", + "integrity": "sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==", + "requires": { + "@babel/code-frame": "^7.8.3", + "@babel/parser": "^7.8.6", + 
"@babel/types": "^7.8.6" + } + }, + "@babel/traverse": { + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.8.6.tgz", + "integrity": "sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A==", + "requires": { + "@babel/code-frame": "^7.8.3", + "@babel/generator": "^7.8.6", + "@babel/helper-function-name": "^7.8.3", + "@babel/helper-split-export-declaration": "^7.8.3", + "@babel/parser": "^7.8.6", + "@babel/types": "^7.8.6", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.13" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, + "@babel/types": { + "version": "7.8.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.8.7.tgz", + "integrity": "sha512-k2TreEHxFA4CjGkL+GYjRyx35W0Mr7DP5+9q6WMkyKXB+904bYmG40syjMFV0oLlhhFCwWl0vA0DyzTDkwAiJw==", + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, "@google-cloud/common": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.3.0.tgz", @@ -315,7 +433,7 @@ "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" }, "debug": { "version": "3.2.6", @@ -618,11 +736,6 @@ "safe-buffer": "^5.0.1" } }, - "methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" - }, "mime": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", @@ -814,14 +927,6 @@ "combined-stream": "^1.0.6", "mime-types": "^2.1.12" } - }, - "mime-types": { - "version": "2.1.26", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", - "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", - "requires": { - "mime-db": "1.43.0" - } } } }, @@ -891,6 +996,40 @@ "uri-js": "^4.2.2" } }, + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==" + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" + } + }, + "append-transform": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-1.0.0.tgz", + "integrity": "sha512-P009oYkeHyU742iSZJzZZywj4QRJdnTWffaKuJQLablCZ1uz6/cW4yaRgcDaoQ+uwOxxnt0gRUcwfsNP2ri0gw==", + "requires": { + "default-require-extensions": "^2.0.0" + } + }, + "archy": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", + "integrity": "sha1-+cjBN1fMHde8N5rHeyxipcKGjEA=" + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "requires": { + "sprintf-js": "~1.0.2" + } + }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", @@ -1002,7 +1141,32 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" + "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" + }, + "bl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.0.tgz", + "integrity": "sha512-wbgvOpqopSr7uq6fJrLH8EsvYMJf9gzfo2jCsL2eTy75qXPukA4pCgHamOQkZtY5vmfVtjB+P3LNlMHW5CEZXA==", + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } }, "body-parser": { "version": "1.19.0", @@ -1037,20 +1201,15 @@ "dev": true }, "bson": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.0.9.tgz", - "integrity": "sha512-IQX9/h7WdMBIW/q/++tGd+emQr0XMdeZ6icnT/74Xk9fnabWn+gZgpE+9V+gujL3hhJOoNrnDVY7tWdzc7NUTg==" + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.3.tgz", + "integrity": "sha512-TdiJxMVnodVS7r0BdL42y/pqC9cL2iKynVwA0Ho3qbsQYr428veL3l7BQyuqiw+Q5SqqoT0m4srSY/BlZ9AxXg==" }, "buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, - "buffer-shims": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz", - "integrity": "sha512-Zy8ZXMyxIT6RMTeY7OP/bDndfj6bwCan7SS98CEndS6deHwWPpseeHlwarNcBim+etXnF9HBc1non5JgDaJU1g==" - }, "builtin-modules": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", @@ -1093,6 +1252,22 @@ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" }, + "caching-transform": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-3.0.2.tgz", + "integrity": "sha512-Mtgcv3lh3U0zRii/6qVgQODdPA4G3zhG+jtbCWj39RXuUFTMzH0vcdMtaJS1jPowd+It2Pqr6y3NJMQqOqCE2w==", + "requires": { + "hasha": "^3.0.0", + "make-dir": "^2.0.0", + "package-hash": "^3.0.0", + "write-file-atomic": "^2.4.2" + } + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==" + }, "caseless": 
{ "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", @@ -1115,6 +1290,26 @@ "integrity": "sha512-ezo+u5DUDjPhOYkgsjbbVhtdzsnVr6n2CL/juJA89YnBsWO4ocL14Ake0txlGrGZo/HwcfhFGaV0czdunr3tHA==", "dev": true }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + } + }, "cluster-key-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", @@ -1128,6 +1323,19 @@ "mkdirp": "~0.3.5" } }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, "combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -1136,6 +1344,11 @@ "delayed-stream": "~1.0.0" } }, + "commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -1168,6 +1381,14 @@ "emitter-listener": "^1.1.1" } }, + "convert-source-map": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", + "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", + "requires": { + "safe-buffer": "~5.1.1" + } + }, "cookie": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", @@ -1183,6 +1404,43 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" }, + "cp-file": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/cp-file/-/cp-file-6.2.0.tgz", + "integrity": "sha512-fmvV4caBnofhPe8kOcitBwSn2f39QLjnAnGq3gO9dfd75mUytzKNZB1hde6QHunW2Rt+OwuBOMc3i1tNElbszA==", + "requires": { + "graceful-fs": "^4.1.2", + "make-dir": "^2.0.0", + "nested-error-stacks": "^2.0.0", + "pify": "^4.0.1", + "safe-buffer": "^5.0.1" + } + }, + "cross-spawn": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", + "integrity": "sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE=", + "requires": { + "lru-cache": "^4.0.1", + "which": "^1.2.9" + }, + "dependencies": { + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", 
+ "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + } + } + }, "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", @@ -1204,6 +1462,11 @@ "ms": "2.0.0" } }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" + }, "deep-eql": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", @@ -1221,6 +1484,14 @@ } } }, + "default-require-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-2.0.0.tgz", + "integrity": "sha1-9fj7sYp9bVCyH2QfZJ67Uiz+JPc=", + "requires": { + "strip-bom": "^3.0.0" + } + }, "delay": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", @@ -1312,7 +1583,7 @@ "each-series": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz", - "integrity": "sha512-4MQloCGGCmT5GJZK5ibgJSvTK1c1QSrNlDvLk6fEyRxjZnXjl+NNFfzhfXpmnWh33Owc9D9klrdzCUi7yc9r4Q==" + "integrity": "sha1-+Ibmxm39sl7x/nNWQUbuXLR4r8s=" }, "ecc-jsbn": { "version": "0.1.2", @@ -1339,6 +1610,11 @@ "shimmer": "^1.2.0" } }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -1357,6 +1633,19 @@ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==" + }, "es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", @@ -1365,7 +1654,7 @@ "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", + "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", "requires": { "es6-promise": "^4.0.3" } @@ -1378,8 +1667,17 @@ "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": 
"sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" }, "etag": { "version": "1.8.1", @@ -1481,10 +1779,28 @@ "unpipe": "~1.0.0" } }, + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" + "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, "follow-redirects": { "version": "1.5.10", @@ -1501,14 +1817,18 @@ "requires": { "ms": "2.0.0" } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, + "foreground-child": { + "version": "1.5.6", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", + "integrity": "sha1-T9ca0t/elnibmApcCilZN8svXOk=", + "requires": { + "cross-spawn": "^4", + "signal-exit": "^3.0.0" + } + }, "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", @@ -1547,8 +1867,7 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "gaxios": { "version": "2.3.1", @@ -1571,6 +1890,11 @@ "json-bigint": "^0.3.0" } }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + }, "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", @@ -1592,6 +1916,11 @@ "path-is-absolute": "^1.0.0" } }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" + }, "google-auth-library": { "version": "5.9.2", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.9.2.tgz", @@ -1637,6 +1966,11 @@ "node-forge": "^0.9.0" } }, + "graceful-fs": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" + }, "growl": { "version": 
"1.10.5", "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", @@ -1678,8 +2012,22 @@ "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" + }, + "hasha": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hasha/-/hasha-3.0.0.tgz", + "integrity": "sha1-UqMvq4Vp1BymmmH/GiFPjrfIvTk=", + "requires": { + "is-stream": "^1.0.1" + }, + "dependencies": { + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" + } + } }, "he": { "version": "1.1.1", @@ -1692,6 +2040,16 @@ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, + "hosted-git-info": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", + "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==" + }, + "html-escaper": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.0.tgz", + "integrity": "sha512-a4u9BeERWGu/S8JiWEAQcdrg9v4QArtP9keViQjGMdff20fBdd8waotXaNmODqBe6uZ3Nafi7K/ho4gCQHV3Ig==" + }, "http-errors": { "version": "1.7.2", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", @@ -1778,6 +2136,11 @@ "safer-buffer": ">= 2.1.2 < 3" } }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=" + }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -1833,11 +2196,21 @@ "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz", "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==" }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" + }, "is-buffer": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", @@ -1858,16 +2231,148 @@ "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + }, "isstream": { "version": "0.1.2", "resolved": 
"https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" }, + "istanbul-lib-coverage": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", + "integrity": "sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==" + }, + "istanbul-lib-hook": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-2.0.7.tgz", + "integrity": "sha512-vrRztU9VRRFDyC+aklfLoeXyNdTfga2EI3udDGn4cZ6fpSXpHLV9X6CHvfoMCPtggg8zvDDmC4b9xfu0z6/llA==", + "requires": { + "append-transform": "^1.0.0" + } + }, + "istanbul-lib-instrument": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-3.3.0.tgz", + "integrity": "sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==", + "requires": { + "@babel/generator": "^7.4.0", + "@babel/parser": "^7.4.3", + "@babel/template": "^7.4.0", + "@babel/traverse": "^7.4.3", + "@babel/types": "^7.4.0", + "istanbul-lib-coverage": "^2.0.5", + "semver": "^6.0.0" + } + }, + "istanbul-lib-report": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", + "integrity": "sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==", + "requires": { + "istanbul-lib-coverage": "^2.0.5", + "make-dir": "^2.1.0", + "supports-color": "^6.1.0" + }, + "dependencies": { + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "istanbul-lib-source-maps": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz", + "integrity": "sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==", + "requires": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^2.0.5", + "make-dir": "^2.1.0", + "rimraf": "^2.6.3", + "source-map": "^0.6.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + } + } + }, + "istanbul-reports": { + 
"version": "2.2.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.7.tgz", + "integrity": "sha512-uu1F/L1o5Y6LzPVSVZXNOoD/KXpJue9aeLRd0sM9uMXfZvzomB0WxVamWb5ue8kA2vVWEmW7EG+A5n3f1kqHKg==", + "requires": { + "html-escaper": "^2.0.0" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==" + }, "json-bigint": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", @@ -1876,6 +2381,11 @@ "bignumber.js": "^7.0.0" } }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==" + }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", @@ -1921,6 +2431,33 @@ "safe-buffer": "^5.0.1" } }, + "load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, "lodash": { "version": "4.17.15", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", @@ -1946,6 +2483,11 @@ "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", "integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==" }, + "lodash.flattendeep": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", + "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=" + }, "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", @@ -1954,7 +2496,7 @@ "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" + "integrity": 
"sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" }, "log-driver": { "version": "1.2.7", @@ -2048,12 +2590,28 @@ "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", + "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", "requires": { "mersenne": "~0.0.3", "statsd-parser": "~0.0.4" } }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, "map-obj": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", @@ -2064,15 +2622,29 @@ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" }, + "memory-pager": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", + "optional": true + }, "merge-descriptors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" }, + "merge-source-map": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/merge-source-map/-/merge-source-map-1.1.0.tgz", + "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", + "requires": { + "source-map": "^0.6.1" + } + }, "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" + "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" }, "methods": { "version": "1.1.2", @@ -2080,9 +2652,9 @@ "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, "metrics-sharelatex": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.5.1.tgz", - "integrity": "sha512-C2gmkl/tUnq3IlSX/x3dixGhdvfD6H9FR9mBf9lnkeyy2arafxhCU6u+1IQj6byjBM7vGpYHyjwWnmoi3Vb+ZQ==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.6.0.tgz", + "integrity": "sha512-kPWCtgBrRZwLXCxqJVVn3c7g+GHQEBGYBpwCIt0Vqp0NaKvgKiPkJMkoPg9vkCsjsN2AgpGxXcOAdnHAjxfrzA==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", @@ -2097,7 +2669,7 @@ "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" } } }, @@ -2205,7 +2777,7 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" + "integrity": 
"sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { "version": "2.24.0", @@ -2214,105 +2786,31 @@ "optional": true }, "mongodb": { - "version": "2.2.36", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-2.2.36.tgz", - "integrity": "sha512-P2SBLQ8Z0PVx71ngoXwo12+FiSfbNfGOClAao03/bant5DgLNkOPAck5IaJcEk4gKlQhDEURzfR3xuBG1/B+IA==", + "version": "3.5.5", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.5.5.tgz", + "integrity": "sha512-GCjDxR3UOltDq00Zcpzql6dQo1sVry60OXJY3TDmFc2SWFY6c8Gn1Ardidc5jDirvJrx2GC3knGOImKphbSL3A==", "requires": { - "es6-promise": "3.2.1", - "mongodb-core": "2.1.20", - "readable-stream": "2.2.7" - }, - "dependencies": { - "es6-promise": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.2.1.tgz", - "integrity": "sha512-oj4jOSXvWglTsc3wrw86iom3LDPOx1nbipQk+jaG3dy+sMRM6ReSgVr/VlmBuF6lXUrflN9DCcQHeSbAwGUl4g==" - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "process-nextick-args": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", - "integrity": "sha512-yN0WQmuCX63LP/TMvAg31nvT6m4vDqJEiiv2CAZqWOGNWutc9DfDk1NPYYmKUFmaVM2UwDowH4u5AHWYP/jxKw==" - }, - "readable-stream": { - "version": "2.2.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.7.tgz", - "integrity": "sha512-a6ibcfWFhgihuTw/chl+u3fB5ykBZFmnvpyZHebY0MCQE4vvYcsCLpCeaQ1BkH7HdJYavNSqF0WDLeo4IPHQaQ==", - "requires": { - "buffer-shims": "~1.0.0", - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "~1.0.0", - "process-nextick-args": "~1.0.6", - "string_decoder": "~1.0.0", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz", - "integrity": "sha512-4AH6Z5fzNNBcH+6XDMfA/BTt87skxqJlO0lAh3Dker5zThcAxG6mKz+iGu308UKoPPQ8Dcqx/4JhujzltRa+hQ==", - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "mongodb-core": { - "version": "2.1.20", - "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-2.1.20.tgz", - "integrity": "sha512-IN57CX5/Q1bhDq6ShAR6gIv4koFsZP7L8WOK1S0lR0pVDQaScffSMV5jxubLsmZ7J+UdqmykKw4r9hG3XQEGgQ==", - "requires": { - "bson": "~1.0.4", - "require_optional": "~1.0.0" + "bl": "^2.2.0", + "bson": "^1.1.1", + "denque": "^1.4.1", + "require_optional": "^1.0.1", + "safe-buffer": "^5.1.2", + "saslprep": "^1.0.0" } }, "mongojs": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mongojs/-/mongojs-2.6.0.tgz", - "integrity": "sha512-r6tj71DjYcaRTi2jpa+CA6Iq72cTZclB2JKy+Zub+0JPTEq/l2plsAYfF2eHqSYBtZbKNcObvhGYk9E9UKZWJg==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mongojs/-/mongojs-3.1.0.tgz", + "integrity": "sha512-aXJ4xfXwx9s1cqtKTZ24PypXiWhIgvgENObQzCGbV4QBxEVedy3yuErhx6znk959cF2dOzL2ClgXJvIhfgkpIQ==", "requires": { "each-series": "^1.0.0", - "mongodb": "^2.2.31", + "mongodb": "^3.3.2", + "nyc": "^14.1.1", "once": "^1.4.0", "parse-mongo-url": "^1.1.1", - "readable-stream": "^2.3.3", - "thunky": "^1.0.2", - "to-mongodb-core": "^2.0.0", - "xtend": "^4.0.1" - }, - "dependencies": { - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": 
"sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - } + "readable-stream": "^3.4.0", + "thunky": "^1.1.0", + "to-mongodb-core": "^2.0.0" } }, "ms": { @@ -2358,6 +2856,11 @@ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" }, + "nested-error-stacks": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", + "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==" + }, "node-fetch": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", @@ -2368,6 +2871,84 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.1.tgz", "integrity": "sha512-G6RlQt5Sb4GMBzXvhfkeFmbqR6MzhtnT7VTHuLadjkii3rdYHNdw0m8zA4BTxVIh68FicCQ2NSUANpsqkr9jvQ==" }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "nyc": { + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-14.1.1.tgz", + "integrity": "sha512-OI0vm6ZGUnoGZv/tLdZ2esSVzDwUC88SNs+6JoSOMVxA+gKMB8Tk7jBwgemLx4O40lhhvZCVw1C+OYLOBOPXWw==", + "requires": { + "archy": "^1.0.0", + "caching-transform": "^3.0.2", + "convert-source-map": "^1.6.0", + "cp-file": "^6.2.0", + "find-cache-dir": "^2.1.0", + "find-up": "^3.0.0", + "foreground-child": "^1.5.6", + "glob": "^7.1.3", + "istanbul-lib-coverage": "^2.0.5", + "istanbul-lib-hook": "^2.0.7", + "istanbul-lib-instrument": "^3.3.0", + "istanbul-lib-report": "^2.0.8", + "istanbul-lib-source-maps": "^3.0.6", + "istanbul-reports": "^2.2.4", + "js-yaml": "^3.13.1", + "make-dir": "^2.1.0", + "merge-source-map": "^1.1.0", + "resolve-from": "^4.0.0", + "rimraf": "^2.6.3", + "signal-exit": "^3.0.2", + "spawn-wrap": "^1.4.2", + "test-exclude": "^5.2.3", + "uuid": "^3.3.2", + "yargs": "^13.2.2", + "yargs-parser": "^13.0.0" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": 
"sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + } + } + }, "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", @@ -2396,6 +2977,11 @@ "wrappy": "1" } }, + "os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" + }, "p-limit": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", @@ -2404,20 +2990,48 @@ "p-try": "^2.0.0" } }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, + "package-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-3.0.0.tgz", + "integrity": "sha512-lOtmukMDVvtkL84rJHI7dpTYq+0rli8N2wlnqUcBuDWCfVhRUfOmnR9SsoHFMLpACvEV60dX7rd0rFaYDZI+FA==", + "requires": { + "graceful-fs": "^4.1.15", + "hasha": "^3.0.0", + "lodash.flattendeep": "^4.4.0", + "release-zalgo": "^1.0.0" + } + }, "parse-duration": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.2.tgz", "integrity": "sha512-0qfMZyjOUFBeEIvJ5EayfXJqaEXxQ+Oj2b7tWJM3hvEXvXsYCk05EDVI23oYnEw2NaFYUWdABEVPBvBMh8L/pA==" }, + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + }, "parse-mongo-url": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz", - "integrity": "sha512-7bZUusQIrFLwvsLHBnCz2WKYQ5LKO/LwKPnvQxbMIh9gDx8H5ZsknRmLjZdn6GVdrgVOwqDrZKsY0qDLNmRgcw==" + "integrity": "sha1-ZiON9fjnwMjKTNlw1KtqE3PrdbU=" }, "parse-ms": { "version": "2.1.0", @@ -2429,6 +3043,11 @@ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", @@ -2444,6 +3063,21 @@ "resolved": 
"https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "requires": { + "pify": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -2454,6 +3088,14 @@ "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } + }, "pretty-ms": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", @@ -2504,6 +3146,11 @@ "ipaddr.js": "1.9.1" } }, + "pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + }, "psl": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz", @@ -2619,6 +3266,35 @@ "unpipe": "1.0.0" } }, + "read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "requires": { + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" + } + }, + "read-pkg-up": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", + "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", + "requires": { + "find-up": "^3.0.0", + "read-pkg": "^3.0.0" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, "redis": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz", @@ -2678,6 +3354,14 @@ } } }, + "release-zalgo": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", + "integrity": "sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA=", + "requires": { + "es6-error": "^4.0.1" + } + }, "request": { "version": "2.88.2", "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", @@ -2739,6 +3423,11 @@ "when": "^3.7.7" } }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" + }, "require-in-the-middle": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.1.tgz", @@ -2770,6 +3459,11 @@ "integrity": 
"sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", "dev": true }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" + }, "require_optional": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", @@ -2797,7 +3491,7 @@ "resolve-from": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", - "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==" + "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" }, "retry-axios": { "version": "0.3.2", @@ -2871,6 +3565,15 @@ } } }, + "saslprep": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", + "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==", + "optional": true, + "requires": { + "sparse-bitfield": "^3.0.3" + } + }, "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", @@ -2914,6 +3617,11 @@ "send": "0.17.1" } }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, "setprototypeof": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", @@ -2939,6 +3647,11 @@ "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + }, "sinon": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/sinon/-/sinon-1.5.2.tgz", @@ -2962,6 +3675,92 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, + "sparse-bitfield": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", + "integrity": "sha1-/0rm5oZWBWuks+eSqzM004JzyhE=", + "optional": true, + "requires": { + "memory-pager": "^1.0.2" + } + }, + "spawn-wrap": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.3.tgz", + "integrity": "sha512-IgB8md0QW/+tWqcavuFgKYR/qIRvJkRLPJDFaoXtLLUaVcCDK0+HeFTkmQHj3eprcYhc+gOl0aEA1w7qZlYezw==", + "requires": { + "foreground-child": "^1.5.6", + "mkdirp": "^0.5.0", + "os-homedir": "^1.0.1", + "rimraf": "^2.6.2", + "signal-exit": "^3.0.2", + "which": "^1.3.0" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": 
"sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + }, + "mkdirp": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.3.tgz", + "integrity": "sha512-P+2gwrFqx8lhew375MQHHeTlY8AuOJSrGf0R5ddkEndUkmwpgUob/vQuBD1V22/Cw1/lJr4x+EjllSezBThzBg==", + "requires": { + "minimist": "^1.2.5" + } + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + } + } + }, + "spdx-correct": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", + "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==" + }, + "spdx-expression-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", + "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", + "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==" + }, "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", @@ -2970,6 +3769,11 @@ "through": "2" } }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + }, "sshpk": { "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", @@ -2999,7 +3803,7 @@ "statsd-parser": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" + "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" }, "statuses": { "version": "1.5.0", @@ -3019,6 +3823,37 @@ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" + }, "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", @@ -3028,7 +3863,6 @@ "version": "5.4.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", - "dev": true, "requires": { "has-flag": "^3.0.0" } @@ -3036,7 +3870,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", + "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", "requires": { "bintrees": "1.0.1" } @@ -3053,10 +3887,36 @@ "uuid": "^3.3.2" } }, + "test-exclude": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", + "integrity": "sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==", + "requires": { + "glob": "^7.1.3", + "minimatch": "^3.0.4", + "read-pkg-up": "^4.0.0", + "require-main-filename": "^2.0.0" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } + } + }, "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, "through2": { "version": "3.0.1", @@ -3102,10 +3962,15 @@ "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==", "dev": true }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=" + }, "to-mongodb-core": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz", - "integrity": "sha512-vfXXcGYFP8+0L5IPOtUzzVIvPE/G3GN0TKa/PRBlzPqYyhm+UxhPmvv634EQgO4Ot8dHbBFihOslMJQclY8Z9A==" + "integrity": "sha1-NZbsdhOsmtO5ioncua77pWnNJ+s=" }, "to-no-case": { "version": "1.0.2", @@ -3185,7 +4050,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" + "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" }, "unpipe": { "version": "1.0.0", @@ -3215,6 +4080,15 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": 
"https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, "vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -3240,21 +4114,80 @@ "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", "integrity": "sha512-5cZ7mecD3eYcMiCH4wtRPA5iFJZ50BJYDfckI5RRpQiktMiYTcn0ccLTZOvcbBume+1304fQztxeNzNS9Gvrnw==" }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + } + }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, - "xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + "write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "requires": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" }, "yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, + "yargs": { + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + } + }, + "yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, "yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", diff --git 
a/services/document-updater/package.json b/services/document-updater/package.json index 2633eb5d39..5dce90f558 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,11 +26,11 @@ "coffee-script": "~1.7.0", "express": "4.17.1", "lodash": "^4.17.13", - "logger-sharelatex": "^1.7.0", - "metrics-sharelatex": "^2.5.1", - "mongojs": "^2.6.0", + "logger-sharelatex": "^1.9.0", + "metrics-sharelatex": "^2.6.0", + "mongojs": "^3.1.0", "redis-sharelatex": "^1.0.11", - "request": "^2.47.0", + "request": "^2.88.2", "requestretry": "^4.1.0", "settings-sharelatex": "^1.1.0" }, From ada4fba3dc7d38ff6f47eb2c83bc23089b09a1f3 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 19 Mar 2020 15:39:57 +0000 Subject: [PATCH 583/769] Fix express deprecations --- services/document-updater/app.coffee | 26 ++++++------- .../app/coffee/HttpController.coffee | 32 ++++++++-------- .../helpers/MockProjectHistoryApi.coffee | 4 +- .../coffee/helpers/MockTrackChangesApi.coffee | 4 +- .../coffee/helpers/MockWebApi.coffee | 10 ++--- .../HttpController/HttpControllerTests.coffee | 37 ++++++++++--------- 6 files changed, 57 insertions(+), 56 deletions(-) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee index 6ff06f2f2a..3ca7ef2b6f 100644 --- a/services/document-updater/app.coffee +++ b/services/document-updater/app.coffee @@ -28,7 +28,7 @@ Metrics.event_loop.monitor(logger, 100) app = express() app.use(Metrics.http.monitor(logger)); -app.use bodyParser({limit: (Settings.max_doc_length + 64 * 1024)}) +app.use bodyParser.json({limit: (Settings.max_doc_length + 64 * 1024)}) Metrics.injectMetricsRoute(app) DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) @@ -61,7 +61,7 @@ app.post '/project/:project_id/history/resync', HttpCont app.post '/project/:project_id/flush', HttpController.flushProject app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges -app.del '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment +app.delete '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment app.get '/flush_all_projects', HttpController.flushAllProjects app.get '/flush_queued_projects', HttpController.flushQueuedProjects @@ -74,7 +74,7 @@ app.get '/total', (req, res)-> app.get '/status', (req, res)-> if Settings.shuttingDown - res.send 503 # Service unavailable + res.sendStatus 503 # Service unavailable else res.send('document updater is alive') @@ -83,18 +83,18 @@ app.get "/health_check/redis", (req, res, next) -> pubsubClient.healthCheck (error) -> if error? logger.err {err: error}, "failed redis health check" - res.send 500 + res.sendStatus 500 else - res.send 200 + res.sendStatus 200 docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) app.get "/health_check/redis_cluster", (req, res, next) -> docUpdaterRedisClient.healthCheck (error) -> if error? logger.err {err: error}, "failed redis cluster health check" - res.send 500 + res.sendStatus 500 else - res.send 200 + res.sendStatus 200 app.get "/health_check", (req, res, next) -> async.series [ @@ -115,20 +115,20 @@ app.get "/health_check", (req, res, next) -> cb(error) ] , (error) -> if error? 
- res.send 500 + res.sendStatus 500 else - res.send 200 + res.sendStatus 200 app.use (error, req, res, next) -> if error instanceof Errors.NotFoundError - res.send 404 + res.sendStatus 404 else if error instanceof Errors.OpRangeNotAvailableError - res.send 422 # Unprocessable Entity + res.sendStatus 422 # Unprocessable Entity else if error.statusCode is 413 - res.send(413, "request entity too large") + res.status(413).send("request entity too large") else logger.error err: error, req: req, "request errored" - res.send(500, "Oops, something went wrong") + res.status(500).send("Oops, something went wrong") shutdownCleanly = (signal) -> return () -> diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.coffee index b7d38343d4..67d247ab97 100644 --- a/services/document-updater/app/coffee/HttpController.coffee +++ b/services/document-updater/app/coffee/HttpController.coffee @@ -57,7 +57,7 @@ module.exports = HttpController = ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, excludeVersions, (error, result) -> timer.done() if error instanceof Errors.ProjectStateChangedError - res.send 409 # conflict + res.sendStatus 409 # conflict else if error? return next(error) else @@ -73,7 +73,7 @@ module.exports = HttpController = if error? return next(error) else - res.send 200 + res.sendStatus 200 setDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id @@ -82,14 +82,14 @@ module.exports = HttpController = lineSize = HttpController._getTotalSizeOfLines(lines) if lineSize > TWO_MEGABYTES logger.log {project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response" - return res.send 406 + return res.sendStatus 406 logger.log {project_id, doc_id, lines, source, user_id, undoing}, "setting doc via http" timer = new Metrics.Timer("http.setDoc") DocumentManager.setDocWithLock project_id, doc_id, lines, source, user_id, undoing, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "set doc via http" - res.send 204 # No Content + res.sendStatus 204 # No Content flushDocIfLoaded: (req, res, next = (error) ->) -> @@ -101,7 +101,7 @@ module.exports = HttpController = timer.done() return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "flushed doc via http" - res.send 204 # No Content + res.sendStatus 204 # No Content deleteDoc: (req, res, next = (error) ->) -> doc_id = req.params.doc_id @@ -117,7 +117,7 @@ module.exports = HttpController = return next(error) if error? logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" - res.send 204 # No Content + res.sendStatus 204 # No Content flushProject: (req, res, next = (error) ->) -> project_id = req.params.project_id @@ -127,7 +127,7 @@ module.exports = HttpController = timer.done() return next(error) if error? logger.log project_id: project_id, "flushed project via http" - res.send 204 # No Content + res.sendStatus 204 # No Content deleteProject: (req, res, next = (error) ->) -> project_id = req.params.project_id @@ -139,14 +139,14 @@ module.exports = HttpController = ProjectManager.queueFlushAndDeleteProject project_id, (error) -> return next(error) if error? 
logger.log project_id: project_id, "queue delete of project via http" - res.send 204 # No Content + res.sendStatus 204 # No Content else timer = new Metrics.Timer("http.deleteProject") ProjectManager.flushAndDeleteProjectWithLocks project_id, options, (error) -> timer.done() return next(error) if error? logger.log project_id: project_id, "deleted project via http" - res.send 204 # No Content + res.sendStatus 204 # No Content deleteMultipleProjects: (req, res, next = (error) ->) -> project_ids = req.body?.project_ids || [] @@ -156,7 +156,7 @@ module.exports = HttpController = ProjectManager.queueFlushAndDeleteProject project_id, cb , (error) -> return next(error) if error? - res.send 204 # No Content + res.sendStatus 204 # No Content acceptChanges: (req, res, next = (error) ->) -> {project_id, doc_id} = req.params @@ -169,7 +169,7 @@ module.exports = HttpController = timer.done() return next(error) if error? logger.log {project_id, doc_id}, "accepted #{ change_ids.length } changes via http" - res.send 204 # No Content + res.sendStatus 204 # No Content deleteComment: (req, res, next = (error) ->) -> {project_id, doc_id, comment_id} = req.params @@ -179,7 +179,7 @@ module.exports = HttpController = timer.done() return next(error) if error? logger.log {project_id, doc_id, comment_id}, "deleted comment via http" - res.send 204 # No Content + res.sendStatus 204 # No Content updateProject: (req, res, next = (error) ->) -> timer = new Metrics.Timer("http.updateProject") @@ -191,7 +191,7 @@ module.exports = HttpController = timer.done() return next(error) if error? logger.log project_id: project_id, "updated project via http" - res.send 204 # No Content + res.sendStatus 204 # No Content resyncProjectHistory: (req, res, next = (error) ->) -> project_id = req.params.project_id @@ -201,7 +201,7 @@ module.exports = HttpController = HistoryManager.resyncProjectHistory project_id, projectHistoryId, docs, files, (error) -> return next(error) if error? logger.log {project_id}, "queued project history resync via http" - res.send 204 + res.sendStatus 204 flushAllProjects: (req, res, next = (error)-> )-> res.setTimeout(5 * 60 * 1000) @@ -212,7 +212,7 @@ module.exports = HttpController = ProjectFlusher.flushAllProjects options, (err, project_ids)-> if err? logger.err err:err, "error bulk flushing projects" - res.send 500 + res.sendStatus 500 else res.send project_ids @@ -225,7 +225,7 @@ module.exports = HttpController = DeleteQueueManager.flushAndDeleteOldProjects options, (err, flushed)-> if err? logger.err err:err, "error flushing old projects" - res.send 500 + res.sendStatus 500 else logger.log {flushed: flushed}, "flush of queued projects completed" res.send {flushed: flushed} diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee index 2a0c8603a4..eb635225da 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee @@ -9,9 +9,9 @@ module.exports = MockProjectHistoryApi = app.post "/project/:project_id/flush", (req, res, next) => @flushProject req.params.project_id, (error) -> if error? - res.send 500 + res.sendStatus 500 else - res.send 204 + res.sendStatus 204 app.listen 3054, (error) -> throw error if error? 
diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee index 95caead368..924937fe39 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee @@ -9,9 +9,9 @@ module.exports = MockTrackChangesApi = app.post "/project/:project_id/doc/:doc_id/flush", (req, res, next) => @flushDoc req.params.doc_id, (error) -> if error? - res.send 500 + res.sendStatus 500 else - res.send 204 + res.sendStatus 204 app.listen 3015, (error) -> throw error if error? diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee index daea1dcf40..19b518f7c6 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee @@ -31,18 +31,18 @@ module.exports = MockWebApi = app.get "/project/:project_id/doc/:doc_id", (req, res, next) => @getDocument req.params.project_id, req.params.doc_id, (error, doc) -> if error? - res.send 500 + res.sendStatus 500 else if doc? res.send JSON.stringify doc else - res.send 404 + res.sendStatus 404 - app.post "/project/:project_id/doc/:doc_id", bodyParser({limit: MAX_REQUEST_SIZE}), (req, res, next) => + app.post "/project/:project_id/doc/:doc_id", bodyParser.json({limit: MAX_REQUEST_SIZE}), (req, res, next) => MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, (error) -> if error? - res.send 500 + res.sendStatus 500 else - res.send 204 + res.sendStatus 204 app.listen 3000, (error) -> throw error if error? 
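The mock API changes above follow the same mechanical recipe as the app and controller changes earlier in this patch: every Express 3 idiom is swapped for its Express 4 equivalent. The sketch below condenses that before/after mapping into one hypothetical route; it is an illustration only, not a further file in the patch, and it assumes the standalone body-parser package that Express 4 requires.

    express = require "express"
    bodyParser = require "body-parser"

    app = express()

    # was: app.use bodyParser(limit: "2mb") -- Express 4 drops the bundled, bare body parser
    app.use bodyParser.json(limit: "2mb")

    # was: app.del "/thing/:id", ... -- the `app.del` alias becomes `app.delete`
    app.delete "/thing/:id", (req, res) ->
      # was: res.send 204 -- a bare status code becomes `res.sendStatus`
      res.sendStatus 204

    # was: res.send(500, "Oops, something went wrong") -- status plus body splits into two calls
    app.use (error, req, res, next) ->
      res.status(500).send("Oops, something went wrong")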
diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee index 00fd16c088..4f316a12ba 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee @@ -24,8 +24,9 @@ describe "HttpController", -> @next = sinon.stub() @res = send: sinon.stub() + sendStatus: sinon.stub() json: sinon.stub() - + describe "getDoc", -> beforeEach -> @lines = ["one", "two", "three"] @@ -119,7 +120,7 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true - + describe "setDoc", -> beforeEach -> @lines = ["one", "two", "three"] @@ -147,7 +148,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true @@ -179,7 +180,7 @@ describe "HttpController", -> @HttpController.setDoc(@req, @res, @next) it 'should send back a 406 response', -> - @res.send.calledWith(406).should.equal true + @res.sendStatus.calledWith(406).should.equal true it 'should not call setDocWithLock', -> @DocumentManager.setDocWithLock.callCount.should.equal 0 @@ -201,7 +202,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true @@ -222,7 +223,7 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true - + describe "flushDocIfLoaded", -> beforeEach -> @lines = ["one", "two", "three"] @@ -243,7 +244,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true @@ -289,7 +290,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true @@ -313,7 +314,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send.calledWith(204).should.equal true + @res.sendStatus.calledWith(204).should.equal true describe "when an errors occurs", -> beforeEach -> @@ -329,7 +330,7 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true - + describe "deleteProject", -> beforeEach -> @req = @@ -347,7 +348,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true @@ -379,7 +380,7 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true - + describe "acceptChanges", -> beforeEach -> @req = @@ -399,7 +400,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true @@ -438,7 +439,7 @@ describe "HttpController", -> @next .calledWith(new Error("oops")) .should.equal true - + describe "deleteComment", -> beforeEach -> @req = @@ -458,7 +459,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true @@ -524,7 +525,7 @@ describe "HttpController", -> @HttpController.getProjectDocsAndFlushIfOld(@req, @res, @next) it "should return an HTTP 409 
Conflict response", -> - @res.send + @res.sendStatus .calledWith(409) .should.equal true @@ -561,7 +562,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true @@ -601,7 +602,7 @@ describe "HttpController", -> .should.equal true it "should return a successful No Content response", -> - @res.send + @res.sendStatus .calledWith(204) .should.equal true From b2d1718a2ecef4adb59a4b9b056446eecac49f10 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 23 Mar 2020 16:18:05 +0100 Subject: [PATCH 584/769] [misc] bump logger-sharelatex to 1.9.1 --- services/document-updater/package-lock.json | 6 +++--- services/document-updater/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 852c1670c1..410fa44a09 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -2055,9 +2055,9 @@ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, "logger-sharelatex": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.0.tgz", - "integrity": "sha512-yVTuha82047IiMOQLgQHCZGKkJo6I2+2KtiFKpgkIooR2yZaoTEvAeoMwBesSDSpGUpvUJ/+9UI+PmRyc+PQKQ==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", + "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", "requires": { "@google-cloud/logging-bunyan": "^2.0.0", "@overleaf/o-error": "^2.0.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index e01ebbbb8d..b0277ecea1 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -25,7 +25,7 @@ "coffee-script": "~1.7.0", "express": "3.11.0", "lodash": "^4.17.13", - "logger-sharelatex": "^1.7.0", + "logger-sharelatex": "^1.9.1", "metrics-sharelatex": "^2.5.1", "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", From e293d86c148a93b039ef3a084d84e3a9a0983229 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 25 Mar 2020 12:15:16 +0000 Subject: [PATCH 585/769] add metric for project history queue --- .../app/coffee/ProjectHistoryRedisManager.coffee | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 1cc80ea722..5763aa7ed3 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -2,9 +2,13 @@ Settings = require('settings-sharelatex') projectHistoryKeys = Settings.redis?.project_history?.key_schema rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) logger = require('logger-sharelatex') +metrics = require('./Metrics') module.exports = ProjectHistoryRedisManager = queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) -> + # Record metric for ops pushed onto queue + for op in ops + metrics.summary "redis.projectHistoryOps", op.length, {status: "push"} multi = rclient.multi() # Push the ops onto the project history queue multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops... 
From 891fcc696bcef5d5606dd4086d041eb55c3ccff0 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 25 Mar 2020 12:15:35 +0000 Subject: [PATCH 586/769] add metric for pending updates queue --- .../document-updater/app/coffee/RealTimeRedisManager.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index d04f2304d3..6fd48033da 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -5,6 +5,7 @@ Keys = Settings.redis.documentupdater.key_schema logger = require('logger-sharelatex') os = require "os" crypto = require "crypto" +metrics = require('./Metrics') HOST = os.hostname() RND = crypto.randomBytes(4).toString('hex') # generate a random key for this process @@ -27,6 +28,8 @@ module.exports = RealTimeRedisManager = catch e return callback e updates.push update + # record metric for updates removed from queue + metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"} callback error, updates getUpdatesLength: (doc_id, callback)-> From 1a0550364dc0fa88dd5d18004632926da14cd66f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 25 Mar 2020 13:39:37 +0000 Subject: [PATCH 587/769] add metric for getdoc bytes --- services/document-updater/app/coffee/RedisManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index ca4151d299..6b4dc20257 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -41,6 +41,7 @@ module.exports = RedisManager = logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message return callback(error) docHash = RedisManager._computeHash(docLines) + metrics.summary "redis.setDoc", docLines.length, {status: "set"} logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> if error? 
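Between this patch and the previous one, the dispatcher's read path and the document write path now both report byte sizes to the same summary helper. The sketch below isolates the pop-side loop from RealTimeRedisManager under a hypothetical helper name (`parsePendingUpdates`; the real code lives inside getPendingUpdatesForDoc), assuming `jsonUpdates` holds the raw strings returned by the Redis LRANGE/DEL transaction.

    metrics = require "./Metrics"

    # Sketch: measure each raw update as it is drained from the queue, with a
    # "pop" status label, then parse it; a bad payload aborts with the error.
    parsePendingUpdates = (jsonUpdates, callback = (error, updates) ->) ->
      updates = []
      for jsonUpdate in jsonUpdates
        metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"}
        try
          updates.push JSON.parse(jsonUpdate)
        catch e
          return callback e
      callback null, updates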
From fcb72b9bf7a6a0974c7f971c9ba2864f75b287a3 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 25 Mar 2020 14:27:04 +0000 Subject: [PATCH 588/769] update tests --- services/document-updater/package-lock.json | 12 ++++++------ services/document-updater/package.json | 2 +- .../ProjectHistoryRedisManagerTests.coffee | 1 + .../RealTimeRedisManagerTests.coffee | 15 ++++++++------- .../coffee/RedisManager/RedisManagerTests.coffee | 1 + 5 files changed, 17 insertions(+), 14 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 852c1670c1..83448c1c77 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -853,9 +853,9 @@ } }, "acorn": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.0.tgz", - "integrity": "sha512-gac8OEcQ2Li1dxIEWGZzsp2BitJxwkwcOm0zHAJLcPJaVvm58FRnk6RkuLRpU1EujipU2ZFODv2P9DLMfnV8mw==" + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==" }, "agent-base": { "version": "6.0.0", @@ -2183,9 +2183,9 @@ "integrity": "sha512-2403MfnVypWSNIEpmQ26/ObZ5kSUx37E8NHRvriw0+I8Sne7k0HGuLGCk0OrCqURh4UIygD0cSsYq+Ll+kzNqA==" }, "metrics-sharelatex": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.5.1.tgz", - "integrity": "sha512-C2gmkl/tUnq3IlSX/x3dixGhdvfD6H9FR9mBf9lnkeyy2arafxhCU6u+1IQj6byjBM7vGpYHyjwWnmoi3Vb+ZQ==", + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.6.2.tgz", + "integrity": "sha512-bOLfkSCexiPgB96hdXhoOWyvvrwscgjeZPEqdcJ7BTGxY59anzvymNf5hTGJ1RtS4sblDKxITw3L5a+gYKhRYQ==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index e01ebbbb8d..72bb0cd868 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,7 +26,7 @@ "express": "3.11.0", "lodash": "^4.17.13", "logger-sharelatex": "^1.7.0", - "metrics-sharelatex": "^2.5.1", + "metrics-sharelatex": "^2.6.2", "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", "request": "^2.47.0", diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index a93545b250..9810b77d5f 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -26,6 +26,7 @@ describe "ProjectHistoryRedisManager", -> createClient: () => @rclient "logger-sharelatex": log:-> + "./Metrics": @metrics = { summary: sinon.stub()} globals: JSON: @JSON = JSON diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index 1d97779bfa..13e532736e 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -11,7 +11,7 @@ describe 
"RealTimeRedisManager", -> auth: () -> exec: sinon.stub() @rclient.multi = () => @rclient - @pubsubClient = + @pubsubClient = publish: sinon.stub() @RealTimeRedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex": createClient: (config) => if (config.name is 'pubsub') then @pubsubClient else @rclient @@ -25,11 +25,12 @@ describe "RealTimeRedisManager", -> "logger-sharelatex": { log: () -> } "crypto": @crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) } "os": @os = {hostname: sinon.stub().returns("somehost")} + "./Metrics": @metrics = { summary: sinon.stub()} @doc_id = "doc-id-123" @project_id = "project-id-123" @callback = sinon.stub() - + describe "getPendingUpdatesForDoc", -> beforeEach -> @rclient.lrange = sinon.stub() @@ -44,7 +45,7 @@ describe "RealTimeRedisManager", -> @jsonUpdates = @updates.map (update) -> JSON.stringify update @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) @RealTimeRedisManager.getPendingUpdatesForDoc @doc_id, @callback - + it "should get the pending updates", -> @rclient.lrange .calledWith("PendingUpdates:#{@doc_id}", 0, 7) @@ -75,10 +76,10 @@ describe "RealTimeRedisManager", -> beforeEach -> @rclient.llen = sinon.stub().yields(null, @length = 3) @RealTimeRedisManager.getUpdatesLength @doc_id, @callback - + it "should look up the length", -> @rclient.llen.calledWith("PendingUpdates:#{@doc_id}").should.equal true - + it "should return the length", -> @callback.calledWith(null, @length).should.equal true @@ -86,6 +87,6 @@ describe "RealTimeRedisManager", -> beforeEach -> @message_id = "doc:somehost:01020304-0" @RealTimeRedisManager.sendData({op: "thisop"}) - + it "should send the op with a message id", -> - @pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true \ No newline at end of file + @pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index b666163762..2ac8ac9c16 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -48,6 +48,7 @@ describe "RedisManager", -> createClient: () => @rclient "./Metrics": @metrics = inc: sinon.stub() + summary: sinon.stub() Timer: class Timer constructor: () -> this.start = new Date() From 861ab5be80a9e0d44fec60658e62467d31b50f31 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 26 Mar 2020 13:40:47 +0000 Subject: [PATCH 589/769] update metrics and logger for latest bug fixes --- services/document-updater/package-lock.json | 189 ++++++++++---------- services/document-updater/package.json | 4 +- 2 files changed, 96 insertions(+), 97 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 84cd4fd6c9..d6ba77e4e4 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -123,9 +123,9 @@ } }, "@google-cloud/common": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.3.0.tgz", - "integrity": "sha512-nmIyi3q/FL2j6ZJ61xK/863DoJEZayI2/W/iCgwrCYUYsem277XO45MBTAimjgiKBCA0c9InmQyfT48h/IK4jg==", + "version": "2.4.0", + "resolved": 
"https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz", + "integrity": "sha512-zWFjBS35eI9leAHhjfeOYlK5Plcuj/77EzstnrJIZbKgF/nkqjcQuGiMCpzCwOfPyUbz8ZaEOYgbHa759AKbjg==", "requires": { "@google-cloud/projectify": "^1.0.0", "@google-cloud/promisify": "^1.0.0", @@ -328,15 +328,15 @@ } }, "@google-cloud/logging": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.1.0.tgz", - "integrity": "sha512-0E2ywYYcR/6aZdaPjjX4qe6EN4DciZQMhtAeSZJl3tJZ+L/BUZajodhadwXm3AJ0syEzsqmsOeEBNEBLEtD8XQ==", + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", + "integrity": "sha512-xTW1V4MKpYC0mjSugyuiyUoZ9g6A42IhrrO3z7Tt3SmAb2IRj2Gf4RLoguKKncs340ooZFXrrVN/++t2Aj5zgg==", "requires": { "@google-cloud/common": "^2.2.2", "@google-cloud/paginator": "^2.0.0", "@google-cloud/projectify": "^1.0.0", "@google-cloud/promisify": "^1.0.0", - "@opencensus/propagation-stackdriver": "0.0.19", + "@opencensus/propagation-stackdriver": "0.0.20", "arrify": "^2.0.0", "dot-prop": "^5.1.0", "eventid": "^1.0.0", @@ -350,7 +350,7 @@ "snakecase-keys": "^3.0.0", "stream-events": "^1.0.4", "through2": "^3.0.0", - "type-fest": "^0.9.0" + "type-fest": "^0.12.0" } }, "@google-cloud/logging-bunyan": { @@ -764,9 +764,9 @@ } }, "@grpc/grpc-js": { - "version": "0.6.16", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-0.6.16.tgz", - "integrity": "sha512-TckwrK2duWTeqE/fYQ5JaLMDwqLuun0B/yswf8Bb9Pb7vb5xGd3iulmcnnaA2RDVd/abQTHnkSAjfRibYU24eQ==", + "version": "0.6.18", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-0.6.18.tgz", + "integrity": "sha512-uAzv/tM8qpbf1vpx1xPMfcUMzbfdqJtdCYAqY/LsLeQQlnTb4vApylojr+wlCyr7bZeg3AFfHvtihnNOQQt/nA==", "requires": { "semver": "^6.2.0" } @@ -781,9 +781,9 @@ } }, "@opencensus/core": { - "version": "0.0.19", - "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.19.tgz", - "integrity": "sha512-Y5QXa7vggMU0+jveLcworfX9jNnztix7x1NraAV0uGkTp4y46HrFl0DnNcnNxUDvBu/cYeWRwlmhiWlr9+adOQ==", + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.20.tgz", + "integrity": "sha512-vqOuTd2yuMpKohp8TNNGUAPjWEGjlnGfB9Rh5e3DKqeyR94YgierNs4LbMqxKtsnwB8Dm2yoEtRuUgoe5vD9DA==", "requires": { "continuation-local-storage": "^3.2.1", "log-driver": "^1.2.7", @@ -793,11 +793,11 @@ } }, "@opencensus/propagation-stackdriver": { - "version": "0.0.19", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.19.tgz", - "integrity": "sha512-TTL9KIOkvTpd+DT2gj3R3JP7XqOAf69ab/wzxIwpBlFqfRiIFBkOALyC/Gy4pKooAe5DemDhXZuRtIa0PgfoZQ==", + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.20.tgz", + "integrity": "sha512-P8yuHSLtce+yb+2EZjtTVqG7DQ48laC+IuOWi3X9q78s1Gni5F9+hmbmyP6Nb61jb5BEvXQX1s2rtRI6bayUWA==", "requires": { - "@opencensus/core": "^0.0.19", + "@opencensus/core": "^0.0.20", "hex2dec": "^1.0.1", "uuid": "^3.2.1" } @@ -890,9 +890,9 @@ } }, "@types/fs-extra": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.0.1.tgz", - "integrity": "sha512-J00cVDALmi/hJOYsunyT52Hva5TnJeKP5yd1r+mH/ZU0mbYZflR0Z5kw5kITtKTRYMhm1JMClOFYdHnQszEvqw==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.0.tgz", + "integrity": "sha512-UoOfVEzAUpeSPmjm7h1uk5MH6KZma2z2O7a75onTGjnNvAvMVrPzPL/vBbT65iIGHWj6rokwfmYcmxmlSf2uwg==", "requires": { "@types/node": 
"*" } @@ -1444,7 +1444,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" + "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, "dashdash": { "version": "1.14.1", @@ -1870,9 +1870,9 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "gaxios": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.1.tgz", - "integrity": "sha512-DQOesWEx59/bm63lTX0uHDDXpGTW9oKqNsoigwCoRe2lOb5rFqxzHjLTa6aqEBecLcz69dHLw7rbS068z1fvIQ==", + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz", + "integrity": "sha512-US8UMj8C5pRnao3Zykc4AAVr+cffoNKRTg9Rsf2GiuZCW69vgJj38VK2PzlPuQU73FZ/nTk9/Av6/JGcE1N9vA==", "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -1882,9 +1882,9 @@ } }, "gcp-metadata": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.3.1.tgz", - "integrity": "sha512-RrASg1HaVAxoB9Q/8sYfJ++v9PMiiqIgOrOxZeagMgS4osZtICT1lKBx2uvzYgwetxj8i6K99Z0iuKMg7WraTg==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.5.0.tgz", + "integrity": "sha512-ZQf+DLZ5aKcRpLzYUyBS3yo3N0JSa82lNDO8rj3nMSlovLcz2riKFBsYgDzeXcv75oo5eqB2lx+B14UvPoCRnA==", "requires": { "gaxios": "^2.1.0", "json-bigint": "^0.3.0" @@ -1922,26 +1922,27 @@ "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" }, "google-auth-library": { - "version": "5.9.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.9.2.tgz", - "integrity": "sha512-rBE1YTOZ3/Hu6Mojkr+UUmbdc/F28hyMGYEGxjyfVA9ZFmq12oqS3AeftX4h9XpdVIcxPooSo8hECYGT6B9XqQ==", + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", "fast-text-encoding": "^1.0.0", "gaxios": "^2.1.0", - "gcp-metadata": "^3.3.0", + "gcp-metadata": "^3.4.0", "gtoken": "^4.1.0", "jws": "^4.0.0", "lru-cache": "^5.0.0" } }, "google-gax": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.14.1.tgz", - "integrity": "sha512-lAvILUMnXL+BVSSlbzwpGzs3ZP2r+b1l44zeDTRWceejDgyZORKdPEEhtUw49x9CVwxpPx02+v0yktqnRhUD1A==", + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.15.1.tgz", + "integrity": "sha512-1T1PwSZWnbdRusA+NCZMSe56iU6swGvuZuy54eYl9vEHiRXTLYbQmUkWY2CqgYD9Fd/T4WBkUl22+rZG80unyw==", "requires": { - "@grpc/grpc-js": "^0.6.12", + "@grpc/grpc-js": "^0.6.18", "@grpc/proto-loader": "^0.5.1", "@types/fs-extra": "^8.0.1", "@types/long": "^4.0.0", @@ -1952,10 +1953,32 @@ "lodash.at": "^4.6.0", "lodash.has": "^4.5.2", "node-fetch": "^2.6.0", - "protobufjs": "^6.8.8", + "protobufjs": "^6.8.9", "retry-request": "^4.0.0", "semver": "^6.0.0", "walkdir": "^0.4.0" + }, + "dependencies": { + "protobufjs": { + "version": "6.8.9", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.9.tgz", + "integrity": "sha512-j2JlRdUeL/f4Z6x4aU4gj9I2LECglC+5qR2TrWb193Tla1qfdaNQTZ8I27Pt7K0Ajmvjjpft7O3KWTGciz4gpw==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + 
"@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.0", + "@types/node": "^10.1.0", + "long": "^4.0.0" + } + } } }, "google-p12-pem": { @@ -2466,12 +2489,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" + "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, "lodash.defaults": { "version": "4.2.0", @@ -2491,7 +2514,7 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" + "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" }, "lodash.pickby": { "version": "4.6.0", @@ -2504,9 +2527,9 @@ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, "logger-sharelatex": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.0.tgz", - "integrity": "sha512-yVTuha82047IiMOQLgQHCZGKkJo6I2+2KtiFKpgkIooR2yZaoTEvAeoMwBesSDSpGUpvUJ/+9UI+PmRyc+PQKQ==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", + "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", "requires": { "@google-cloud/logging-bunyan": "^2.0.0", "@overleaf/o-error": "^2.0.0", @@ -2519,7 +2542,7 @@ "bunyan": { "version": "1.8.12", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha512-dmDUbGHeGcvCDLRFOscZkwx1ZO/aFz3bJOCi5nCgzdhFGPxwK+y5AcDBnqagNGlJZ7lje/l6JUEz9mQcutttdg==", + "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", "requires": { "dtrace-provider": "~0.8", "moment": "^2.10.6", @@ -2527,14 +2550,6 @@ "safe-json-stringify": "~1" } }, - "mime-types": { - "version": "2.1.26", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", - "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", - "requires": { - "mime-db": "1.43.0" - } - }, "qs": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", @@ -2585,7 +2600,7 @@ "lsmod": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha512-Y+6V75r+mGWzWEPr9h6PFmStielICu5JBHLUg18jCsD2VFmEfgHbq/EgnY4inElsUD9eKL9id1qp34w46rSIKQ==" + "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" }, "lynx": { "version": "0.1.1", @@ -2652,9 +2667,9 @@ "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, "metrics-sharelatex": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.6.0.tgz", - "integrity": "sha512-kPWCtgBrRZwLXCxqJVVn3c7g+GHQEBGYBpwCIt0Vqp0NaKvgKiPkJMkoPg9vkCsjsN2AgpGxXcOAdnHAjxfrzA==", + "version": "2.6.2", + "resolved": 
"https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.6.2.tgz", + "integrity": "sha512-bOLfkSCexiPgB96hdXhoOWyvvrwscgjeZPEqdcJ7BTGxY59anzvymNf5hTGJ1RtS4sblDKxITw3L5a+gYKhRYQ==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", @@ -3185,29 +3200,6 @@ "readable-stream": "^3.1.1", "stream-shift": "^1.0.0" } - }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "safe-buffer": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", - "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" - }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - } } } }, @@ -3234,7 +3226,7 @@ "raven": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha512-RYov4wAaflZasWiCrZuizd3jNXxCOkW1WrXgWsGVb8kRpdHNZ+vPY27R6RhVtqzWp+DG9a5l6iP0QUPK4EgzaQ==", + "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", "requires": { "cookie": "0.3.1", "json-stringify-safe": "5.0.1", @@ -3246,12 +3238,12 @@ "cookie": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==" + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, "uuid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha512-rqE1LoOVLv3QrZMjb4NkF5UWlkurCfPyItVnFPNKDDGkHw4dQUdE4zMcLqx28+0Kcf3+bnUk4PisaiRJT4aiaQ==" + "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" } } }, @@ -3793,7 +3785,7 @@ "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==" + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" }, "standard-as-callback": { "version": "2.0.1", @@ -3857,7 +3849,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" }, "supports-color": { "version": "5.4.0", @@ -3876,15 +3868,22 @@ } }, "teeny-request": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.2.tgz", - "integrity": "sha512-B6fxA0fSnY/bul06NggdN1nywtr5U5Uvt96pHfTi8pi4MNe6++VUWcAAFBrcMeha94s+gULwA5WvagoSZ+AcYg==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.3.tgz", + "integrity": "sha512-TZG/dfd2r6yeji19es1cUIwAlVD8y+/svB1kAC2Y0bjEyysrfbO8EZvJBRwIE6WkwmUoB7uvWLwTIhJbMXZ1Dw==", "requires": { "http-proxy-agent": "^4.0.0", "https-proxy-agent": "^5.0.0", "node-fetch": "^2.2.0", "stream-events": "^1.0.5", - "uuid": "^3.3.2" + "uuid": "^7.0.0" + }, + "dependencies": { + "uuid": { + "version": "7.0.2", + 
"resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.2.tgz", + "integrity": "sha512-vy9V/+pKG+5ZTYKf+VcphF5Oc6EFiu3W8Nv3P3zIh0EqVI80ZxOzuPfe9EHjkFNvf8+xuTHVeei4Drydlx4zjw==" + } } }, "test-exclude": { @@ -3975,12 +3974,12 @@ "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", + "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", "requires": { "to-space-case": "^1.0.0" } @@ -3988,7 +3987,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", + "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", "requires": { "to-no-case": "^1.0.0" } @@ -4010,7 +4009,7 @@ "punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" } } }, @@ -4034,9 +4033,9 @@ "dev": true }, "type-fest": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.9.0.tgz", - "integrity": "sha512-j55pzONIdg7rdtJTRZPKIbV0FosUqYdhHK1aAYJIrUvejv1VVyBokrILE8KQDT4emW/1Ev9tx+yZG+AxuSBMmA==" + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz", + "integrity": "sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==" }, "type-is": { "version": "1.6.18", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 5dce90f558..efd602ea53 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,8 +26,8 @@ "coffee-script": "~1.7.0", "express": "4.17.1", "lodash": "^4.17.13", - "logger-sharelatex": "^1.9.0", - "metrics-sharelatex": "^2.6.0", + "logger-sharelatex": "^1.9.1", + "metrics-sharelatex": "^2.6.2", "mongojs": "^3.1.0", "redis-sharelatex": "^1.0.11", "request": "^2.88.2", From 17c2add0cf55648aae24dd85643ef8c97eb44be2 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 30 Mar 2020 11:31:43 +0200 Subject: [PATCH 590/769] [misc] track redis pub/sub payload sizes on publish --- .../app/coffee/RealTimeRedisManager.coffee | 8 ++++++-- .../RealTimeRedisManager/RealTimeRedisManagerTests.coffee | 3 +++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index 6fd48033da..775132f1b6 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -39,9 +39,13 @@ module.exports = RealTimeRedisManager = # create a unique message id using a counter message_id = "doc:#{HOST}:#{RND}-#{COUNT++}" data?._id = message_id + + blob = JSON.stringify(data) + metrics.summary "redis.publish.applied-ops", blob.length + # publish on separate channels for individual projects and docs when # 
configured (needs realtime to be configured for this too). if Settings.publishOnIndividualChannels - pubsubClient.publish "applied-ops:#{data.doc_id}", JSON.stringify(data) + pubsubClient.publish "applied-ops:#{data.doc_id}", blob else - pubsubClient.publish "applied-ops", JSON.stringify(data) + pubsubClient.publish "applied-ops", blob diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee index 13e532736e..429a03b971 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee @@ -90,3 +90,6 @@ describe "RealTimeRedisManager", -> it "should send the op with a message id", -> @pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true + + it "should track the payload size", -> + @metrics.summary.calledWith("redis.publish.applied-ops", JSON.stringify({op:"thisop",_id:@message_id}).length).should.equal true From c2b050e2869aa039f888dc66a69c2c3d096e175d Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 31 Mar 2020 10:21:50 +0100 Subject: [PATCH 591/769] bump redis to 1.0.12 --- services/document-updater/package-lock.json | 14 +++++++------- services/document-updater/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 410fa44a09..09c2554bac 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1876,9 +1876,9 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "ioredis": { - "version": "4.14.1", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.14.1.tgz", - "integrity": "sha512-94W+X//GHM+1GJvDk6JPc+8qlM7Dul+9K+lg3/aHixPN7ZGkW6qlvX0DG6At9hWtH2v3B32myfZqWoANUJYGJA==", + "version": "4.16.1", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.16.1.tgz", + "integrity": "sha512-g76Mm9dE7BLuewncu1MimGZw5gDDjDwjoRony/VoSxSJEKAhuYncDEwYKYjtHi2NWsTNIB6XXRjE64uVa/wpKQ==", "requires": { "cluster-key-slot": "^1.1.0", "debug": "^4.1.1", @@ -2792,13 +2792,13 @@ } }, "redis-sharelatex": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.11.tgz", - "integrity": "sha512-rKXPVLmFC9ycpRc5e4rULOwi9DB0LqRcWEiUxQuJNSVgcqCxpGqVw+zwivo+grk3G2tGpduh3/8y+4KVHWOntw==", + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.12.tgz", + "integrity": "sha512-Z+LDGaRNgZ+NiDaCC/R0N3Uy6SCtbKXqiXlvCwAbIQRSZUc69OVx/cQ3i5qDF7zeERhh+pnTd+zGs8nVfa5p+Q==", "requires": { "async": "^2.5.0", "coffee-script": "1.8.0", - "ioredis": "~4.14.1", + "ioredis": "~4.16.1", "redis-sentinel": "0.1.1", "underscore": "1.7.0" }, diff --git a/services/document-updater/package.json b/services/document-updater/package.json index b0277ecea1..c27bc47799 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -28,7 +28,7 @@ "logger-sharelatex": "^1.9.1", "metrics-sharelatex": "^2.5.1", "mongojs": "^2.6.0", - "redis-sharelatex": "^1.0.11", + "redis-sharelatex": "^1.0.12", "request": "^2.47.0", "requestretry": "^4.1.0", "settings-sharelatex": "^1.1.0" From 
00b11bda96c6c1d2d4ec49e68741f5ed038d5913 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 1 Apr 2020 14:50:55 +0100 Subject: [PATCH 592/769] use separate loop for pendingUpdates metric --- .../document-updater/app/coffee/RealTimeRedisManager.coffee | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.coffee index 6fd48033da..1bfa509078 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.coffee +++ b/services/document-updater/app/coffee/RealTimeRedisManager.coffee @@ -21,6 +21,9 @@ module.exports = RealTimeRedisManager = multi.exec (error, replys) -> return callback(error) if error? jsonUpdates = replys[0] + for jsonUpdate in jsonUpdates + # record metric for each update removed from queue + metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"} updates = [] for jsonUpdate in jsonUpdates try @@ -28,8 +31,6 @@ module.exports = RealTimeRedisManager = catch e return callback e updates.push update - # record metric for updates removed from queue - metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"} callback error, updates getUpdatesLength: (doc_id, callback)-> From 3a8c362fbaa9f2e875db6322006767a61d39b5e9 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 1 Apr 2020 15:59:25 +0100 Subject: [PATCH 593/769] add doclines set/del metric --- services/document-updater/app/coffee/RedisManager.coffee | 8 ++++++-- .../unit/coffee/RedisManager/RedisManagerTests.coffee | 6 ++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index 6b4dc20257..d784be8495 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -41,7 +41,7 @@ module.exports = RedisManager = logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message return callback(error) docHash = RedisManager._computeHash(docLines) - metrics.summary "redis.setDoc", docLines.length, {status: "set"} + metrics.summary "redis.docLines", docLines.length, {status: "set"} logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> if error? @@ -74,6 +74,7 @@ module.exports = RedisManager = _callback() multi = rclient.multi() + multi.strlen keys.docLines(doc_id:doc_id) multi.del keys.docLines(doc_id:doc_id) multi.del keys.projectKey(doc_id:doc_id) multi.del keys.docVersion(doc_id:doc_id) @@ -85,8 +86,11 @@ module.exports = RedisManager = multi.del keys.unflushedTime(doc_id:doc_id) multi.del keys.lastUpdatedAt(doc_id: doc_id) multi.del keys.lastUpdatedBy(doc_id: doc_id) - multi.exec (error) -> + multi.exec (error, response) -> return callback(error) if error? 
+ length = response?[0] + if length > 0 + metrics.summary "redis.docLines", length, {status: "del"} multi = rclient.multi() multi.srem keys.docsInProject(project_id:project_id), doc_id multi.del keys.projectState(project_id:project_id) diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 2ac8ac9c16..254de8d0a7 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -671,11 +671,17 @@ describe "RedisManager", -> describe "removeDocFromMemory", -> beforeEach (done) -> + @multi.strlen = sinon.stub() @multi.del = sinon.stub() @multi.srem = sinon.stub() @multi.exec.yields() @RedisManager.removeDocFromMemory @project_id, @doc_id, done + it "should check the length of the current doclines", -> + @multi.strlen + .calledWith("doclines:#{@doc_id}") + .should.equal true + it "should delete the lines", -> @multi.del .calledWith("doclines:#{@doc_id}") From 2b72ec49a10e4d599858da32122cc0817f1bebac Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 2 Apr 2020 11:33:52 +0100 Subject: [PATCH 594/769] add comments for redis metrics --- services/document-updater/app/coffee/RedisManager.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index d784be8495..fa2e312f33 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -41,6 +41,7 @@ module.exports = RedisManager = logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message return callback(error) docHash = RedisManager._computeHash(docLines) + # record bytes sent to redis metrics.summary "redis.docLines", docLines.length, {status: "set"} logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis" RedisManager._serializeRanges ranges, (error, ranges) -> @@ -90,6 +91,7 @@ module.exports = RedisManager = return callback(error) if error? length = response?[0] if length > 0 + # record bytes freed in redis metrics.summary "redis.docLines", length, {status: "del"} multi = rclient.multi() multi.srem keys.docsInProject(project_id:project_id), doc_id From beb3691795e3f7432620d4a1114a8392b0cfe94f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 2 Apr 2020 11:34:19 +0100 Subject: [PATCH 595/769] add metrics for redis get/update --- services/document-updater/app/coffee/RedisManager.coffee | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index fa2e312f33..f212cfbadc 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -132,6 +132,9 @@ module.exports = RedisManager = if timeSpan > MAX_REDIS_REQUEST_LENGTH error = new Error("redis getDoc exceeded timeout") return callback(error) + # record bytes loaded from redis + if docLines? + metrics.summary "redis.docLines", docLines.length, {status: "get"} # check sha1 hash value if present if docLines? and storedHash? 
computedHash = RedisManager._computeHash(docLines) @@ -247,7 +250,8 @@ module.exports = RedisManager = opVersions = appliedOps.map (op) -> op?.v logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis" - + # record bytes sent to redis in update + metrics.summary "redis.docLines", newDocLines.length, {status: "update"} RedisManager._serializeRanges ranges, (error, ranges) -> if error? logger.error {err: error, doc_id}, error.message From c095feaa06a6ac9b89114f96279063549e172e6d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 6 Apr 2020 10:43:53 +0100 Subject: [PATCH 596/769] upgrade logger-sharelatex --- services/document-updater/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 72bb0cd868..0fb47d191f 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -25,7 +25,7 @@ "coffee-script": "~1.7.0", "express": "3.11.0", "lodash": "^4.17.13", - "logger-sharelatex": "^1.7.0", + "logger-sharelatex": "^1.9.1", "metrics-sharelatex": "^2.6.2", "mongojs": "^2.6.0", "redis-sharelatex": "^1.0.11", From 8e210fe44140ca5fee302410924b2282068f72e2 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 21 Apr 2020 14:41:30 +0100 Subject: [PATCH 597/769] update unit tests --- .../coffee/ApplyingUpdatesToADocTests.coffee | 13 +++++++------ .../ApplyingUpdatesToProjectStructureTests.coffee | 14 +++++++------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee index 0b28dea7a7..489f8d98eb 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee @@ -4,7 +4,8 @@ chai.should() expect = chai.expect async = require "async" Settings = require('settings-sharelatex') -rclient_history = require("redis-sharelatex").createClient(Settings.redis.history) +rclient_history = require("redis-sharelatex").createClient(Settings.redis.history) # note: this is track changes, not project-history +rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history) rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) Keys = Settings.redis.documentupdater.key_schema HistoryKeys = Settings.redis.history.key_schema @@ -65,14 +66,14 @@ describe "Applying updates to a doc", -> return null it "should push the applied updates to the project history changes api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => throw error if error? JSON.parse(updates[0]).op.should.deep.equal @update.op done() return null it "should set the first op timestamp", (done) -> - rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => + rclient_project_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => throw error if error? 
result.should.be.within(@startTime, Date.now()) @firstOpTimestamp = result @@ -90,7 +91,7 @@ describe "Applying updates to a doc", -> return null it "should not change the first op timestamp", (done) -> - rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => + rclient_project_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => throw error if error? result.should.equal @firstOpTimestamp done() @@ -130,7 +131,7 @@ describe "Applying updates to a doc", -> return null it "should push the applied updates to the project history changes api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => JSON.parse(updates[0]).op.should.deep.equal @update.op done() return null @@ -164,7 +165,7 @@ describe "Applying updates to a doc", -> return null it "should push the applied updates to the project history changes api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => JSON.parse(updates[0]).op.should.deep.equal @update.op done() return null diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee index cbb9fd9ea5..e18aa2e6a1 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee @@ -2,7 +2,7 @@ sinon = require "sinon" chai = require("chai") chai.should() Settings = require('settings-sharelatex') -rclient_history = require("redis-sharelatex").createClient(Settings.redis.history) +rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history) ProjectHistoryKeys = Settings.redis.project_history.key_schema MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" @@ -30,7 +30,7 @@ describe "Applying updates to a project's structure", -> setTimeout done, 200 it "should push the applied file renames to the project history api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => throw error if error? update = JSON.parse(updates[0]) @@ -61,7 +61,7 @@ describe "Applying updates to a project's structure", -> return null it "should push the applied doc renames to the project history api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => throw error if error? update = JSON.parse(updates[0]) @@ -97,7 +97,7 @@ describe "Applying updates to a project's structure", -> return null it "should push the applied doc renames to the project history api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => throw error if error? 
update = JSON.parse(updates[0]) @@ -141,7 +141,7 @@ describe "Applying updates to a project's structure", -> return null it "should push the applied doc renames to the project history api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => throw error if error? update = JSON.parse(updates[0]) @@ -194,7 +194,7 @@ describe "Applying updates to a project's structure", -> return null it "should push the file addition to the project history api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => throw error if error? update = JSON.parse(updates[0]) @@ -222,7 +222,7 @@ describe "Applying updates to a project's structure", -> return null it "should push the doc addition to the project history api", (done) -> - rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => + rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => throw error if error? update = JSON.parse(updates[0]) From af93193d6e05fca73b5f3f658ac2e6e93078240f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 21 Apr 2020 14:43:48 +0100 Subject: [PATCH 598/769] remove new_project_history and use project_history instead --- .../app/coffee/ProjectHistoryRedisManager.coffee | 3 +-- .../document-updater/config/settings.defaults.coffee | 11 ----------- .../ProjectHistoryRedisManagerTests.coffee | 2 -- 3 files changed, 1 insertion(+), 15 deletions(-) diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee index 31842a1c8f..af75487a90 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee @@ -1,7 +1,6 @@ Settings = require('settings-sharelatex') projectHistoryKeys = Settings.redis?.project_history?.key_schema -#rclient = require("redis-sharelatex").createClient(Settings.redis.project_history) -rclient = require("./RedisMigrationManager").createClient(Settings.redis.project_history, Settings.redis.new_project_history) +rclient = require("redis-sharelatex").createClient(Settings.redis.project_history) logger = require('logger-sharelatex') metrics = require('./Metrics') diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee index 6724aa6a9a..2fb398251a 100755 --- a/services/document-updater/config/settings.defaults.coffee +++ b/services/document-updater/config/settings.defaults.coffee @@ -37,23 +37,12 @@ module.exports = docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" project_history: - port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" - projectHistoryFirstOpTimestamp: ({project_id}) -> 
"ProjectHistory:FirstOpTimestamp:{#{project_id}}" - - new_project_history: port: process.env["NEW_HISTORY_REDIS_PORT"] or "6379" host: process.env["NEW_HISTORY_REDIS_HOST"] password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or "" key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" - projectHistoryMigrationKey: ({project_id}) -> "ProjectHistory:MigrationKey:{#{project_id}}" - migration_phase: process.env["PROJECT_HISTORY_MIGRATION_PHASE"] or "prepare" redisOptions: keepAlive: 100 diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee index 7199002162..9810b77d5f 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee @@ -24,8 +24,6 @@ describe "ProjectHistoryRedisManager", -> } "redis-sharelatex": createClient: () => @rclient - "./RedisMigrationManager": - createClient: () => @rclient "logger-sharelatex": log:-> "./Metrics": @metrics = { summary: sinon.stub()} From 248edc03faaa36e6464e28b79d1fcc464ef6fe8e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 21 Apr 2020 14:44:19 +0100 Subject: [PATCH 599/769] add comment about the two history clients --- services/document-updater/app/coffee/RedisManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee index f212cfbadc..3eeed78ffb 100644 --- a/services/document-updater/app/coffee/RedisManager.coffee +++ b/services/document-updater/app/coffee/RedisManager.coffee @@ -23,7 +23,7 @@ MEGABYTES = 1024 * 1024 MAX_RANGES_SIZE = 3 * MEGABYTES keys = Settings.redis.documentupdater.key_schema -historyKeys = Settings.redis.history.key_schema +historyKeys = Settings.redis.history.key_schema # note: this is track changes, not project-history module.exports = RedisManager = rclient: rclient From a51f61a5558a276585b1503168d7f8cf5b538c4a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 21 Apr 2020 14:48:47 +0100 Subject: [PATCH 600/769] remove redis migration code --- .../app/coffee/RedisMigrationManager.coffee | 224 ------------ .../coffee/RedisMigrationManagerTests.coffee | 320 ------------------ 2 files changed, 544 deletions(-) delete mode 100644 services/document-updater/app/coffee/RedisMigrationManager.coffee delete mode 100644 services/document-updater/test/acceptance/coffee/RedisMigrationManagerTests.coffee diff --git a/services/document-updater/app/coffee/RedisMigrationManager.coffee b/services/document-updater/app/coffee/RedisMigrationManager.coffee deleted file mode 100644 index d11024fc94..0000000000 --- a/services/document-updater/app/coffee/RedisMigrationManager.coffee +++ /dev/null @@ -1,224 +0,0 @@ -logger = require "logger-sharelatex" -Settings = require "settings-sharelatex" -redis = require("redis-sharelatex") -LockManager = require("./LockManager") -metrics = require "./Metrics" -async = require("async") - -# The aim is to migrate the project history queues -# ProjectHistory:Ops:{project_id} from the existing redis to a new redis. -# -# This has to work in conjunction with changes in project history. 
-# -# The basic principles are: -# -# - project history is modified to read from an 'old' and 'new' queue. It reads -# from the 'old' queue first, and when that queue is empty it reads from the -# 'new' queue. -# - docupdater will migrate to writing to the 'new' queue when the 'old' queue -# is empty. -# -# Some facts about the update process: -# -# - project history has a lock on the project-id, so each queue is processed in -# isolation -# - docupdaters take a lock on the doc_id but not the project_id, therefore -# multiple docupdaters can be appending to the queue for a project at the same -# time (provided the updates for individual docs are in order this is -# acceptable) -# - as we want to do this without shutting down the site, we have to take into -# account that different versions of the code will be running while deploys -# are in progress. -# -# The migration has to be carried out with the following constraint: -# -# - a docupdater should never write to the "old" queue when there are updates in -# the "new" queue (there is a strict ordering on the versions, new > old) -# -# The deployment process for docupdater will be -# -# - add a project-level lock to the queuing in docupdater -# - use a per-project migration flag to determine when to write to the new redis -# - set the migration flag for projects with an empty queue in the old redis -# - when all docupdaters respect the flag, make a new deploy which starts to set -# the flag -# - when all docupdaters are setting the flag (and writing to the new redis), -# finish the migration by writing all data to the new redis -# -# Final stage -# -# When all the queues are migrated, remove the migration code and return to a -# single client pointing at the new redis. Delete the -# ProjectHistory:MigrationKey:* entries in the new redis. -# -# Rollback -# -# Under the scheme above a project should only ever have data in the old redis -# or the new redis, but never both at the same time. -# -# Two scenarios: -# -# Hard rollback -# -# If we want to roll back to the old redis immediately, we need to get the data -# out of the new queues and back into the old queues, before appending to the -# old queues again. The actions to do this are: -# -# - close the site -# - revert docupdater so it only writes to the original redis (there will now -# be some data in the new redis for some projects which we need to recover) -# - run a script to move the new queues back into the old redis -# - revert project history to only read from the original redis -# -# Graceful rollback -# -# If we are prepared to keep the new redis running, but not add new projects to -# it we can do the following: -# -# - deploy all docupdaters to update from the "switch" phase into the -# "rollback" phase (projects in the new redis will continue to send data -# there, projects not yet migrated will continue to go to the old redis) -# - deploy project history with the "old queue" pointing to the new redis and -# the "new queue" to the old redis to clear the new queue before processing -# the new queue (i.e. add a rollback:true property in new_project_history in -# the project-history settings via the environment variable -# MIGRATION_PHASE="rollback"). -# - projects will now clear gradually from the new redis back to the old redis -# - get a list of all the projects in the new redis and flush them, which will -# cause the new queues to be cleared and the old redis to be used for those -# projects.
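# A minimal sketch of the queue-routing rule that the phases above describe,
# assuming the three existence flags are computed as in getMigrationStatus
# below; chooseQueue is a hypothetical helper for illustration only, not part
# of this module (findQueue below implements the real logic, including setting
# or deleting the migration key as a side effect).
chooseQueue = (phase, migrationKeyExists, oldQueueExists, newQueueExists) ->
  switch phase
    when "prepare"
      # honour a flag set by another docupdater, but never set it ourselves
      if migrationKeyExists then "new" else "old"
    when "switch"
      # prefer the flag; otherwise drain the old queue before cutting over
      if migrationKeyExists then "new"
      else if oldQueueExists then "old"
      else "new" # findQueue also sets the migration key at this point
    when "rollback"
      # stay on the new redis only while it still holds queued updates
      if migrationKeyExists and newQueueExists then "new" else "old"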
- -getProjectId = (key) -> - key.match(/\{([0-9a-f]{24})\}/)[1] - -class Multi - constructor: (@migrationClient) -> - @command_list = [] - @queueKey = null - rpush: (args...) -> - @queueKey = args[0] - @updates_count = args.length - 1 - @command_list.push { command:'rpush', args: args} - setnx: (args...) -> - @command_list.push { command: 'setnx', args: args} - exec: (callback) -> - # decide which client to use - project_id = getProjectId(@queueKey) - # Put a lock around finding and updating the queue to avoid time-of-check to - # time-of-use problems. When running in the "switch" phase we need a lock to - # guarantee the order of operations. (Example: docupdater A sees an old - # queue at t=t0 and pushes onto it at t=t1, project history clears the queue - # between t0 and t1, and docupdater B sees the empty queue, sets the - # migration flag and pushes onto the new queue at t2. Without a lock it's - # possible to have t2 < t1 if docupdater A is slower than B - then there - # would be entries in the old and new queues, which we want to avoid.) - LockManager.getLock project_id, (error, lockValue) => - return callback(error) if error? - releaseLock = (args...) => - LockManager.releaseLock project_id, lockValue, (lockError) -> - return callback(lockError) if lockError? - callback(args...) - @migrationClient.findQueue @queueKey, (err, rclient) => - return releaseLock(err) if err? - # add metric for updates - dest = (if rclient == @migrationClient.rclient_new then "new" else "old") - metrics.count "migration", @updates_count, 1, {status: "#{@migrationClient.migration_phase}-#{dest}"} - multi = rclient.multi() - for entry in @command_list - multi[entry.command](entry.args...) - multi.exec releaseLock - -class MigrationClient - constructor: (@old_settings, @new_settings) -> - @rclient_old = redis.createClient(@old_settings) - @rclient_new = redis.createClient(@new_settings) - @new_key_schema = new_settings.key_schema - # check that migration phase is valid on startup - logger.warn {migration_phase: @getMigrationPhase()}, "running with RedisMigrationManager" - - getMigrationPhase: () -> - @migration_phase = @new_settings.migration_phase # FIXME: allow setting migration phase while running for testing - throw new Error("invalid migration phase") unless @migration_phase in ['prepare', 'switch', 'rollback'] - return @migration_phase - - getMigrationStatus: (key, migrationKey, callback) -> - async.series [ - (cb) => @rclient_new.exists migrationKey, cb - (cb) => @rclient_new.exists key, cb - (cb) => @rclient_old.exists key, cb - ], (err, result) -> - return callback(err) if err? - migrationKeyExists = result[0] > 0 - newQueueExists = result[1] > 0 - oldQueueExists = result[2] > 0 - callback(null, migrationKeyExists, newQueueExists, oldQueueExists) - - findQueue: (key, callback) -> - project_id = getProjectId(key) - migrationKey = @new_key_schema.projectHistoryMigrationKey({project_id}) - migration_phase = @getMigrationPhase() # allow setting migration phase while running for testing - @getMigrationStatus key, migrationKey, (err, migrationKeyExists, newQueueExists, oldQueueExists) => - return callback(err) if err? - # In all cases, if the migration key exists we must always write to the - # new redis, unless we are rolling back. - if migration_phase is "prepare" - # in this phase we prepare for the switch, when some docupdaters will - # start setting the migration flag. We monitor the migration key and - # write to the new redis if the key is present, but we do not set the - # migration key. 
At this point no writes will be going into the new - # redis. When all the docupdaters are in the "prepare" phase we can - # begin deploying the "switch" phase. - if migrationKeyExists - logger.debug {project_id}, "using new client because migration key exists" - return callback(null, @rclient_new) - else - logger.debug {project_id}, "using old client because migration key does not exist" - return callback(null, @rclient_old) - else if migration_phase is "switch" - # As we deploy the "switch" phase new docupdaters will set the migration - # flag for projects which have an empty queue in the old redis, and - # write updates into the new redis. Existing docupdaters still in the - # "prepare" phase will pick up the migration flag and write new updates - # into the new redis when appropriate. When this deploy is complete - # writes will be going into the new redis for projects with an empty - # queue in the old redis. We have to remain in the switch phase until - # all projects are flushed from the old redis. - if migrationKeyExists - logger.debug {project_id}, "using new client because migration key exists" - return callback(null, @rclient_new) - else - if oldQueueExists - logger.debug {project_id}, "using old client because old queue exists" - return callback(null, @rclient_old) - else - @rclient_new.setnx migrationKey, "NEW", (err) => - return callback(err) if err? - logger.debug {key: key}, "switching to new redis because old queue is empty" - return callback(null, @rclient_new) - else if migration_phase is "rollback" - # If we need to roll back gracefully we do the opposite of the "switch" - # phase. We use the new redis when the migration key is set and the - # queue exists in the new redis, but if the queue in the new redis is - # empty we delete the migration key and send further updates to the old - # redis. - if migrationKeyExists - if newQueueExists - logger.debug {project_id}, "using new client because migration key exists and new queue is present" - return callback(null, @rclient_new) - else - @rclient_new.del migrationKey, (err) => - return callback(err) if err? - logger.debug {key: key}, "switching to old redis in rollback phase because new queue is empty" - return callback(null, @rclient_old) - else - logger.debug {project_id}, "using old client because migration key does not exist" - return callback(null, @rclient_old) - else - logger.error {key: key, migration_phase: migration_phase}, "unknown migration phase" - callback(new Error('invalid migration phase')) - multi: () -> - new Multi(@) - -module.exports = RedisMigrationManager = - createClient: (args...) -> - new MigrationClient(args...) 
diff --git a/services/document-updater/test/acceptance/coffee/RedisMigrationManagerTests.coffee b/services/document-updater/test/acceptance/coffee/RedisMigrationManagerTests.coffee deleted file mode 100644 index 2684a4a3d8..0000000000 --- a/services/document-updater/test/acceptance/coffee/RedisMigrationManagerTests.coffee +++ /dev/null @@ -1,320 +0,0 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() -expect = chai.expect -async = require "async" -Settings = require('settings-sharelatex') -rclient_old = require("redis-sharelatex").createClient(Settings.redis.project_history) -rclient_new = require("redis-sharelatex").createClient(Settings.redis.new_project_history) -rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -Keys = Settings.redis.documentupdater.key_schema -HistoryKeys = Settings.redis.history.key_schema -ProjectHistoryKeys = Settings.redis.project_history.key_schema -NewProjectHistoryKeys = Settings.redis.new_project_history.key_schema - -MockTrackChangesApi = require "./helpers/MockTrackChangesApi" -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" - -describe "RedisMigrationManager", -> - before (done) -> - @lines = ["one", "two", "three"] - @version = 42 - @update = - doc: @doc_id - op: [{ - i: "one and a half\n" - p: 4 - }] - v: @version - DocUpdaterApp.ensureRunning(done) - - describe "when the migration phase is 'prepare' (default)", -> - - describe "when there is no migration flag", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? - setTimeout done, 200 - return null - - after -> - MockWebApi.getDocument.restore() - - it "should push the applied updates to old redis", (done) -> - rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - JSON.parse(updates[0]).op.should.deep.equal @update.op - done() - return null - - it "should not push the applied updates to the new redis", (done) -> - rclient_new.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => - result.should.equal 0 - done() - return null - - it "should not set the migration flag for the project", (done) -> - rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => - result.should.equal 0 - done() - return null - - describe "when the migration flag is set for the project", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - - rclient_new.set NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), '1', (error) => - throw error if error? - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? 
- setTimeout done, 200 - return null - - after (done) -> - MockWebApi.getDocument.restore() - rclient_new.del NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), done - return null - - it "should push the applied updates to the new redis", (done) -> - rclient_new.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - JSON.parse(updates[0]).op.should.deep.equal @update.op - done() - return null - - it "should not push the applied updates to the old redis", (done) -> - rclient_old.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => - result.should.equal 0 - done() - return null - - it "should keep the migration flag for the project", (done) -> - rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => - result.should.equal 1 - done() - return null - - describe "when the migration phase is 'switch'", -> - before -> - Settings.redis.new_project_history.migration_phase = 'switch' - - describe "when the old queue is empty", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? - setTimeout done, 200 - return null - - after -> - MockWebApi.getDocument.restore() - - it "should push the applied updates to the new redis", (done) -> - rclient_new.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - JSON.parse(updates[0]).op.should.deep.equal @update.op - done() - return null - - it "should not push the applied updates to the old redis", (done) -> - rclient_old.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) => - result.should.equal 0 - done() - return null - - it "should set the migration flag for the project", (done) -> - rclient_new.get NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) => - result.should.equal "NEW" - done() - return null - - describe "when the old queue is not empty", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - rclient_old.rpush ProjectHistoryKeys.projectHistoryOps({@project_id}), JSON.stringify({op: "dummy-op"}), (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? 
-              setTimeout done, 200
-      return null
-
-    after ->
-      MockWebApi.getDocument.restore()
-
-    it "should push the applied updates to the old redis", (done) ->
-      rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
-        JSON.parse(updates[0]).op.should.deep.equal "dummy-op"
-        JSON.parse(updates[1]).op.should.deep.equal @update.op
-        done()
-      return null
-
-    it "should not push the applied updates to the new redis", (done) ->
-      rclient_new.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) =>
-        result.should.equal 0
-        done()
-      return null
-
-    it "should not set the migration flag for the project", (done) ->
-      rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) =>
-        result.should.equal 0
-        done()
-      return null
-
-  describe "when the migration flag is set for the project", ->
-    before (done) ->
-      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
-
-      rclient_new.set NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), '1', (error) =>
-        throw error if error?
-        MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version}
-        DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
-          throw error if error?
-          sinon.spy MockWebApi, "getDocument"
-          DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) ->
-            throw error if error?
-            setTimeout done, 200
-      return null
-
-    after (done) ->
-      MockWebApi.getDocument.restore()
-      rclient_new.del NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), done
-      return null
-
-    it "should push the applied updates to the new redis", (done) ->
-      rclient_new.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
-        JSON.parse(updates[0]).op.should.deep.equal @update.op
-        done()
-      return null
-
-    it "should not push the applied updates to the old redis", (done) ->
-      rclient_old.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) =>
-        result.should.equal 0
-        done()
-      return null
-
-    it "should keep the migration flag for the project", (done) ->
-      rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) =>
-        result.should.equal 1
-        done()
-      return null
-
-  describe "when the migration phase is 'rollback'", ->
-    before ->
-      Settings.redis.new_project_history.migration_phase = 'rollback'
-
-    describe "when the old queue is empty", ->
-      before (done) ->
-        [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
-
-        MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version}
-        DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
-          throw error if error?
-          sinon.spy MockWebApi, "getDocument"
-          DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) ->
-            throw error if error?
-            setTimeout done, 200
-        return null
-
-      after ->
-        MockWebApi.getDocument.restore()
-
-      it "should push the applied updates to the old redis", (done) ->
-        rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
-          JSON.parse(updates[0]).op.should.deep.equal @update.op
-          done()
-        return null
-
-      it "should not push the applied updates to the new redis", (done) ->
-        rclient_new.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) =>
-          result.should.equal 0
-          done()
-        return null
-
-    describe "when the new queue is not empty", ->
-      before (done) ->
-        [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
-
-        MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version}
-        DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
-          throw error if error?
-          sinon.spy MockWebApi, "getDocument"
-          rclient_new.rpush ProjectHistoryKeys.projectHistoryOps({@project_id}), JSON.stringify({op: "dummy-op"}), (error) =>
-            throw error if error?
-            DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) ->
-              throw error if error?
-              setTimeout done, 200
-        return null
-
-      after ->
-        MockWebApi.getDocument.restore()
-
-      it "should push the applied updates to the old redis", (done) ->
-        rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
-          JSON.parse(updates[0]).op.should.deep.equal @update.op
-          done()
-        return null
-
-      it "should not push the applied updates to the new redis", (done) ->
-        rclient_new.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
-          JSON.parse(updates[0]).op.should.deep.equal "dummy-op"
-          updates.length.should.equal 1
-          done()
-        return null
-
-    describe "when the migration flag is set for the project", ->
-      before (done) ->
-        [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
-
-        rclient_new.set NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), '1', (error) =>
-          throw error if error?
-          MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version}
-          DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
-            throw error if error?
-            sinon.spy MockWebApi, "getDocument"
-            DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) ->
-              throw error if error?
-              setTimeout done, 200
-        return null
-
-      after (done) ->
-        MockWebApi.getDocument.restore()
-        rclient_new.del NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), done
-        return null
-
-      it "should push the applied updates to the old redis", (done) ->
-        rclient_old.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
-          JSON.parse(updates[0]).op.should.deep.equal @update.op
-          done()
-        return null
-
-      it "should not push the applied updates to the new redis", (done) ->
-        rclient_new.exists ProjectHistoryKeys.projectHistoryOps({@project_id}), (error, result) =>
-          result.should.equal 0
-          done()
-        return null
-
-      it "should delete the migration flag for the project", (done) ->
-        rclient_new.exists NewProjectHistoryKeys.projectHistoryMigrationKey({@project_id}), (error, result) =>
-          result.should.equal 0
-          done()
-        return null
-

From 61da130cf401ef9eb2fe892553b74a660298dec8 Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Wed, 22 Apr 2020 13:50:39 +0100
Subject: [PATCH 601/769] keep maxRetriesPerRequest for project_history redis
 config

---
 services/document-updater/config/settings.defaults.coffee | 1 +
 1 file changed, 1 insertion(+)

diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee
index 2fb398251a..1f1c951e1f 100755
--- a/services/document-updater/config/settings.defaults.coffee
+++ b/services/document-updater/config/settings.defaults.coffee
@@ -40,6 +40,7 @@ module.exports =
       port: process.env["NEW_HISTORY_REDIS_PORT"] or "6379"
       host: process.env["NEW_HISTORY_REDIS_HOST"]
       password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or ""
+      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20")
       key_schema:
         projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}"
         projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}"

From 2e24d1670c84c64cfe153383348e2b4299085cf7 Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Wed, 22 Apr 2020 13:51:14 +0100
Subject: [PATCH 602/769] remove old unused ioredis keepalive option

---
 services/document-updater/config/settings.defaults.coffee | 2 --
 1 file changed, 2 deletions(-)

diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee
index 1f1c951e1f..f387a56122 100755
--- a/services/document-updater/config/settings.defaults.coffee
+++ b/services/document-updater/config/settings.defaults.coffee
@@ -44,8 +44,6 @@ module.exports =
       key_schema:
         projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}"
         projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}"
-      redisOptions:
-        keepAlive: 100

     lock:
       port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"

From 79c934759c37e16e42cfa4ea16a015f21a9c1721 Mon Sep 17 00:00:00 2001
From: Brian Gough
Date: Wed, 22 Apr 2020 14:04:28 +0100
Subject: [PATCH 603/769] add default redis settings for project history

---
 services/document-updater/config/settings.defaults.coffee | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee
index f387a56122..0ced9eeedd 100755
--- a/services/document-updater/config/settings.defaults.coffee
+++ b/services/document-updater/config/settings.defaults.coffee
@@ -37,9 +37,9 @@ module.exports =
         docsWithHistoryOps:
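The acceptance tests deleted above are the best record of the migration routing rule they enforced: in the 'switch' phase a project keeps queueing to the old redis until it is flagged, after which its ops go only to the new redis, while in the 'rollback' phase everything returns to the old redis and any migration flag is removed. A compact sketch of that rule follows; the names are hypothetical, not the service's implementation, and the switch-on-empty-queue branch is inferred from the surrounding suite rather than from the cases quoted above.

    // Sketch only; routeProjectHistoryOp and its inputs are invented names.
    function routeProjectHistoryOp({ migrationPhase, flagIsSet, oldQueueEmpty }) {
      if (migrationPhase === 'switch') {
        // a flagged project has already moved: ops go to the new redis only,
        // and the flag stays set
        if (flagIsSet) return { pushTo: 'new', flag: 'keep' }
        // an unflagged project keeps draining to the old redis until its old
        // queue is empty, at which point it can be flagged and switched over
        return oldQueueEmpty
          ? { pushTo: 'new', flag: 'set' }
          : { pushTo: 'old', flag: 'none' }
      }
      if (migrationPhase === 'rollback') {
        // rollback always routes to the old redis, leaves anything already
        // queued on the new redis untouched, and deletes the flag if present
        return { pushTo: 'old', flag: flagIsSet ? 'delete' : 'none' }
      }
      throw new Error(`unknown migration phase: ${migrationPhase}`)
    }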
({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" project_history: - port: process.env["NEW_HISTORY_REDIS_PORT"] or "6379" - host: process.env["NEW_HISTORY_REDIS_HOST"] - password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or "" + port: process.env["NEW_HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" + host: process.env["NEW_HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" + password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") key_schema: projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" From 93a5ee063a1163c71994a66237254183a635b386 Mon Sep 17 00:00:00 2001 From: Miguel Serrano Date: Mon, 27 Apr 2020 13:41:23 +0200 Subject: [PATCH 604/769] regenerated lockfile --- services/document-updater/package-lock.json | 64 ++++++++++----------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 7f4120b1ef..72c7eefbae 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -433,7 +433,7 @@ "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" }, "debug": { "version": "3.2.6", @@ -1141,7 +1141,7 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" + "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, "bl": { "version": "2.2.0", @@ -1444,7 +1444,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" + "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, "dashdash": { "version": "1.14.1", @@ -1583,7 +1583,7 @@ "each-series": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz", - "integrity": "sha1-+Ibmxm39sl7x/nNWQUbuXLR4r8s=" + "integrity": "sha512-4MQloCGGCmT5GJZK5ibgJSvTK1c1QSrNlDvLk6fEyRxjZnXjl+NNFfzhfXpmnWh33Owc9D9klrdzCUi7yc9r4Q==" }, "ecc-jsbn": { "version": "0.1.2", @@ -1654,7 +1654,7 @@ "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", + "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", "requires": { "es6-promise": "^4.0.3" } @@ -1800,7 +1800,7 @@ "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" + "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" }, "follow-redirects": { "version": "1.5.10", @@ -2489,12 +2489,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" + "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" }, "lodash.camelcase": { "version": "4.3.0", "resolved": 
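Worth reading alongside the settings change above: each option now falls back from a NEW_HISTORY_* variable to the shared REDIS_* one and finally to a hardcoded default, so a deployment that configures nothing extra simply reuses its main redis. Below is a sketch of how such a block is consumed, assuming the ioredis client (maxRetriesPerRequest is an ioredis option); this is not the service's own connection code.

    const Redis = require('ioredis')

    const rclient = new Redis({
      port: process.env.NEW_HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379',
      host: process.env.NEW_HISTORY_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
      password: process.env.NEW_HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
      // after 20 failed attempts a pending command rejects with an error
      // instead of hanging forever while the connection retries
      maxRetriesPerRequest: parseInt(process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20', 10)
    })

    // e.g. rclient.rpush('ProjectHistory:Ops:{<project_id>}', JSON.stringify(op))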
"https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, "lodash.defaults": { "version": "4.2.0", @@ -2514,12 +2514,12 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" + "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" }, "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" + "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" }, "log-driver": { "version": "1.2.7", @@ -2542,7 +2542,7 @@ "bunyan": { "version": "1.8.12", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", + "integrity": "sha512-dmDUbGHeGcvCDLRFOscZkwx1ZO/aFz3bJOCi5nCgzdhFGPxwK+y5AcDBnqagNGlJZ7lje/l6JUEz9mQcutttdg==", "requires": { "dtrace-provider": "~0.8", "moment": "^2.10.6", @@ -2600,12 +2600,12 @@ "lsmod": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" + "integrity": "sha512-Y+6V75r+mGWzWEPr9h6PFmStielICu5JBHLUg18jCsD2VFmEfgHbq/EgnY4inElsUD9eKL9id1qp34w46rSIKQ==" }, "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", + "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", "requires": { "mersenne": "~0.0.3", "statsd-parser": "~0.0.4" @@ -2659,7 +2659,7 @@ "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" + "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" }, "methods": { "version": "1.1.2", @@ -2684,7 +2684,7 @@ "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" + "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" } } }, @@ -2792,7 +2792,7 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" + "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" }, "moment": { "version": "2.24.0", @@ -3046,7 +3046,7 @@ "parse-mongo-url": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz", - "integrity": "sha1-ZiON9fjnwMjKTNlw1KtqE3PrdbU=" + "integrity": "sha512-7bZUusQIrFLwvsLHBnCz2WKYQ5LKO/LwKPnvQxbMIh9gDx8H5ZsknRmLjZdn6GVdrgVOwqDrZKsY0qDLNmRgcw==" }, "parse-ms": { "version": "2.1.0", @@ -3226,7 +3226,7 @@ "raven": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", + "integrity": "sha512-RYov4wAaflZasWiCrZuizd3jNXxCOkW1WrXgWsGVb8kRpdHNZ+vPY27R6RhVtqzWp+DG9a5l6iP0QUPK4EgzaQ==", "requires": { "cookie": 
"0.3.1", "json-stringify-safe": "5.0.1", @@ -3238,12 +3238,12 @@ "cookie": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + "integrity": "sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==" }, "uuid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" + "integrity": "sha512-rqE1LoOVLv3QrZMjb4NkF5UWlkurCfPyItVnFPNKDDGkHw4dQUdE4zMcLqx28+0Kcf3+bnUk4PisaiRJT4aiaQ==" } } }, @@ -3483,7 +3483,7 @@ "resolve-from": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", - "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" + "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==" }, "retry-axios": { "version": "0.3.2", @@ -3785,7 +3785,7 @@ "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" + "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==" }, "standard-as-callback": { "version": "2.0.1", @@ -3795,7 +3795,7 @@ "statsd-parser": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" + "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" }, "statuses": { "version": "1.5.0", @@ -3849,7 +3849,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" }, "supports-color": { "version": "5.4.0", @@ -3862,7 +3862,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", + "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", "requires": { "bintrees": "1.0.1" } @@ -3915,7 +3915,7 @@ "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" }, "through2": { "version": "3.0.1", @@ -3969,17 +3969,17 @@ "to-mongodb-core": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz", - "integrity": "sha1-NZbsdhOsmtO5ioncua77pWnNJ+s=" + "integrity": "sha512-vfXXcGYFP8+0L5IPOtUzzVIvPE/G3GN0TKa/PRBlzPqYyhm+UxhPmvv634EQgO4Ot8dHbBFihOslMJQclY8Z9A==" }, "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" + "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", + "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", "requires": { "to-space-case": "^1.0.0" } @@ -3987,7 
+3987,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", + "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", "requires": { "to-no-case": "^1.0.0" } @@ -4009,7 +4009,7 @@ "punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" } } }, @@ -4049,7 +4049,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" + "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" }, "unpipe": { "version": "1.0.0", From bb9a5340f4ee7725bf24b0c11be1fd3958351b0b Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:03:41 +0200 Subject: [PATCH 605/769] update build scripts to 2.2.0 --- services/document-updater/Dockerfile | 2 +- services/document-updater/Makefile | 42 +++++++++++++++---- services/document-updater/buildscript.txt | 2 +- .../document-updater/docker-compose.ci.yml | 8 +--- services/document-updater/docker-compose.yml | 7 +--- 5 files changed, 39 insertions(+), 22 deletions(-) diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index eaa771000f..4c269d5a49 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -11,7 +11,7 @@ FROM base as app #wildcard as some files may not be in all repos COPY package*.json npm-shrink*.json /app/ -RUN npm install --quiet +RUN npm ci --quiet COPY . /app diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 64224ff99c..c020c537e6 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -5,6 +5,8 @@ BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) PROJECT_NAME = document-updater +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ BRANCH_NAME=$(BRANCH_NAME) \ @@ -12,6 +14,12 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ MOCHA_GREP=${MOCHA_GREP} \ docker-compose ${DOCKER_COMPOSE_FLAGS} +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) + +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) + clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) @@ -23,23 +31,41 @@ clean: test: test_unit test_acceptance test_unit: - @[ ! 
-d test/unit ] && echo "document-updater has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif -test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif -test_acceptance_debug: test_clean test_acceptance_pre_run test_acceptance_run_debug +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean test_acceptance_run: - @[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif test_acceptance_run_debug: - @[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif -test_clean: - $(DOCKER_COMPOSE) down -v -t 0 +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - @[ ! -f test/acceptance/js/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index d27e04cef1..9b20b21f85 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -7,4 +7,4 @@ document-updater --language=coffeescript --node-version=10.19.0 --public-repo=True ---script-version=2.0.0 +--script-version=2.2.0 diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index a12ccd14a2..d609e7b5ec 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -12,13 +12,13 @@ services: environment: NODE_ENV: test + test_acceptance: build: . image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER environment: ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis - NEW_HISTORY_REDIS_HOST: new_redis MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} @@ -28,11 +28,10 @@ services: condition: service_healthy redis: condition: service_healthy - new_redis: - condition: service_healthy user: node command: npm run test:acceptance:_run + tar: build: . 
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER @@ -43,8 +42,5 @@ services: redis: image: redis - new_redis: - image: redis - mongo: image: mongo:3.6 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index c562cc4eb8..161476b8d1 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -24,7 +24,6 @@ services: environment: ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis - NEW_HISTORY_REDIS_HOST: new_redis MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} @@ -36,15 +35,11 @@ services: condition: service_healthy redis: condition: service_healthy - new_redis: - condition: service_healthy command: npm run test:acceptance redis: image: redis - new_redis: - image: redis - mongo: image: mongo:3.6 + From 569a1f58245c41ba5cfcf1f821bb917215779017 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:04:19 +0200 Subject: [PATCH 606/769] decaffeinate: update build scripts to es --- services/document-updater/.dockerignore | 2 - services/document-updater/.eslintrc | 64 +++++++++++++++++++++++ services/document-updater/.prettierrc | 7 +++ services/document-updater/Dockerfile | 1 - services/document-updater/Jenkinsfile | 7 +++ services/document-updater/Makefile | 15 ++++-- services/document-updater/buildscript.txt | 2 +- services/document-updater/nodemon.json | 7 ++- services/document-updater/package.json | 32 ++++++++---- 9 files changed, 113 insertions(+), 24 deletions(-) create mode 100644 services/document-updater/.eslintrc create mode 100644 services/document-updater/.prettierrc diff --git a/services/document-updater/.dockerignore b/services/document-updater/.dockerignore index 386f26df30..ba1c3442de 100644 --- a/services/document-updater/.dockerignore +++ b/services/document-updater/.dockerignore @@ -5,5 +5,3 @@ gitrev .npm .nvmrc nodemon.json -app.js -**/js/* diff --git a/services/document-updater/.eslintrc b/services/document-updater/.eslintrc new file mode 100644 index 0000000000..2e945d6ffb --- /dev/null +++ b/services/document-updater/.eslintrc @@ -0,0 +1,64 @@ +// this file was auto-generated, do not edit it directly. +// instead run bin/update_build_scripts from +// https://github.com/sharelatex/sharelatex-dev-environment +{ + "extends": [ + "standard", + "prettier", + "prettier/standard" + ], + "parserOptions": { + "ecmaVersion": 2017 + }, + "plugins": [ + "mocha", + "chai-expect", + "chai-friendly" + ], + "env": { + "node": true, + "mocha": true + }, + "rules": { + // Swap the no-unused-expressions rule with a more chai-friendly one + "no-unused-expressions": 0, + "chai-friendly/no-unused-expressions": "error" + }, + "overrides": [ + { + // Test specific rules + "files": ["test/**/*.js"], + "globals": { + "expect": true + }, + "rules": { + // mocha-specific rules + "mocha/handle-done-callback": "error", + "mocha/no-exclusive-tests": "error", + "mocha/no-global-tests": "error", + "mocha/no-identical-title": "error", + "mocha/no-nested-tests": "error", + "mocha/no-pending-tests": "error", + "mocha/no-skipped-tests": "error", + "mocha/no-mocha-arrows": "error", + + // chai-specific rules + "chai-expect/missing-assertion": "error", + "chai-expect/terminating-properties": "error", + + // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests. 
+ // we don't enforce this at the top-level - just in tests to manage `this` scope + // based on mocha's context mechanism + "mocha/prefer-arrow-callback": "error" + } + }, + { + // Backend specific rules + "files": ["app/**/*.js", "app.js", "index.js"], + "rules": { + // don't allow console.log in backend code + "no-console": "error" + } + } + ] +} diff --git a/services/document-updater/.prettierrc b/services/document-updater/.prettierrc new file mode 100644 index 0000000000..24f9ec526f --- /dev/null +++ b/services/document-updater/.prettierrc @@ -0,0 +1,7 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +{ + "semi": false, + "singleQuote": true +} diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 4c269d5a49..4242e7d3be 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -16,7 +16,6 @@ RUN npm ci --quiet COPY . /app -RUN npm run compile:all FROM base diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile index 92db215930..803963773e 100644 --- a/services/document-updater/Jenkinsfile +++ b/services/document-updater/Jenkinsfile @@ -37,6 +37,13 @@ pipeline { } } + stage('Linting') { + steps { + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format' + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint' + } + } + stage('Unit Tests') { steps { sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit' diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index c020c537e6..df879265c9 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -23,12 +23,17 @@ DOCKER_COMPOSE_TEST_UNIT = \ clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - rm -f app.js - rm -rf app/js - rm -rf test/unit/js - rm -rf test/acceptance/js -test: test_unit test_acceptance +format: + $(DOCKER_COMPOSE) run --rm test_unit npm run format + +format_fix: + $(DOCKER_COMPOSE) run --rm test_unit npm run format:fix + +lint: + $(DOCKER_COMPOSE) run --rm test_unit npm run lint + +test: format lint test_unit test_acceptance test_unit: ifneq (,$(wildcard test/unit)) diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index 9b20b21f85..b234a9b3ac 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -4,7 +4,7 @@ document-updater --docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= ---language=coffeescript +--language=es --node-version=10.19.0 --public-repo=True --script-version=2.2.0 diff --git a/services/document-updater/nodemon.json b/services/document-updater/nodemon.json index 98db38d71b..5826281b84 100644 --- a/services/document-updater/nodemon.json +++ b/services/document-updater/nodemon.json @@ -10,10 +10,9 @@ }, "watch": [ - "app/coffee/", - "app.coffee", + "app/js/", + "app.js", "config/" ], - "ext": "coffee" - + "ext": "js" } diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 706a382d6f..43c40ce469 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -7,17 +7,15 @@ "url": "https://github.com/sharelatex/document-updater-sharelatex.git" }, "scripts": { - "compile:app": "([ -e app/coffee ] && coffee 
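Two of the generated choices above show up clearly in a concrete spec: .prettierrc's semi: false and singleQuote: true, and the swap of the core no-unused-expressions rule for the chai-friendly variant, needed because chai assertions are bare member expressions that the stock rule treats as dead code. A hypothetical test in the new style (the suite name is illustrative):

    const { expect } = require('chai')

    describe('LockManager', function () {
      it('reports a free lock', function () {
        const lockIsFree = true
        // the core no-unused-expressions rule flags this line as a no-op;
        // chai-friendly/no-unused-expressions recognises it as an assertion
        expect(lockIsFree).to.be.true
      })
    })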
-m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", - "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", - "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", - "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js", - "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP", - "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", - "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", - "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests", + "start": "node $NODE_APP_OPTIONS app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "nodemon": "nodemon --config nodemon.json", - "compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" + "lint": "node_modules/.bin/eslint .", + "format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different", + "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write" }, "dependencies": { "async": "^2.5.0", @@ -41,6 +39,18 @@ "mocha": "^5.0.1", "sandboxed-module": "~0.2.0", "sinon": "~1.5.2", - "timekeeper": "^2.0.0" + "timekeeper": "^2.0.0", + "eslint": "^6.8.0", + "eslint-config-prettier": "^6.10.0", + "eslint-config-standard": "^14.1.0", + "eslint-plugin-chai-expect": "^2.1.0", + "eslint-plugin-chai-friendly": "^0.5.0", + "eslint-plugin-import": "^2.20.1", + "eslint-plugin-mocha": "^6.3.0", + "eslint-plugin-node": "^11.0.0", + "eslint-plugin-prettier": "^3.1.2", + "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-standard": "^4.0.1", + "prettier-eslint-cli": "^5.0.0" } } From 16c98b3d15a01deb98f963236d0e162f327912a6 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:04:26 +0200 Subject: [PATCH 607/769] decaffeinate: update .gitignore --- services/document-updater/.gitignore | 5 ----- 1 file changed, 5 deletions(-) diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore index dbce694d3b..86e9e7a2fc 100644 --- a/services/document-updater/.gitignore +++ b/services/document-updater/.gitignore @@ -37,12 +37,7 @@ Thumbs.db /node_modules/* -app.js -app/js/* -**/*.map -test/unit/js/* -test/acceptance/js/* forever/ From e2e8593f5704fee0f4e63397536164b9246f9cf9 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:06:28 +0200 Subject: [PATCH 608/769] decaffeinate: add eslint and prettier packages --- services/document-updater/package-lock.json | 2531 +++++++++++++++++++ services/document-updater/package.json | 32 +- 2 files changed, 2550 insertions(+), 13 deletions(-) diff --git a/services/document-updater/package-lock.json 
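With the compile steps gone, the mocha scripts above point straight at the checked-in JS sources. The test:acceptance:_run entry is roughly equivalent to the following programmatic invocation; this is a sketch, the file path is illustrative, and the --recursive/--exit handling of the real CLI call is omitted.

    const Mocha = require('mocha')

    const mocha = new Mocha({ reporter: 'spec', timeout: 15000 })
    // the CLI walks test/acceptance/js recursively; one file stands in here
    mocha.addFile('test/acceptance/js/ApplyingUpdatesToADocTests.js')
    mocha.run(failures => {
      process.exitCode = failures ? 1 : 0
    })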
b/services/document-updater/package-lock.json index 72c7eefbae..f83a21dbc8 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -71,6 +71,25 @@ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.8.8.tgz", "integrity": "sha512-mO5GWzBPsPf6865iIbzNE0AvkKF3NE+2S3eRUpE+FE07BOAkXh6G+GW/Pj01hhXjve1WScbaIO4UlY1JKeqCcA==" }, + "@babel/runtime": { + "version": "7.9.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.9.6.tgz", + "integrity": "sha512-64AF1xY3OAkFHqOb9s4jpgk1Mm5vDZ4L3acHvAml+53nO1XbXLuDodsVpO4OIUsmemlUHMxNdYMNJmsvOwLrvQ==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "@babel/runtime-corejs3": { + "version": "7.9.6", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.9.6.tgz", + "integrity": "sha512-6toWAfaALQjt3KMZQc6fABqZwUDDuWzz+cAfPhqyEnzxvdWOAkjwPNxgF8xlmo7OWLsSjaKjsskpKHRLaMArOA==", + "dev": true, + "requires": { + "core-js-pure": "^3.0.0", + "regenerator-runtime": "^0.13.4" + } + }, "@babel/template": { "version": "7.8.6", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", @@ -876,6 +895,12 @@ "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==" }, + "@types/color-name": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", + "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", + "dev": true + }, "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", @@ -889,6 +914,12 @@ "@types/node": "*" } }, + "@types/eslint-visitor-keys": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", + "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", + "dev": true + }, "@types/fs-extra": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.0.tgz", @@ -897,6 +928,12 @@ "@types/node": "*" } }, + "@types/json-schema": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", + "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==", + "dev": true + }, "@types/long": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz", @@ -940,6 +977,59 @@ "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.6.tgz", "integrity": "sha512-wHNBMnkoEBiRAd3s8KTKwIuO9biFtTf0LehITzBhSco+HQI0xkXZbLOD55SW3Aqw3oUkHstkm5SPv58yaAdFPQ==" }, + "@typescript-eslint/experimental-utils": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", + "integrity": "sha512-zmpS6SyqG4ZF64ffaJ6uah6tWWWgZ8m+c54XXgwFtUv0jNz8aJAVx8chMCvnk7yl6xwn8d+d96+tWp7fXzTuDg==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.3", + "@typescript-eslint/typescript-estree": "1.13.0", + "eslint-scope": "^4.0.0" + }, + "dependencies": { + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": 
"sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + } + } + }, + "@typescript-eslint/parser": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-1.13.0.tgz", + "integrity": "sha512-ITMBs52PCPgLb2nGPoeT4iU3HdQZHcPaZVw+7CsFagRJHUhyeTgorEwHXhFf3e7Evzi8oujKNpHc8TONth8AdQ==", + "dev": true, + "requires": { + "@types/eslint-visitor-keys": "^1.0.0", + "@typescript-eslint/experimental-utils": "1.13.0", + "@typescript-eslint/typescript-estree": "1.13.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "@typescript-eslint/typescript-estree": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-1.13.0.tgz", + "integrity": "sha512-b5rCmd2e6DCC6tCTN9GSUAuxdYwCM/k/2wdjHGrIRGPSJotWMCe/dGpi66u42bhuh8q3QBzqM4TMA1GUUCJvdw==", + "dev": true, + "requires": { + "lodash.unescape": "4.0.1", + "semver": "5.5.0" + }, + "dependencies": { + "semver": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz", + "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==", + "dev": true + } + } + }, "abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", @@ -962,6 +1052,12 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==" }, + "acorn-jsx": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.2.0.tgz", + "integrity": "sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ==", + "dev": true + }, "agent-base": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.0.tgz", @@ -996,6 +1092,23 @@ "uri-js": "^4.2.2" } }, + "ansi-escapes": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", + "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", + "dev": true, + "requires": { + "type-fest": "^0.11.0" + }, + "dependencies": { + "type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "dev": true + } + } + }, "ansi-regex": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", @@ -1030,11 +1143,42 @@ "sprintf-js": "~1.0.2" } }, + "aria-query": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", + "integrity": "sha1-ZbP8wcoRVajJrmTW7uKX8V1RM8w=", + "dev": true, + "requires": { + "ast-types-flow": "0.0.7", + "commander": "^2.11.0" + } + }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" }, + "array-includes": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz", + "integrity": "sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + 
"es-abstract": "^1.17.0", + "is-string": "^1.0.5" + } + }, + "array.prototype.flat": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.3.tgz", + "integrity": "sha512-gBlRZV0VSmfPIeWfuuy56XZMvbVfbEUnOXUvt3F/eUUUSyzlgLxhEX4YAEpxNAogRGehPSnfXyPtYyKAhkzQhQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, "arrify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", @@ -1059,6 +1203,18 @@ "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", "dev": true }, + "ast-types-flow": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", + "integrity": "sha1-9wtzXGvKGlycItmCw+Oef+ujva0=", + "dev": true + }, + "astral-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", + "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", + "dev": true + }, "async": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", @@ -1107,6 +1263,26 @@ "is-buffer": "^2.0.2" } }, + "axobject-query": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.1.2.tgz", + "integrity": "sha512-ICt34ZmrVt8UQnvPl6TVyDTkmhXmAyAT4Jh5ugfGUX4MOrZ+U/ZY6/sdylRw3qGNr9Ub5AJsaHeDMzNLehRdOQ==", + "dev": true + }, + "babel-eslint": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz", + "integrity": "sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.7.0", + "@babel/traverse": "^7.7.0", + "@babel/types": "^7.7.0", + "eslint-visitor-keys": "^1.0.0", + "resolve": "^1.12.0" + } + }, "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", @@ -1185,6 +1361,12 @@ "type-is": "~1.6.17" } }, + "boolify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/boolify/-/boolify-1.0.1.tgz", + "integrity": "sha1-tcCeF8rNET0Rt7s+04TMASmU2Gs=", + "dev": true + }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -1263,11 +1445,28 @@ "write-file-atomic": "^2.4.2" } }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + }, "camelcase": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==" }, + "camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + } + }, "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", @@ -1300,6 +1499,27 @@ "supports-color": "^5.3.0" } }, + "chardet": { + 
"version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "cli-width": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", + "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==", + "dev": true + }, "cliui": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", @@ -1344,6 +1564,18 @@ "delayed-stream": "~1.0.0" } }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "common-tags": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", + "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", + "dev": true + }, "commondir": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", @@ -1359,6 +1591,12 @@ "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, + "contains-path": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", + "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", + "dev": true + }, "content-disposition": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", @@ -1399,6 +1637,18 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, + "core-js": { + "version": "3.6.5", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", + "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==", + "dev": true + }, + "core-js-pure": { + "version": "3.6.5", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.6.5.tgz", + "integrity": "sha512-lacdXOimsiD0QyNf9BC/mxivNJ/ybBGJXQFKzRekp1WTHoVUWsUHEn+2T8GJAzzIhyOuXA+gOxCVN3l+5PLPUA==", + "dev": true + }, "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", @@ -1446,6 +1696,12 @@ "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, + "damerau-levenshtein": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.6.tgz", + "integrity": "sha512-JVrozIeElnj3QzfUIt8tB8YMluBJom4Vw9qTPpjGYQ9fYlB3D/rb6OordUxf3xeFB35LKWs0xqcO5U6ySvBtug==", + "dev": true + }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", @@ -1484,6 +1740,12 @@ } } }, + "deep-is": { + "version": "0.1.3", + "resolved": 
"https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", + "dev": true + }, "default-require-extensions": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-2.0.0.tgz", @@ -1492,6 +1754,15 @@ "strip-bom": "^3.0.0" } }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "requires": { + "object-keys": "^1.0.12" + } + }, "delay": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", @@ -1523,6 +1794,21 @@ "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", "dev": true }, + "dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, "dot-prop": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz", @@ -1641,6 +1927,36 @@ "is-arrayish": "^0.2.1" } }, + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, "es6-error": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", @@ -1669,11 +1985,581 @@ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" }, + "eslint": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.6.0.tgz", + "integrity": "sha512-PpEBq7b6qY/qrOmpYQ/jTMDYfuQMELR4g4WI1M/NaSDDD/bdcMb+dj4Hgks7p41kW2caXsPsEZAEAyAgjVVC0g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + 
"globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^7.0.0", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "eslint-config-prettier": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.11.0.tgz", + "integrity": "sha512-oB8cpLWSAjOVFEJhhyMZh6NOEOtBVziaqdDQ86+qhDHFbZXoRTM7pNSvFRfW/W/L/LrQ38C99J5CGuRBBzBsdA==", + "dev": true, + "requires": { + "get-stdin": "^6.0.0" + } + }, + "eslint-config-standard": { + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-14.1.1.tgz", + "integrity": "sha512-Z9B+VR+JIXRxz21udPTL9HpFMyoMUEeX1G251EQ6e05WD9aPVtVBn09XUmZ259wCMlCDmYDSZG62Hhm+ZTJcUg==", + "dev": true + }, + "eslint-config-standard-jsx": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-8.1.0.tgz", + "integrity": "sha512-ULVC8qH8qCqbU792ZOO6DaiaZyHNS/5CZt3hKqHkEhVlhPEPN3nfBqqxJCyp59XrjIBZPu1chMYe9T2DXZ7TMw==", + "dev": true + }, + "eslint-config-standard-react": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-react/-/eslint-config-standard-react-9.2.0.tgz", + "integrity": "sha512-u+KRP2uCtthZ/W4DlLWCC59GZNV/y9k9yicWWammgTs/Omh8ZUUPF3EnYm81MAcbkYQq2Wg0oxutAhi/FQ8mIw==", + "dev": true, + "requires": { + 
"eslint-config-standard-jsx": "^8.0.0" + } + }, + "eslint-import-resolver-node": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.3.tgz", + "integrity": "sha512-b8crLDo0M5RSe5YG8Pu2DYBj71tSB6OvXkfzwbJU2w7y8P4/yo0MyF8jU26IEuEuHF2K5/gcAJE3LhQGqBBbVg==", + "dev": true, + "requires": { + "debug": "^2.6.9", + "resolve": "^1.13.1" + } + }, + "eslint-module-utils": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz", + "integrity": "sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==", + "dev": true, + "requires": { + "debug": "^2.6.9", + "pkg-dir": "^2.0.0" + }, + "dependencies": { + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + }, + "pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "dev": true, + "requires": { + "find-up": "^2.1.0" + } + } + } + }, + "eslint-plugin-chai-expect": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.1.0.tgz", + "integrity": "sha512-rd0/4mjMV6c3i0o4DKkWI4uaFN9DK707kW+/fDphaDI6HVgxXnhML9Xgt5vHnTXmSSnDhupuCFBgsEAEpchXmQ==", + "dev": true + }, + "eslint-plugin-chai-friendly": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.6.0.tgz", + "integrity": "sha512-Uvvv1gkbRGp/qfN15B0kQyQWg+oFA8buDSqrwmW3egNSk/FpqH2MjQqKOuKwmEL6w4QIQrIjDp+gg6kGGmD3oQ==", + "dev": true + }, + "eslint-plugin-es": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.0.tgz", + "integrity": "sha512-6/Jb/J/ZvSebydwbBJO1R9E5ky7YeElfK56Veh7e4QGFHCXoIXGH9HhVz+ibJLM3XJ1XjP+T7rKBLUa/Y7eIng==", + "dev": true, + "requires": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "dependencies": { + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "regexpp": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz", + 
"integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==", + "dev": true + } + } + }, + "eslint-plugin-import": { + "version": "2.20.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.2.tgz", + "integrity": "sha512-FObidqpXrR8OnCh4iNsxy+WACztJLXAHBO5hK79T1Hc77PgQZkyDGA5Ag9xAvRpglvLNxhH/zSmZ70/pZ31dHg==", + "dev": true, + "requires": { + "array-includes": "^3.0.3", + "array.prototype.flat": "^1.2.1", + "contains-path": "^0.1.0", + "debug": "^2.6.9", + "doctrine": "1.5.0", + "eslint-import-resolver-node": "^0.3.2", + "eslint-module-utils": "^2.4.1", + "has": "^1.0.3", + "minimatch": "^3.0.4", + "object.values": "^1.1.0", + "read-pkg-up": "^2.0.0", + "resolve": "^1.12.0" + }, + "dependencies": { + "doctrine": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", + "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "dev": true, + "requires": { + "esutils": "^2.0.2", + "isarray": "^1.0.0" + } + }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "requires": { + "error-ex": "^1.2.0" + } + }, + "path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true, + "requires": { + "pify": "^2.0.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "dev": true, + "requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + } + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "dev": true, + "requires": { + "find-up": "^2.0.0", + "read-pkg": "^2.0.0" + } + } + } + }, + "eslint-plugin-jsx-a11y": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", + "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", + "dev": true, + "requires": { + "@babel/runtime": "^7.4.5", + "aria-query": "^3.0.0", + "array-includes": "^3.0.3", + "ast-types-flow": "^0.0.7", + "axobject-query": "^2.0.2", + "damerau-levenshtein": "^1.0.4", + "emoji-regex": "^7.0.2", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.1" + } + }, + "eslint-plugin-mocha": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-6.3.0.tgz", + "integrity": "sha512-Cd2roo8caAyG21oKaaNTj7cqeYRWW1I2B5SfpKRp0Ip1gkfwoR1Ow0IGlPWnNjzywdF4n+kHL8/9vM6zCJUxdg==", + "dev": true, + "requires": { + "eslint-utils": "^2.0.0", + "ramda": "^0.27.0" + }, + "dependencies": { + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + } + } + }, + "eslint-plugin-node": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", + "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==", + "dev": true, + "requires": { + "eslint-plugin-es": "^3.0.0", + "eslint-utils": "^2.0.0", + "ignore": "^5.1.1", + "minimatch": "^3.0.4", + "resolve": "^1.10.1", + "semver": "^6.1.0" + }, + "dependencies": { + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "ignore": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", + "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==", + "dev": true + } + } + }, + "eslint-plugin-prettier": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.3.tgz", + "integrity": "sha512-+HG5jmu/dN3ZV3T6eCD7a4BlAySdN7mLIbJYo0z1cFQuI+r2DiTJEFeF68ots93PsnrMxbzIZ2S/ieX+mkrBeQ==", + "dev": true, + "requires": { + "prettier-linter-helpers": "^1.0.0" + } + }, + "eslint-plugin-promise": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.2.1.tgz", + "integrity": "sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw==", + "dev": true + }, + "eslint-plugin-react": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.19.0.tgz", + "integrity": "sha512-SPT8j72CGuAP+JFbT0sJHOB80TX/pu44gQ4vXH/cq+hQTiY2PuZ6IHkqXJV6x1b28GDdo1lbInjKUrrdUf0LOQ==", + "dev": true, + "requires": { + "array-includes": "^3.1.1", + "doctrine": "^2.1.0", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.3", + "object.entries": "^1.1.1", + 
"object.fromentries": "^2.0.2", + "object.values": "^1.1.1", + "prop-types": "^15.7.2", + "resolve": "^1.15.1", + "semver": "^6.3.0", + "string.prototype.matchall": "^4.0.2", + "xregexp": "^4.3.0" + }, + "dependencies": { + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + } + } + }, + "eslint-plugin-standard": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-4.0.1.tgz", + "integrity": "sha512-v/KBnfyaOMPmZc/dmc6ozOdWqekGp7bBGq4jLAecEfPGmfKiWS4sA8sC0LqiV9w5qmXAtXVn4M3p1jSyhY85SQ==", + "dev": true + }, + "eslint-scope": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "eslint-visitor-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", + "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", + "dev": true + }, + "espree": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", + "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", + "dev": true, + "requires": { + "acorn": "^7.1.1", + "acorn-jsx": "^5.2.0", + "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "acorn": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", + "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", + "dev": true + } + } + }, "esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" }, + "esquery": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.3.1.tgz", + "integrity": "sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ==", + "dev": true, + "requires": { + "estraverse": "^5.1.0" + }, + "dependencies": { + "estraverse": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.1.0.tgz", + "integrity": "sha512-FyohXK+R0vE+y1nHLoBM7ZTyqRpqAlhdZHCWIWEviFLiGB8b04H6bQs8G+XTthacvT8VuwvteiP7RJSxMs8UEw==", + "dev": true + } + } + }, + "esrecurse": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", + "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "dev": true, + "requires": { + "estraverse": "^4.1.0" + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": 
"sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true + }, "esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -1740,6 +2626,17 @@ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, + "external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "requires": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + } + }, "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", @@ -1750,16 +2647,46 @@ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==" }, + "fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, "fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, "fast-text-encoding": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", "integrity": "sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==" }, + "figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dev": true, + "requires": { + "flat-cache": "^2.0.1" + } + }, "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", @@ -1802,6 +2729,48 @@ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dev": true, + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": 
"sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + } + } + }, + "flatted": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", + "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", + "dev": true + }, "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", @@ -1869,6 +2838,18 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "dev": true + }, "gaxios": { "version": "2.3.4", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz", @@ -1895,6 +2876,12 @@ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" }, + "get-stdin": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", + "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", + "dev": true + }, "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", @@ -1916,6 +2903,15 @@ "path-is-absolute": "^1.0.0" } }, + "glob-parent": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", + "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -2032,11 +3028,43 @@ "har-schema": "^2.0.0" } }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-ansi": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + } 
+ } + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, "hasha": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/hasha/-/hasha-3.0.0.tgz", @@ -2159,11 +3187,41 @@ "safer-buffer": ">= 2.1.2 < 3" } }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + }, + "import-fresh": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", + "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "dependencies": { + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + } + } + }, "imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=" }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true + }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -2178,6 +3236,128 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, + "inquirer": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.1.0.tgz", + "integrity": "sha512-5fJMWEmikSYu0nv/flMc475MhGbB7TSPd/2IpFV4I4rMklboCH2rQjYY5kKiYGHqUF9gvaambupcJFFG9dvReg==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^3.0.0", + "cli-cursor": "^3.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", + "run-async": "^2.4.0", + "rxjs": "^6.5.3", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "dev": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": 
"sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "internal-slot": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.2.tgz", + "integrity": "sha512-2cQNfwhAfJIkU4KZPkDI+Gj5yNNnbqi40W9Gge6dfnk4TocEVm00B3bdiL+JINrbGJil2TeHvM4rETGzk/f/0g==", + "dev": true, + "requires": { + "es-abstract": "^1.17.0-next.1", + "has": "^1.0.3", + "side-channel": "^1.0.2" + } + }, "ioredis": { "version": "4.16.1", "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.16.1.tgz", @@ -2229,16 +3409,52 @@ "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-date-object": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "dev": true + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, "is-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", @@ -2249,6 +3465,21 @@ "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz", "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.1" + } + }, "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -2419,6 +3650,12 @@ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", @@ -2435,6 +3672,16 @@ "verror": "1.10.0" } }, + "jsx-ast-utils": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.2.3.tgz", + "integrity": "sha512-EdIHFMm+1BPynpKOpdPqiOsvnIrInRGJD7bzPZdPkjitQEqpdpUuFpq4T0npZFKTiB3RhWFdGN+oqOJIdhDhQA==", + "dev": true, + "requires": { + "array-includes": "^3.0.3", + "object.assign": "^4.1.0" + } + }, "jwa": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", @@ -2454,6 +3701,16 @@ "safe-buffer": "^5.0.1" } }, + 
"levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, "load-json-file": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", @@ -2516,11 +3773,29 @@ "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" }, + "lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=", + "dev": true + }, + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" }, + "lodash.unescape": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", + "integrity": "sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=", + "dev": true + }, "log-driver": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", @@ -2584,11 +3859,78 @@ } } }, + "loglevel": { + "version": "1.6.8", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.8.tgz", + "integrity": "sha512-bsU7+gc9AJ2SqpzxwU3+1fedl8zAntbtC5XYlt3s2j1hJcn2PsXSmgN8TaLG/J1/2mod4+cE/3vNL70/c1RNCA==", + "dev": true + }, + "loglevel-colored-level-prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/loglevel-colored-level-prefix/-/loglevel-colored-level-prefix-1.0.0.tgz", + "integrity": "sha1-akAhj9x64V/HbD0PPmdsRlOIYD4=", + "dev": true, + "requires": { + "chalk": "^1.1.3", + "loglevel": "^1.4.1" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "dev": true + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "dev": true, + "requires": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", + "dev": true + } + } + }, "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", "integrity": 
"sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" }, + "loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dev": true, + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, "lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", @@ -2627,6 +3969,24 @@ } } }, + "make-plural": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", + "integrity": "sha512-xTYd4JVHpSCW+aqDof6w/MebaMVNTVYBZhbB/vi513xXdiPT92JMVCo0Jq8W2UZnzYRFeVbQiQ+I25l13JuKvA==", + "dev": true, + "requires": { + "minimist": "^1.2.0" + }, + "dependencies": { + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true, + "optional": true + } + } + }, "map-obj": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", @@ -2661,6 +4021,29 @@ "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" }, + "messageformat": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/messageformat/-/messageformat-2.3.0.tgz", + "integrity": "sha512-uTzvsv0lTeQxYI2y1NPa1lItL5VRI8Gb93Y2K2ue5gBPyrbJxfDi/EYWxh2PKv5yO42AJeeqblS9MJSh/IEk4w==", + "dev": true, + "requires": { + "make-plural": "^4.3.0", + "messageformat-formatters": "^2.0.1", + "messageformat-parser": "^4.1.2" + } + }, + "messageformat-formatters": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/messageformat-formatters/-/messageformat-formatters-2.0.1.tgz", + "integrity": "sha512-E/lQRXhtHwGuiQjI7qxkLp8AHbMD5r2217XNe/SREbBlSawe0lOqsFb7rflZJmlQFSULNLIqlcjjsCPlB3m3Mg==", + "dev": true + }, + "messageformat-parser": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/messageformat-parser/-/messageformat-parser-4.1.3.tgz", + "integrity": "sha512-2fU3XDCanRqeOCkn7R5zW5VQHWf+T3hH65SzuqRvjatBK7r4uyFa5mEX+k6F9Bd04LVM5G4/BHBTUJsOdW7uyg==", + "dev": true + }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", @@ -2706,6 +4089,12 @@ "mime-db": "1.43.0" } }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", @@ -2833,6 +4222,12 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, + "mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", @@ -2860,6 +4255,12 @@ "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", "integrity": 
"sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", @@ -2876,6 +4277,12 @@ "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==" }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, "node-fetch": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", @@ -2969,6 +4376,72 @@ "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "dev": true + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "object.entries": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.1.tgz", + "integrity": "sha512-ilqR7BgdyZetJutmDPfXCDffGa0/Yzl2ivVNpbx/g4UeWrCdRnFDUBrKJGLhGieRHDATnyZXWBeCb29k9CJysQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "object.fromentries": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.2.tgz", + "integrity": "sha512-r3ZiBH7MQppDJVLx6fhD618GKNG40CZYH9wgwdhKxBDDbQgjeWGGd4AtkZad84d291YxvWe7bJGuE65Anh0dxQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "object.values": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", + "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, "on-finished": { "version": "2.3.0", 
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", @@ -2992,11 +4465,40 @@ "wrappy": "1" } }, + "onetime": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, "os-homedir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true + }, "p-limit": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", @@ -3029,6 +4531,15 @@ "release-zalgo": "^1.0.0" } }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + } + }, "parse-duration": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.2.tgz", @@ -3068,6 +4579,18 @@ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" }, + "path-is-inside": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", + "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", + "dev": true + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true + }, "path-parse": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", @@ -3111,6 +4634,659 @@ "find-up": "^3.0.0" } }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true + }, + "prettier": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.0.5.tgz", + "integrity": "sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==", + "dev": true + }, + "prettier-eslint": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/prettier-eslint/-/prettier-eslint-9.0.1.tgz", + "integrity": "sha512-KZT65QTosSAqBBqmrC+RpXbsMRe7Os2YSR9cAfFbDlyPAopzA/S5bioiZ3rpziNQNSJaOxmtXSx07EQ+o2Dlug==", + "dev": true, + "requires": { + "@typescript-eslint/parser": "^1.10.2", + "common-tags": "^1.4.0", + "core-js": "^3.1.4", + "dlv": "^1.1.0", + "eslint": "^5.0.0", + "indent-string": "^4.0.0", + "lodash.merge": "^4.6.0", + "loglevel-colored-level-prefix": "^1.0.0", + "prettier": "^1.7.0", + "pretty-format": "^23.0.1", + 
"require-relative": "^0.8.7", + "typescript": "^3.2.1", + "vue-eslint-parser": "^2.0.2" + }, + "dependencies": { + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "eslint": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.9.1", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^4.0.3", + "eslint-utils": "^1.3.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^5.0.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob": "^7.1.2", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.11", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0" + } + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "dev": true, + "requires": { + "acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + 
"escape-string-regexp": "^1.0.5" + } + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "semver": { + 
"version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + } + } + }, + "prettier-eslint-cli": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/prettier-eslint-cli/-/prettier-eslint-cli-5.0.0.tgz", + "integrity": "sha512-cei9UbN1aTrz3sQs88CWpvY/10PYTevzd76zoG1tdJ164OhmNTFRKPTOZrutVvscoQWzbnLKkviS3gu5JXwvZg==", + "dev": true, + "requires": { + "arrify": "^2.0.1", + "boolify": "^1.0.0", + "camelcase-keys": "^6.0.0", + "chalk": "^2.4.2", + "common-tags": "^1.8.0", + "core-js": "^3.1.4", + "eslint": "^5.0.0", + "find-up": "^4.1.0", + "get-stdin": "^7.0.0", + "glob": "^7.1.4", + "ignore": "^5.1.2", + "lodash.memoize": "^4.1.2", + "loglevel-colored-level-prefix": "^1.0.0", + "messageformat": "^2.2.1", + "prettier-eslint": "^9.0.0", + "rxjs": "^6.5.2", + "yargs": "^13.2.4" + }, + "dependencies": { + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "eslint": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.9.1", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^4.0.3", 
+ "eslint-utils": "^1.3.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^5.0.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob": "^7.1.2", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.11", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0" + }, + "dependencies": { + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + } + } + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "dev": true, + "requires": { + "acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "get-stdin": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-7.0.0.tgz", + "integrity": "sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==", + "dev": true + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ignore": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", + "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==", + "dev": true + }, + "inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + 
"figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + 
"string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + } + } + }, + "prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "requires": { + "fast-diff": "^1.1.2" + } + }, + "pretty-format": { + "version": "23.6.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-23.6.0.tgz", + "integrity": "sha512-zf9NV1NSlDLDjycnwm6hpFATCGl/K1lt0R/GdkAK2O5LN/rwJoB+Mh93gGJjut4YbmecbfgLWVGSTCr0Ewvvbw==", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0", + "ansi-styles": "^3.2.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + } + } + }, "pretty-ms": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", @@ -3124,6 +5300,12 @@ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, + "progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true + }, "prom-client": { "version": "11.5.3", "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.3.tgz", @@ -3132,6 +5314,17 @@ "tdigest": "^0.1.1" } }, + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, "protobufjs": { "version": "6.8.8", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", @@ -3218,6 +5411,18 @@ "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" }, + "quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true + }, + "ramda": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.0.tgz", + "integrity": 
"sha512-pVzZdDpWwWqEVVLshWUHjNwuVP7SfcmPraYuqocJp1yo2U1R7P+5QAfDhdItkuoGqIBnBYrtPp7rEPqDn9HlZA==", + "dev": true + }, "range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -3258,6 +5463,12 @@ "unpipe": "1.0.0" } }, + "react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "dev": true + }, "read-pkg": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", @@ -3346,6 +5557,28 @@ } } }, + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + }, + "regexp.prototype.flags": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.0.tgz", + "integrity": "sha512-2+Q0C5g951OlYlJz6yu5/M33IcsESLlLfsyIaLJaG4FA2r4yP8MvVMJUUP/fVBkSpbbbZlS5gynbEWLipiiXiQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true + }, "release-zalgo": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", @@ -3456,6 +5689,12 @@ "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" }, + "require-relative": { + "version": "0.8.7", + "resolved": "https://registry.npmjs.org/require-relative/-/require-relative-0.8.7.tgz", + "integrity": "sha1-eZlTn8ngR6N5KPoZb44VY9q9Nt4=", + "dev": true + }, "require_optional": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", @@ -3485,6 +5724,16 @@ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==" }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, "retry-axios": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", @@ -3523,6 +5772,21 @@ "glob": "^6.0.1" } }, + "run-async": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true + }, + "rxjs": { + "version": "6.5.5", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.5.tgz", + "integrity": "sha512-WfQI+1gohdf0Dai/Bbmk5L5ItH5tYqm3ki2c5GdWhKjalzjg93N3avFjVStyZZz+A2Em+ZxKH5bNghw9UeylGQ==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } + }, "safe-buffer": { "version": "5.1.2", "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", @@ -3634,11 +5898,36 @@ } } }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true + }, "shimmer": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" }, + "side-channel": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.2.tgz", + "integrity": "sha512-7rL9YlPHg7Ancea1S96Pa8/QWb4BtXL/TZvS6B8XFetGBeuhAsfmUspK6DokBeZ64+Kj9TCNRD/30pVz1BvQNA==", + "dev": true, + "requires": { + "es-abstract": "^1.17.0-next.1", + "object-inspect": "^1.7.0" + } + }, "signal-exit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", @@ -3653,6 +5942,17 @@ "buster-format": "~0.5" } }, + "slice-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + } + }, "snakecase-keys": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.1.2.tgz", @@ -3825,6 +6125,62 @@ "strip-ansi": "^5.1.0" } }, + "string.prototype.matchall": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.2.tgz", + "integrity": "sha512-N/jp6O5fMf9os0JU3E72Qhf590RSRZU/ungsL/qJUYVTNv7hTG0P/dbPjxINVN9jpscu3nzYwKESU3P3RY5tOg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0", + "has-symbols": "^1.0.1", + "internal-slot": "^1.0.2", + "regexp.prototype.flags": "^1.3.0", + "side-channel": "^1.0.2" + } + }, + "string.prototype.trimend": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", + "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" + } + }, + 
"string.prototype.trimstart": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", + "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", @@ -3846,6 +6202,12 @@ "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" }, + "strip-json-comments": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.0.tgz", + "integrity": "sha512-e6/d0eBu7gHtdCqFt0xJr642LdToM5/cN4Qb9DbHjVx1CP5RyeM+zH7pbecEmDv/lBqb0QH+6Uqq75rxFPkM0w==", + "dev": true + }, "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", @@ -3859,6 +6221,18 @@ "has-flag": "^3.0.0" } }, + "table": { + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "dev": true, + "requires": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + } + }, "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", @@ -3912,6 +6286,12 @@ } } }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", @@ -3961,6 +6341,15 @@ "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==", "dev": true }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "requires": { + "os-tmpdir": "~1.0.2" + } + }, "to-fast-properties": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", @@ -4013,6 +6402,12 @@ } } }, + "tslib": { + "version": "1.11.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.2.tgz", + "integrity": "sha512-tTSkux6IGPnUGUd1XAZHcpu85MOkIl5zX49pO+jfsie3eP0B6pyhOlLXm3cAC6T7s+euSDDUUV+Acop5WmtkVg==", + "dev": true + }, "tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", @@ -4026,6 +6421,15 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2" + } + }, "type-detect": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-1.0.0.tgz", @@ -4046,6 +6450,12 @@ "mime-types": "~2.1.24" } }, + "typescript": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz", + "integrity": 
"sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==", + "dev": true + }, "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", @@ -4079,6 +6489,12 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, + "v8-compile-cache": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", + "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "dev": true + }, "validate-npm-package-license": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", @@ -4103,6 +6519,80 @@ "extsprintf": "^1.2.0" } }, + "vue-eslint-parser": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-2.0.3.tgz", + "integrity": "sha512-ZezcU71Owm84xVF6gfurBQUGg8WQ+WZGxgDEQu1IHFBZNx7BFZg3L1yHxrCBNNwbwFtE1GuvfJKMtb6Xuwc/Bw==", + "dev": true, + "requires": { + "debug": "^3.1.0", + "eslint-scope": "^3.7.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^3.5.2", + "esquery": "^1.0.0", + "lodash": "^4.17.4" + }, + "dependencies": { + "acorn": { + "version": "5.7.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", + "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", + "dev": true + }, + "acorn-jsx": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", + "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=", + "dev": true, + "requires": { + "acorn": "^3.0.4" + }, + "dependencies": { + "acorn": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", + "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", + "dev": true + } + } + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "eslint-scope": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "3.5.4", + "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", + "integrity": "sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A==", + "dev": true, + "requires": { + "acorn": "^5.5.0", + "acorn-jsx": "^3.0.0" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, "walkdir": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", @@ -4126,6 +6616,12 @@ "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + 
"integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, "wrap-ansi": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", @@ -4141,6 +6637,32 @@ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, + "write": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "dev": true, + "requires": { + "mkdirp": "^0.5.1" + }, + "dependencies": { + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + } + } + }, "write-file-atomic": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", @@ -4151,6 +6673,15 @@ "signal-exit": "^3.0.2" } }, + "xregexp": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/xregexp/-/xregexp-4.3.0.tgz", + "integrity": "sha512-7jXDIFXh5yJ/orPn4SXjuVrWWoi4Cr8jfV1eHv9CixKSbU+jY4mxfrBwAuDvupPNKpMUY+FeIqsVw/JLT9+B8g==", + "dev": true, + "requires": { + "@babel/runtime-corejs3": "^7.8.3" + } + }, "y18n": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 43c40ce469..81e27d4c3c 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -33,24 +33,30 @@ "settings-sharelatex": "^1.1.0" }, "devDependencies": { + "babel-eslint": "^10.1.0", "chai": "^3.5.0", "chai-spies": "^0.7.1", "cluster-key-slot": "^1.0.5", + "eslint": "^6.6.0", + "eslint-config-prettier": "^6.11.0", + "eslint-config-standard": "^14.1.1", + "eslint-config-standard-jsx": "^8.1.0", + "eslint-config-standard-react": "^9.2.0", + "eslint-plugin-chai-expect": "^2.1.0", + "eslint-plugin-chai-friendly": "^0.6.0", + "eslint-plugin-import": "^2.20.2", + "eslint-plugin-jsx-a11y": "^6.2.3", + "eslint-plugin-mocha": "^6.3.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-prettier": "^3.1.3", + "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-react": "^7.19.0", + "eslint-plugin-standard": "^4.0.1", "mocha": "^5.0.1", + "prettier": "^2.0.5", + "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "~0.2.0", "sinon": "~1.5.2", - "timekeeper": "^2.0.0", - "eslint": "^6.8.0", - "eslint-config-prettier": "^6.10.0", - "eslint-config-standard": "^14.1.0", - "eslint-plugin-chai-expect": "^2.1.0", - "eslint-plugin-chai-friendly": "^0.5.0", - "eslint-plugin-import": "^2.20.1", - "eslint-plugin-mocha": "^6.3.0", - "eslint-plugin-node": "^11.0.0", - "eslint-plugin-prettier": "^3.1.2", - "eslint-plugin-promise": "^4.2.1", - "eslint-plugin-standard": "^4.0.1", - "prettier-eslint-cli": "^5.0.0" + "timekeeper": "^2.0.0" } } From 249b7247b53db399771a4da7549d25968afa9c5a Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:07:29 
+0200 Subject: [PATCH 609/769] decaffeinate: Rename DeleteQueueManager.coffee and 58 other files from .coffee to .js --- .../coffee/{DeleteQueueManager.coffee => DeleteQueueManager.js} | 0 .../app/coffee/{DiffCodec.coffee => DiffCodec.js} | 0 .../app/coffee/{DispatchManager.coffee => DispatchManager.js} | 0 .../app/coffee/{DocumentManager.coffee => DocumentManager.js} | 0 services/document-updater/app/coffee/{Errors.coffee => Errors.js} | 0 .../app/coffee/{HistoryManager.coffee => HistoryManager.js} | 0 .../coffee/{HistoryRedisManager.coffee => HistoryRedisManager.js} | 0 .../app/coffee/{HttpController.coffee => HttpController.js} | 0 .../app/coffee/{LockManager.coffee => LockManager.js} | 0 .../app/coffee/{LoggerSerializers.coffee => LoggerSerializers.js} | 0 .../document-updater/app/coffee/{Metrics.coffee => Metrics.js} | 0 .../coffee/{PersistenceManager.coffee => PersistenceManager.js} | 0 .../document-updater/app/coffee/{Profiler.coffee => Profiler.js} | 0 .../app/coffee/{ProjectFlusher.coffee => ProjectFlusher.js} | 0 ...ctHistoryRedisManager.coffee => ProjectHistoryRedisManager.js} | 0 .../app/coffee/{ProjectManager.coffee => ProjectManager.js} | 0 .../app/coffee/{RangesManager.coffee => RangesManager.js} | 0 .../app/coffee/{RangesTracker.coffee => RangesTracker.js} | 0 .../app/coffee/{RateLimitManager.coffee => RateLimitManager.js} | 0 .../{RealTimeRedisManager.coffee => RealTimeRedisManager.js} | 0 .../app/coffee/{RedisManager.coffee => RedisManager.js} | 0 .../app/coffee/{ShareJsDB.coffee => ShareJsDB.js} | 0 .../{ShareJsUpdateManager.coffee => ShareJsUpdateManager.js} | 0 .../app/coffee/{SnapshotManager.coffee => SnapshotManager.js} | 0 .../app/coffee/{UpdateKeys.coffee => UpdateKeys.js} | 0 .../app/coffee/{UpdateManager.coffee => UpdateManager.js} | 0 .../document-updater/app/coffee/{mongojs.coffee => mongojs.js} | 0 .../app/coffee/sharejs/{count.coffee => count.js} | 0 .../app/coffee/sharejs/{helpers.coffee => helpers.js} | 0 .../app/coffee/sharejs/{index.coffee => index.js} | 0 .../app/coffee/sharejs/{json-api.coffee => json-api.js} | 0 .../document-updater/app/coffee/sharejs/{json.coffee => json.js} | 0 .../app/coffee/sharejs/{model.coffee => model.js} | 0 .../app/coffee/sharejs/server/{model.coffee => model.js} | 0 .../app/coffee/sharejs/server/{syncqueue.coffee => syncqueue.js} | 0 .../app/coffee/sharejs/{simple.coffee => simple.js} | 0 .../app/coffee/sharejs/{syncqueue.coffee => syncqueue.js} | 0 .../app/coffee/sharejs/{text-api.coffee => text-api.js} | 0 .../{text-composable-api.coffee => text-composable-api.js} | 0 .../coffee/sharejs/{text-composable.coffee => text-composable.js} | 0 .../app/coffee/sharejs/{text-tp2-api.coffee => text-tp2-api.js} | 0 .../app/coffee/sharejs/{text-tp2.coffee => text-tp2.js} | 0 .../document-updater/app/coffee/sharejs/{text.coffee => text.js} | 0 .../app/coffee/sharejs/types/{count.coffee => count.js} | 0 .../app/coffee/sharejs/types/{helpers.coffee => helpers.js} | 0 .../app/coffee/sharejs/types/{index.coffee => index.js} | 0 .../app/coffee/sharejs/types/{json-api.coffee => json-api.js} | 0 .../app/coffee/sharejs/types/{json.coffee => json.js} | 0 .../app/coffee/sharejs/types/{model.coffee => model.js} | 0 .../app/coffee/sharejs/types/{simple.coffee => simple.js} | 0 .../app/coffee/sharejs/types/{syncqueue.coffee => syncqueue.js} | 0 .../app/coffee/sharejs/types/{text-api.coffee => text-api.js} | 0 .../types/{text-composable-api.coffee => text-composable-api.js} | 0 .../sharejs/types/{text-composable.coffee => text-composable.js} | 0 
.../coffee/sharejs/types/{text-tp2-api.coffee => text-tp2-api.js} | 0 .../app/coffee/sharejs/types/{text-tp2.coffee => text-tp2.js} | 0 .../app/coffee/sharejs/types/{text.coffee => text.js} | 0 .../coffee/sharejs/types/{web-prelude.coffee => web-prelude.js} | 0 .../app/coffee/sharejs/{web-prelude.coffee => web-prelude.js} | 0 59 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/app/coffee/{DeleteQueueManager.coffee => DeleteQueueManager.js} (100%) rename services/document-updater/app/coffee/{DiffCodec.coffee => DiffCodec.js} (100%) rename services/document-updater/app/coffee/{DispatchManager.coffee => DispatchManager.js} (100%) rename services/document-updater/app/coffee/{DocumentManager.coffee => DocumentManager.js} (100%) rename services/document-updater/app/coffee/{Errors.coffee => Errors.js} (100%) rename services/document-updater/app/coffee/{HistoryManager.coffee => HistoryManager.js} (100%) rename services/document-updater/app/coffee/{HistoryRedisManager.coffee => HistoryRedisManager.js} (100%) rename services/document-updater/app/coffee/{HttpController.coffee => HttpController.js} (100%) rename services/document-updater/app/coffee/{LockManager.coffee => LockManager.js} (100%) rename services/document-updater/app/coffee/{LoggerSerializers.coffee => LoggerSerializers.js} (100%) rename services/document-updater/app/coffee/{Metrics.coffee => Metrics.js} (100%) rename services/document-updater/app/coffee/{PersistenceManager.coffee => PersistenceManager.js} (100%) rename services/document-updater/app/coffee/{Profiler.coffee => Profiler.js} (100%) rename services/document-updater/app/coffee/{ProjectFlusher.coffee => ProjectFlusher.js} (100%) rename services/document-updater/app/coffee/{ProjectHistoryRedisManager.coffee => ProjectHistoryRedisManager.js} (100%) rename services/document-updater/app/coffee/{ProjectManager.coffee => ProjectManager.js} (100%) rename services/document-updater/app/coffee/{RangesManager.coffee => RangesManager.js} (100%) rename services/document-updater/app/coffee/{RangesTracker.coffee => RangesTracker.js} (100%) rename services/document-updater/app/coffee/{RateLimitManager.coffee => RateLimitManager.js} (100%) rename services/document-updater/app/coffee/{RealTimeRedisManager.coffee => RealTimeRedisManager.js} (100%) rename services/document-updater/app/coffee/{RedisManager.coffee => RedisManager.js} (100%) rename services/document-updater/app/coffee/{ShareJsDB.coffee => ShareJsDB.js} (100%) rename services/document-updater/app/coffee/{ShareJsUpdateManager.coffee => ShareJsUpdateManager.js} (100%) rename services/document-updater/app/coffee/{SnapshotManager.coffee => SnapshotManager.js} (100%) rename services/document-updater/app/coffee/{UpdateKeys.coffee => UpdateKeys.js} (100%) rename services/document-updater/app/coffee/{UpdateManager.coffee => UpdateManager.js} (100%) rename services/document-updater/app/coffee/{mongojs.coffee => mongojs.js} (100%) rename services/document-updater/app/coffee/sharejs/{count.coffee => count.js} (100%) rename services/document-updater/app/coffee/sharejs/{helpers.coffee => helpers.js} (100%) rename services/document-updater/app/coffee/sharejs/{index.coffee => index.js} (100%) rename services/document-updater/app/coffee/sharejs/{json-api.coffee => json-api.js} (100%) rename services/document-updater/app/coffee/sharejs/{json.coffee => json.js} (100%) rename services/document-updater/app/coffee/sharejs/{model.coffee => model.js} (100%) rename 
services/document-updater/app/coffee/sharejs/server/{model.coffee => model.js} (100%) rename services/document-updater/app/coffee/sharejs/server/{syncqueue.coffee => syncqueue.js} (100%) rename services/document-updater/app/coffee/sharejs/{simple.coffee => simple.js} (100%) rename services/document-updater/app/coffee/sharejs/{syncqueue.coffee => syncqueue.js} (100%) rename services/document-updater/app/coffee/sharejs/{text-api.coffee => text-api.js} (100%) rename services/document-updater/app/coffee/sharejs/{text-composable-api.coffee => text-composable-api.js} (100%) rename services/document-updater/app/coffee/sharejs/{text-composable.coffee => text-composable.js} (100%) rename services/document-updater/app/coffee/sharejs/{text-tp2-api.coffee => text-tp2-api.js} (100%) rename services/document-updater/app/coffee/sharejs/{text-tp2.coffee => text-tp2.js} (100%) rename services/document-updater/app/coffee/sharejs/{text.coffee => text.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{count.coffee => count.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{helpers.coffee => helpers.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{index.coffee => index.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{json-api.coffee => json-api.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{json.coffee => json.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{model.coffee => model.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{simple.coffee => simple.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{syncqueue.coffee => syncqueue.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{text-api.coffee => text-api.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{text-composable-api.coffee => text-composable-api.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{text-composable.coffee => text-composable.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{text-tp2-api.coffee => text-tp2-api.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{text-tp2.coffee => text-tp2.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{text.coffee => text.js} (100%) rename services/document-updater/app/coffee/sharejs/types/{web-prelude.coffee => web-prelude.js} (100%) rename services/document-updater/app/coffee/sharejs/{web-prelude.coffee => web-prelude.js} (100%) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.coffee b/services/document-updater/app/coffee/DeleteQueueManager.js similarity index 100% rename from services/document-updater/app/coffee/DeleteQueueManager.coffee rename to services/document-updater/app/coffee/DeleteQueueManager.js diff --git a/services/document-updater/app/coffee/DiffCodec.coffee b/services/document-updater/app/coffee/DiffCodec.js similarity index 100% rename from services/document-updater/app/coffee/DiffCodec.coffee rename to services/document-updater/app/coffee/DiffCodec.js diff --git a/services/document-updater/app/coffee/DispatchManager.coffee b/services/document-updater/app/coffee/DispatchManager.js similarity index 100% rename from services/document-updater/app/coffee/DispatchManager.coffee rename to services/document-updater/app/coffee/DispatchManager.js diff --git a/services/document-updater/app/coffee/DocumentManager.coffee b/services/document-updater/app/coffee/DocumentManager.js similarity index 100% 
rename from services/document-updater/app/coffee/DocumentManager.coffee rename to services/document-updater/app/coffee/DocumentManager.js diff --git a/services/document-updater/app/coffee/Errors.coffee b/services/document-updater/app/coffee/Errors.js similarity index 100% rename from services/document-updater/app/coffee/Errors.coffee rename to services/document-updater/app/coffee/Errors.js diff --git a/services/document-updater/app/coffee/HistoryManager.coffee b/services/document-updater/app/coffee/HistoryManager.js similarity index 100% rename from services/document-updater/app/coffee/HistoryManager.coffee rename to services/document-updater/app/coffee/HistoryManager.js diff --git a/services/document-updater/app/coffee/HistoryRedisManager.coffee b/services/document-updater/app/coffee/HistoryRedisManager.js similarity index 100% rename from services/document-updater/app/coffee/HistoryRedisManager.coffee rename to services/document-updater/app/coffee/HistoryRedisManager.js diff --git a/services/document-updater/app/coffee/HttpController.coffee b/services/document-updater/app/coffee/HttpController.js similarity index 100% rename from services/document-updater/app/coffee/HttpController.coffee rename to services/document-updater/app/coffee/HttpController.js diff --git a/services/document-updater/app/coffee/LockManager.coffee b/services/document-updater/app/coffee/LockManager.js similarity index 100% rename from services/document-updater/app/coffee/LockManager.coffee rename to services/document-updater/app/coffee/LockManager.js diff --git a/services/document-updater/app/coffee/LoggerSerializers.coffee b/services/document-updater/app/coffee/LoggerSerializers.js similarity index 100% rename from services/document-updater/app/coffee/LoggerSerializers.coffee rename to services/document-updater/app/coffee/LoggerSerializers.js diff --git a/services/document-updater/app/coffee/Metrics.coffee b/services/document-updater/app/coffee/Metrics.js similarity index 100% rename from services/document-updater/app/coffee/Metrics.coffee rename to services/document-updater/app/coffee/Metrics.js diff --git a/services/document-updater/app/coffee/PersistenceManager.coffee b/services/document-updater/app/coffee/PersistenceManager.js similarity index 100% rename from services/document-updater/app/coffee/PersistenceManager.coffee rename to services/document-updater/app/coffee/PersistenceManager.js diff --git a/services/document-updater/app/coffee/Profiler.coffee b/services/document-updater/app/coffee/Profiler.js similarity index 100% rename from services/document-updater/app/coffee/Profiler.coffee rename to services/document-updater/app/coffee/Profiler.js diff --git a/services/document-updater/app/coffee/ProjectFlusher.coffee b/services/document-updater/app/coffee/ProjectFlusher.js similarity index 100% rename from services/document-updater/app/coffee/ProjectFlusher.coffee rename to services/document-updater/app/coffee/ProjectFlusher.js diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee b/services/document-updater/app/coffee/ProjectHistoryRedisManager.js similarity index 100% rename from services/document-updater/app/coffee/ProjectHistoryRedisManager.coffee rename to services/document-updater/app/coffee/ProjectHistoryRedisManager.js diff --git a/services/document-updater/app/coffee/ProjectManager.coffee b/services/document-updater/app/coffee/ProjectManager.js similarity index 100% rename from services/document-updater/app/coffee/ProjectManager.coffee rename to 
services/document-updater/app/coffee/ProjectManager.js diff --git a/services/document-updater/app/coffee/RangesManager.coffee b/services/document-updater/app/coffee/RangesManager.js similarity index 100% rename from services/document-updater/app/coffee/RangesManager.coffee rename to services/document-updater/app/coffee/RangesManager.js diff --git a/services/document-updater/app/coffee/RangesTracker.coffee b/services/document-updater/app/coffee/RangesTracker.js similarity index 100% rename from services/document-updater/app/coffee/RangesTracker.coffee rename to services/document-updater/app/coffee/RangesTracker.js diff --git a/services/document-updater/app/coffee/RateLimitManager.coffee b/services/document-updater/app/coffee/RateLimitManager.js similarity index 100% rename from services/document-updater/app/coffee/RateLimitManager.coffee rename to services/document-updater/app/coffee/RateLimitManager.js diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.coffee b/services/document-updater/app/coffee/RealTimeRedisManager.js similarity index 100% rename from services/document-updater/app/coffee/RealTimeRedisManager.coffee rename to services/document-updater/app/coffee/RealTimeRedisManager.js diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.js similarity index 100% rename from services/document-updater/app/coffee/RedisManager.coffee rename to services/document-updater/app/coffee/RedisManager.js diff --git a/services/document-updater/app/coffee/ShareJsDB.coffee b/services/document-updater/app/coffee/ShareJsDB.js similarity index 100% rename from services/document-updater/app/coffee/ShareJsDB.coffee rename to services/document-updater/app/coffee/ShareJsDB.js diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.coffee b/services/document-updater/app/coffee/ShareJsUpdateManager.js similarity index 100% rename from services/document-updater/app/coffee/ShareJsUpdateManager.coffee rename to services/document-updater/app/coffee/ShareJsUpdateManager.js diff --git a/services/document-updater/app/coffee/SnapshotManager.coffee b/services/document-updater/app/coffee/SnapshotManager.js similarity index 100% rename from services/document-updater/app/coffee/SnapshotManager.coffee rename to services/document-updater/app/coffee/SnapshotManager.js diff --git a/services/document-updater/app/coffee/UpdateKeys.coffee b/services/document-updater/app/coffee/UpdateKeys.js similarity index 100% rename from services/document-updater/app/coffee/UpdateKeys.coffee rename to services/document-updater/app/coffee/UpdateKeys.js diff --git a/services/document-updater/app/coffee/UpdateManager.coffee b/services/document-updater/app/coffee/UpdateManager.js similarity index 100% rename from services/document-updater/app/coffee/UpdateManager.coffee rename to services/document-updater/app/coffee/UpdateManager.js diff --git a/services/document-updater/app/coffee/mongojs.coffee b/services/document-updater/app/coffee/mongojs.js similarity index 100% rename from services/document-updater/app/coffee/mongojs.coffee rename to services/document-updater/app/coffee/mongojs.js diff --git a/services/document-updater/app/coffee/sharejs/count.coffee b/services/document-updater/app/coffee/sharejs/count.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/count.coffee rename to services/document-updater/app/coffee/sharejs/count.js diff --git a/services/document-updater/app/coffee/sharejs/helpers.coffee 
b/services/document-updater/app/coffee/sharejs/helpers.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/helpers.coffee rename to services/document-updater/app/coffee/sharejs/helpers.js diff --git a/services/document-updater/app/coffee/sharejs/index.coffee b/services/document-updater/app/coffee/sharejs/index.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/index.coffee rename to services/document-updater/app/coffee/sharejs/index.js diff --git a/services/document-updater/app/coffee/sharejs/json-api.coffee b/services/document-updater/app/coffee/sharejs/json-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/json-api.coffee rename to services/document-updater/app/coffee/sharejs/json-api.js diff --git a/services/document-updater/app/coffee/sharejs/json.coffee b/services/document-updater/app/coffee/sharejs/json.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/json.coffee rename to services/document-updater/app/coffee/sharejs/json.js diff --git a/services/document-updater/app/coffee/sharejs/model.coffee b/services/document-updater/app/coffee/sharejs/model.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/model.coffee rename to services/document-updater/app/coffee/sharejs/model.js diff --git a/services/document-updater/app/coffee/sharejs/server/model.coffee b/services/document-updater/app/coffee/sharejs/server/model.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/server/model.coffee rename to services/document-updater/app/coffee/sharejs/server/model.js diff --git a/services/document-updater/app/coffee/sharejs/server/syncqueue.coffee b/services/document-updater/app/coffee/sharejs/server/syncqueue.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/server/syncqueue.coffee rename to services/document-updater/app/coffee/sharejs/server/syncqueue.js diff --git a/services/document-updater/app/coffee/sharejs/simple.coffee b/services/document-updater/app/coffee/sharejs/simple.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/simple.coffee rename to services/document-updater/app/coffee/sharejs/simple.js diff --git a/services/document-updater/app/coffee/sharejs/syncqueue.coffee b/services/document-updater/app/coffee/sharejs/syncqueue.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/syncqueue.coffee rename to services/document-updater/app/coffee/sharejs/syncqueue.js diff --git a/services/document-updater/app/coffee/sharejs/text-api.coffee b/services/document-updater/app/coffee/sharejs/text-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-api.coffee rename to services/document-updater/app/coffee/sharejs/text-api.js diff --git a/services/document-updater/app/coffee/sharejs/text-composable-api.coffee b/services/document-updater/app/coffee/sharejs/text-composable-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-composable-api.coffee rename to services/document-updater/app/coffee/sharejs/text-composable-api.js diff --git a/services/document-updater/app/coffee/sharejs/text-composable.coffee b/services/document-updater/app/coffee/sharejs/text-composable.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-composable.coffee rename to services/document-updater/app/coffee/sharejs/text-composable.js diff --git 
a/services/document-updater/app/coffee/sharejs/text-tp2-api.coffee b/services/document-updater/app/coffee/sharejs/text-tp2-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-tp2-api.coffee rename to services/document-updater/app/coffee/sharejs/text-tp2-api.js diff --git a/services/document-updater/app/coffee/sharejs/text-tp2.coffee b/services/document-updater/app/coffee/sharejs/text-tp2.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-tp2.coffee rename to services/document-updater/app/coffee/sharejs/text-tp2.js diff --git a/services/document-updater/app/coffee/sharejs/text.coffee b/services/document-updater/app/coffee/sharejs/text.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text.coffee rename to services/document-updater/app/coffee/sharejs/text.js diff --git a/services/document-updater/app/coffee/sharejs/types/count.coffee b/services/document-updater/app/coffee/sharejs/types/count.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/count.coffee rename to services/document-updater/app/coffee/sharejs/types/count.js diff --git a/services/document-updater/app/coffee/sharejs/types/helpers.coffee b/services/document-updater/app/coffee/sharejs/types/helpers.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/helpers.coffee rename to services/document-updater/app/coffee/sharejs/types/helpers.js diff --git a/services/document-updater/app/coffee/sharejs/types/index.coffee b/services/document-updater/app/coffee/sharejs/types/index.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/index.coffee rename to services/document-updater/app/coffee/sharejs/types/index.js diff --git a/services/document-updater/app/coffee/sharejs/types/json-api.coffee b/services/document-updater/app/coffee/sharejs/types/json-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/json-api.coffee rename to services/document-updater/app/coffee/sharejs/types/json-api.js diff --git a/services/document-updater/app/coffee/sharejs/types/json.coffee b/services/document-updater/app/coffee/sharejs/types/json.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/json.coffee rename to services/document-updater/app/coffee/sharejs/types/json.js diff --git a/services/document-updater/app/coffee/sharejs/types/model.coffee b/services/document-updater/app/coffee/sharejs/types/model.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/model.coffee rename to services/document-updater/app/coffee/sharejs/types/model.js diff --git a/services/document-updater/app/coffee/sharejs/types/simple.coffee b/services/document-updater/app/coffee/sharejs/types/simple.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/simple.coffee rename to services/document-updater/app/coffee/sharejs/types/simple.js diff --git a/services/document-updater/app/coffee/sharejs/types/syncqueue.coffee b/services/document-updater/app/coffee/sharejs/types/syncqueue.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/syncqueue.coffee rename to services/document-updater/app/coffee/sharejs/types/syncqueue.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-api.coffee b/services/document-updater/app/coffee/sharejs/types/text-api.js similarity index 100% rename from 
services/document-updater/app/coffee/sharejs/types/text-api.coffee rename to services/document-updater/app/coffee/sharejs/types/text-api.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable-api.coffee b/services/document-updater/app/coffee/sharejs/types/text-composable-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-composable-api.coffee rename to services/document-updater/app/coffee/sharejs/types/text-composable-api.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable.coffee b/services/document-updater/app/coffee/sharejs/types/text-composable.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-composable.coffee rename to services/document-updater/app/coffee/sharejs/types/text-composable.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2-api.coffee b/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-tp2-api.coffee rename to services/document-updater/app/coffee/sharejs/types/text-tp2-api.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2.coffee b/services/document-updater/app/coffee/sharejs/types/text-tp2.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-tp2.coffee rename to services/document-updater/app/coffee/sharejs/types/text-tp2.js diff --git a/services/document-updater/app/coffee/sharejs/types/text.coffee b/services/document-updater/app/coffee/sharejs/types/text.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text.coffee rename to services/document-updater/app/coffee/sharejs/types/text.js diff --git a/services/document-updater/app/coffee/sharejs/types/web-prelude.coffee b/services/document-updater/app/coffee/sharejs/types/web-prelude.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/web-prelude.coffee rename to services/document-updater/app/coffee/sharejs/types/web-prelude.js diff --git a/services/document-updater/app/coffee/sharejs/web-prelude.coffee b/services/document-updater/app/coffee/sharejs/web-prelude.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/web-prelude.coffee rename to services/document-updater/app/coffee/sharejs/web-prelude.js From 1fa88826741d47ceb067727863f8f08782620b69 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:08:21 +0200 Subject: [PATCH 610/769] decaffeinate: Convert DeleteQueueManager.coffee and 58 other files to JS --- .../app/coffee/DeleteQueueManager.js | 165 ++- .../document-updater/app/coffee/DiffCodec.js | 69 +- .../app/coffee/DispatchManager.js | 126 +- .../app/coffee/DocumentManager.js | 513 +++++--- .../document-updater/app/coffee/Errors.js | 64 +- .../app/coffee/HistoryManager.js | 229 ++-- .../app/coffee/HistoryRedisManager.js | 37 +- .../app/coffee/HttpController.js | 519 +++++--- .../app/coffee/LockManager.js | 211 +-- .../app/coffee/LoggerSerializers.js | 58 +- .../document-updater/app/coffee/Metrics.js | 2 +- .../app/coffee/PersistenceManager.js | 208 +-- .../document-updater/app/coffee/Profiler.js | 80 +- .../app/coffee/ProjectFlusher.js | 160 ++- .../app/coffee/ProjectHistoryRedisManager.js | 160 ++- .../app/coffee/ProjectManager.js | 359 ++--- .../app/coffee/RangesManager.js | 170 ++- .../app/coffee/RangesTracker.js | 1169 +++++++++-------- 
.../app/coffee/RateLimitManager.js | 85 +- .../app/coffee/RealTimeRedisManager.js | 111 +- .../app/coffee/RedisManager.js | 776 ++++++----- .../document-updater/app/coffee/ShareJsDB.js | 92 +- .../app/coffee/ShareJsUpdateManager.js | 160 ++- .../app/coffee/SnapshotManager.js | 88 +- .../document-updater/app/coffee/UpdateKeys.js | 7 +- .../app/coffee/UpdateManager.js | 382 +++--- .../document-updater/app/coffee/mongojs.js | 31 +- .../app/coffee/sharejs/count.js | 40 +- .../app/coffee/sharejs/helpers.js | 126 +- .../app/coffee/sharejs/index.js | 28 +- .../app/coffee/sharejs/json-api.js | 417 +++--- .../app/coffee/sharejs/json.js | 859 ++++++------ .../app/coffee/sharejs/model.js | 1054 ++++++++------- .../app/coffee/sharejs/server/model.js | 1059 ++++++++------- .../app/coffee/sharejs/server/syncqueue.js | 80 +- .../app/coffee/sharejs/simple.js | 74 +- .../app/coffee/sharejs/syncqueue.js | 80 +- .../app/coffee/sharejs/text-api.js | 58 +- .../app/coffee/sharejs/text-composable-api.js | 87 +- .../app/coffee/sharejs/text-composable.js | 480 ++++--- .../app/coffee/sharejs/text-tp2-api.js | 169 ++- .../app/coffee/sharejs/text-tp2.js | 602 +++++---- .../app/coffee/sharejs/text.js | 376 +++--- .../app/coffee/sharejs/types/count.js | 40 +- .../app/coffee/sharejs/types/helpers.js | 126 +- .../app/coffee/sharejs/types/index.js | 28 +- .../app/coffee/sharejs/types/json-api.js | 417 +++--- .../app/coffee/sharejs/types/json.js | 859 ++++++------ .../app/coffee/sharejs/types/model.js | 1054 ++++++++------- .../app/coffee/sharejs/types/simple.js | 74 +- .../app/coffee/sharejs/types/syncqueue.js | 80 +- .../app/coffee/sharejs/types/text-api.js | 58 +- .../sharejs/types/text-composable-api.js | 87 +- .../coffee/sharejs/types/text-composable.js | 480 ++++--- .../app/coffee/sharejs/types/text-tp2-api.js | 169 ++- .../app/coffee/sharejs/types/text-tp2.js | 602 +++++---- .../app/coffee/sharejs/types/text.js | 476 ++++--- .../app/coffee/sharejs/types/web-prelude.js | 10 +- .../app/coffee/sharejs/web-prelude.js | 10 +- 59 files changed, 9218 insertions(+), 6972 deletions(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.js b/services/document-updater/app/coffee/DeleteQueueManager.js index 9e3f1c176e..2b6230100a 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.js +++ b/services/document-updater/app/coffee/DeleteQueueManager.js @@ -1,79 +1,102 @@ -Settings = require('settings-sharelatex') -RedisManager = require "./RedisManager" -ProjectManager = require "./ProjectManager" -logger = require "logger-sharelatex" -metrics = require "./Metrics" -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DeleteQueueManager; +const Settings = require('settings-sharelatex'); +const RedisManager = require("./RedisManager"); +const ProjectManager = require("./ProjectManager"); +const logger = require("logger-sharelatex"); +const metrics = require("./Metrics"); +const async = require("async"); -# Maintain a sorted set of project flushAndDelete requests, ordered by timestamp -# (ZADD), and process them from oldest to newest. A flushAndDelete request comes -# from real-time and is triggered when a user leaves a project. 
-# -# The aim is to remove the project from redis 5 minutes after the last request -# if there has been no activity (document updates) in that time. If there is -# activity we can expect a further flushAndDelete request when the editing user -# leaves the project. -# -# If a new flushAndDelete request comes in while an existing request is already -# in the queue we update the timestamp as we can postpone flushing further. -# -# Documents are processed by checking the queue, seeing if the first entry is -# older than 5 minutes, and popping it from the queue in that case. +// Maintain a sorted set of project flushAndDelete requests, ordered by timestamp +// (ZADD), and process them from oldest to newest. A flushAndDelete request comes +// from real-time and is triggered when a user leaves a project. +// +// The aim is to remove the project from redis 5 minutes after the last request +// if there has been no activity (document updates) in that time. If there is +// activity we can expect a further flushAndDelete request when the editing user +// leaves the project. +// +// If a new flushAndDelete request comes in while an existing request is already +// in the queue we update the timestamp as we can postpone flushing further. +// +// Documents are processed by checking the queue, seeing if the first entry is +// older than 5 minutes, and popping it from the queue in that case. -module.exports = DeleteQueueManager = - flushAndDeleteOldProjects: (options, callback) -> - startTime = Date.now() - cutoffTime = startTime - options.min_delete_age + 100 * (Math.random() - 0.5) - count = 0 +module.exports = (DeleteQueueManager = { + flushAndDeleteOldProjects(options, callback) { + const startTime = Date.now(); + const cutoffTime = (startTime - options.min_delete_age) + (100 * (Math.random() - 0.5)); + let count = 0; - flushProjectIfNotModified = (project_id, flushTimestamp, cb) -> - ProjectManager.getProjectDocsTimestamps project_id, (err, timestamps) -> - return callback(err) if err? - if timestamps.length == 0 - logger.log {project_id}, "skipping flush of queued project - no timestamps" - return cb() - # are any of the timestamps newer than the time the project was flushed? - for timestamp in timestamps when timestamp > flushTimestamp - metrics.inc "queued-delete-skipped" - logger.debug {project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete" - return cb() - logger.log {project_id, flushTimestamp}, "flushing queued project" - ProjectManager.flushAndDeleteProjectWithLocks project_id, {skip_history_flush: false}, (err) -> - if err? - logger.err {project_id, err}, "error flushing queued project" - metrics.inc "queued-delete-completed" - return cb(null, true) + const flushProjectIfNotModified = (project_id, flushTimestamp, cb) => ProjectManager.getProjectDocsTimestamps(project_id, function(err, timestamps) { + if (err != null) { return callback(err); } + if (timestamps.length === 0) { + logger.log({project_id}, "skipping flush of queued project - no timestamps"); + return cb(); + } + // are any of the timestamps newer than the time the project was flushed? 
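+ // A newer timestamp means the project was updated after this delete was queued;
+ // skip it now, since a further flushAndDelete request will arrive when the editing user leaves.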
+ for (let timestamp of Array.from(timestamps)) { + if (timestamp > flushTimestamp) { + metrics.inc("queued-delete-skipped"); + logger.debug({project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete"); + return cb(); + } + } + logger.log({project_id, flushTimestamp}, "flushing queued project"); + return ProjectManager.flushAndDeleteProjectWithLocks(project_id, {skip_history_flush: false}, function(err) { + if (err != null) { + logger.err({project_id, err}, "error flushing queued project"); + } + metrics.inc("queued-delete-completed"); + return cb(null, true); + }); + }); - flushNextProject = () -> - now = Date.now() - if now - startTime > options.timeout - logger.log "hit time limit on flushing old projects" - return callback(null, count) - if count > options.limit - logger.log "hit count limit on flushing old projects" - return callback(null, count) - RedisManager.getNextProjectToFlushAndDelete cutoffTime, (err, project_id, flushTimestamp, queueLength) -> - return callback(err) if err? - return callback(null, count) if !project_id? - logger.log {project_id, queueLength: queueLength}, "flushing queued project" - metrics.globalGauge "queued-flush-backlog", queueLength - flushProjectIfNotModified project_id, flushTimestamp, (err, flushed) -> - count++ if flushed - flushNextProject() + var flushNextProject = function() { + const now = Date.now(); + if ((now - startTime) > options.timeout) { + logger.log("hit time limit on flushing old projects"); + return callback(null, count); + } + if (count > options.limit) { + logger.log("hit count limit on flushing old projects"); + return callback(null, count); + } + return RedisManager.getNextProjectToFlushAndDelete(cutoffTime, function(err, project_id, flushTimestamp, queueLength) { + if (err != null) { return callback(err); } + if ((project_id == null)) { return callback(null, count); } + logger.log({project_id, queueLength}, "flushing queued project"); + metrics.globalGauge("queued-flush-backlog", queueLength); + return flushProjectIfNotModified(project_id, flushTimestamp, function(err, flushed) { + if (flushed) { count++; } + return flushNextProject(); + }); + }); + }; - flushNextProject() + return flushNextProject(); + }, - startBackgroundFlush: () -> - SHORT_DELAY = 10 - LONG_DELAY = 1000 - doFlush = () -> - if Settings.shuttingDown - logger.warn "discontinuing background flush due to shutdown" - return - DeleteQueueManager.flushAndDeleteOldProjects { + startBackgroundFlush() { + const SHORT_DELAY = 10; + const LONG_DELAY = 1000; + var doFlush = function() { + if (Settings.shuttingDown) { + logger.warn("discontinuing background flush due to shutdown"); + return; + } + return DeleteQueueManager.flushAndDeleteOldProjects({ timeout:1000, min_delete_age:3*60*1000, - limit:1000 # high value, to ensure we always flush enough projects - }, (err, flushed) -> - setTimeout doFlush, (if flushed > 10 then SHORT_DELAY else LONG_DELAY) - doFlush() + limit:1000 // high value, to ensure we always flush enough projects + }, (err, flushed) => setTimeout(doFlush, (flushed > 10 ? 
SHORT_DELAY : LONG_DELAY))); + }; + return doFlush(); + } +}); diff --git a/services/document-updater/app/coffee/DiffCodec.js b/services/document-updater/app/coffee/DiffCodec.js index ba5966648e..c5c99b7acc 100644 --- a/services/document-updater/app/coffee/DiffCodec.js +++ b/services/document-updater/app/coffee/DiffCodec.js @@ -1,31 +1,48 @@ -diff_match_patch = require("../lib/diff_match_patch").diff_match_patch -dmp = new diff_match_patch() +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DiffCodec; +const { + diff_match_patch +} = require("../lib/diff_match_patch"); +const dmp = new diff_match_patch(); -module.exports = DiffCodec = - ADDED: 1 - REMOVED: -1 - UNCHANGED: 0 +module.exports = (DiffCodec = { + ADDED: 1, + REMOVED: -1, + UNCHANGED: 0, - diffAsShareJsOp: (before, after, callback = (error, ops) ->) -> - diffs = dmp.diff_main(before.join("\n"), after.join("\n")) - dmp.diff_cleanupSemantic(diffs) + diffAsShareJsOp(before, after, callback) { + if (callback == null) { callback = function(error, ops) {}; } + const diffs = dmp.diff_main(before.join("\n"), after.join("\n")); + dmp.diff_cleanupSemantic(diffs); - ops = [] - position = 0 - for diff in diffs - type = diff[0] - content = diff[1] - if type == @ADDED - ops.push - i: content + const ops = []; + let position = 0; + for (let diff of Array.from(diffs)) { + const type = diff[0]; + const content = diff[1]; + if (type === this.ADDED) { + ops.push({ + i: content, p: position - position += content.length - else if type == @REMOVED - ops.push - d: content + }); + position += content.length; + } else if (type === this.REMOVED) { + ops.push({ + d: content, p: position - else if type == @UNCHANGED - position += content.length - else - throw "Unknown type" - callback null, ops + }); + } else if (type === this.UNCHANGED) { + position += content.length; + } else { + throw "Unknown type"; + } + } + return callback(null, ops); + } +}); diff --git a/services/document-updater/app/coffee/DispatchManager.js b/services/document-updater/app/coffee/DispatchManager.js index 375f3b98dc..3bf343dd2e 100644 --- a/services/document-updater/app/coffee/DispatchManager.js +++ b/services/document-updater/app/coffee/DispatchManager.js @@ -1,55 +1,81 @@ -Settings = require('settings-sharelatex') -logger = require('logger-sharelatex') -Keys = require('./UpdateKeys') -redis = require("redis-sharelatex") -Errors = require("./Errors") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS202: Simplify dynamic range loops + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DispatchManager; +const Settings = require('settings-sharelatex'); +const logger = require('logger-sharelatex'); +const Keys = require('./UpdateKeys'); +const redis = require("redis-sharelatex"); +const Errors = require("./Errors"); -UpdateManager = require('./UpdateManager') -Metrics = require('./Metrics') -RateLimitManager = require('./RateLimitManager') +const UpdateManager = require('./UpdateManager'); +const Metrics = require('./Metrics'); +const RateLimitManager = 
require('./RateLimitManager'); -module.exports = DispatchManager = - createDispatcher: (RateLimiter) -> - client = redis.createClient(Settings.redis.documentupdater) - worker = { - client: client - _waitForUpdateThenDispatchWorker: (callback = (error) ->) -> - timer = new Metrics.Timer "worker.waiting" - worker.client.blpop "pending-updates-list", 0, (error, result) -> - logger.log("getting pending-updates-list", error, result) - timer.done() - return callback(error) if error? - return callback() if !result? - [list_name, doc_key] = result - [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) - # Dispatch this in the background - backgroundTask = (cb) -> - UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, (error) -> - # log everything except OpRangeNotAvailable errors, these are normal - if error? - # downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry - logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || (error instanceof Errors.DeleteMismatchError) - if logAsWarning - logger.warn err: error, project_id: project_id, doc_id: doc_id, "error processing update" - else - logger.error err: error, project_id: project_id, doc_id: doc_id, "error processing update" - cb() - RateLimiter.run backgroundTask, callback +module.exports = (DispatchManager = { + createDispatcher(RateLimiter) { + const client = redis.createClient(Settings.redis.documentupdater); + var worker = { + client, + _waitForUpdateThenDispatchWorker(callback) { + if (callback == null) { callback = function(error) {}; } + const timer = new Metrics.Timer("worker.waiting"); + return worker.client.blpop("pending-updates-list", 0, function(error, result) { + logger.log("getting pending-updates-list", error, result); + timer.done(); + if (error != null) { return callback(error); } + if ((result == null)) { return callback(); } + const [list_name, doc_key] = Array.from(result); + const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key)); + // Dispatch this in the background + const backgroundTask = cb => UpdateManager.processOutstandingUpdatesWithLock(project_id, doc_id, function(error) { + // log everything except OpRangeNotAvailable errors, these are normal + if (error != null) { + // downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry + const logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || (error instanceof Errors.DeleteMismatchError); + if (logAsWarning) { + logger.warn({err: error, project_id, doc_id}, "error processing update"); + } else { + logger.error({err: error, project_id, doc_id}, "error processing update"); + } + } + return cb(); + }); + return RateLimiter.run(backgroundTask, callback); + }); + }, - run: () -> - return if Settings.shuttingDown - worker._waitForUpdateThenDispatchWorker (error) => - if error? 
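The dispatcher converted in this hunk is a blocking-pop worker loop: each worker BLPOPs `pending-updates-list` with a timeout of 0 (block until a key arrives), splits the popped `project_id:doc_id` key, hands the real work to the rate limiter as a background task, and immediately goes back to waiting. A stripped-down sketch of that loop shape, with a hypothetical `processKey` standing in for the UpdateManager call and the rate limiter omitted (illustration only, not part of this patch):

    // Blocking-pop worker loop over a Redis list, callback style.
    // `client` is a connected redis client; `processKey` is a hypothetical
    // stand-in for UpdateManager.processOutstandingUpdatesWithLock.
    function runWorker(client, processKey) {
        client.blpop("pending-updates-list", 0, function(error, result) {
            if (error != null) { throw error; } // fail fast, as worker.run() does
            if (result == null) { return runWorker(client, processKey); }
            const [, docKey] = result; // result is [listName, poppedValue]
            processKey(docKey, function() {}); // dispatched in the background
            runWorker(client, processKey); // wait for the next key right away
        });
    }

The recursion does not grow the stack, because each call to runWorker returns before the blpop callback fires.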
- logger.error err: error, "Error in worker process" - throw error - else - worker.run() - } + run() { + if (Settings.shuttingDown) { return; } + return worker._waitForUpdateThenDispatchWorker(error => { + if (error != null) { + logger.error({err: error}, "Error in worker process"); + throw error; + } else { + return worker.run(); + } + }); + } + }; - return worker + return worker; + }, - createAndStartDispatchers: (number) -> - RateLimiter = new RateLimitManager(number) - for i in [1..number] - worker = DispatchManager.createDispatcher(RateLimiter) - worker.run() + createAndStartDispatchers(number) { + const RateLimiter = new RateLimitManager(number); + return (() => { + const result = []; + for (let i = 1, end = number, asc = 1 <= end; asc ? i <= end : i >= end; asc ? i++ : i--) { + const worker = DispatchManager.createDispatcher(RateLimiter); + result.push(worker.run()); + } + return result; + })(); + } +}); diff --git a/services/document-updater/app/coffee/DocumentManager.js b/services/document-updater/app/coffee/DocumentManager.js index b37d2e9433..c5a9ebb3d1 100644 --- a/services/document-updater/app/coffee/DocumentManager.js +++ b/services/document-updater/app/coffee/DocumentManager.js @@ -1,243 +1,340 @@ -RedisManager = require "./RedisManager" -ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" -PersistenceManager = require "./PersistenceManager" -DiffCodec = require "./DiffCodec" -logger = require "logger-sharelatex" -Metrics = require "./Metrics" -HistoryManager = require "./HistoryManager" -RealTimeRedisManager = require "./RealTimeRedisManager" -Errors = require "./Errors" -RangesManager = require "./RangesManager" -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DocumentManager; +const RedisManager = require("./RedisManager"); +const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager"); +const PersistenceManager = require("./PersistenceManager"); +const DiffCodec = require("./DiffCodec"); +const logger = require("logger-sharelatex"); +const Metrics = require("./Metrics"); +const HistoryManager = require("./HistoryManager"); +const RealTimeRedisManager = require("./RealTimeRedisManager"); +const Errors = require("./Errors"); +const RangesManager = require("./RangesManager"); +const async = require("async"); -MAX_UNFLUSHED_AGE = 300 * 1000 # 5 mins, document should be flushed to mongo this time after a change +const MAX_UNFLUSHED_AGE = 300 * 1000; // 5 mins, document should be flushed to mongo this time after a change -module.exports = DocumentManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) ->) -> - timer = new Metrics.Timer("docManager.getDoc") - callback = (args...) -> - timer.done() - _callback(args...) 
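Every DocumentManager method in this file wraps its `_callback` so that a metrics timer is stopped exactly once before any arguments reach the caller, as the converted `getDoc` below shows. The same pattern as a hypothetical standalone helper (a sketch, not code from this patch):

    // Stop the timer, then forward whatever arguments the callback receives.
    function withTimer(timer, _callback) {
        return function(...args) {
            timer.done();
            return _callback(...args);
        };
    }

    // e.g. const callback = withTimer(new Metrics.Timer("docManager.getDoc"), _callback);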
+module.exports = (DocumentManager = { + getDoc(project_id, doc_id, _callback) { + if (_callback == null) { _callback = function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) {}; } + const timer = new Metrics.Timer("docManager.getDoc"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) -> - return callback(error) if error? - if !lines? or !version? - logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) -> - return callback(error) if error? - logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId, projectHistoryType}, "got doc from persistence API" - RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) -> - return callback(error) if error? - RedisManager.setHistoryType doc_id, projectHistoryType, (error) -> - return callback(error) if error? - callback null, lines, version, ranges || {}, pathname, projectHistoryId, null, false - else - callback null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true + return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) { + if (error != null) { return callback(error); } + if ((lines == null) || (version == null)) { + logger.log({project_id, doc_id}, "doc not in redis so getting from persistence API"); + return PersistenceManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) { + if (error != null) { return callback(error); } + logger.log({project_id, doc_id, lines, version, pathname, projectHistoryId, projectHistoryType}, "got doc from persistence API"); + return RedisManager.putDocInMemory(project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, function(error) { + if (error != null) { return callback(error); } + return RedisManager.setHistoryType(doc_id, projectHistoryType, function(error) { + if (error != null) { return callback(error); } + return callback(null, lines, version, ranges || {}, pathname, projectHistoryId, null, false); + }); + }); + }); + } else { + return callback(null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true); + } + }); + }, - getDocAndRecentOps: (project_id, doc_id, fromVersion, _callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) -> - timer = new Metrics.Timer("docManager.getDocAndRecentOps") - callback = (args...) -> - timer.done() - _callback(args...) + getDocAndRecentOps(project_id, doc_id, fromVersion, _callback) { + if (_callback == null) { _callback = function(error, lines, version, ops, ranges, pathname, projectHistoryId) {}; } + const timer = new Metrics.Timer("docManager.getDocAndRecentOps"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> - return callback(error) if error? 
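The `getDoc` just converted is a read-through cache: it tries Redis first, and on a miss fetches the doc from the persistence API, writes it back into Redis (lines, version, ranges, pathname, projectHistoryId, plus the history type), and reports `alreadyLoaded: false` so callers know the doc was cold. The generic shape of that flow, with hypothetical `cacheGet`/`backingGet`/`cachePut` helpers (a sketch under those assumptions):

    // Read-through lookup: serve from cache when possible, otherwise
    // fetch from the backing store and fill the cache on the way out.
    function readThrough(cacheGet, backingGet, cachePut, callback) {
        cacheGet(function(error, value) {
            if (error != null) { return callback(error); }
            if (value != null) { return callback(null, value, true); } // cache hit
            backingGet(function(error, value) {
                if (error != null) { return callback(error); }
                cachePut(value, function(error) {
                    if (error != null) { return callback(error); }
                    callback(null, value, false); // cold load
                });
            });
        });
    }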
- if fromVersion == -1 - callback null, lines, version, [], ranges, pathname, projectHistoryId - else - RedisManager.getPreviousDocOps doc_id, fromVersion, version, (error, ops) -> - return callback(error) if error? - callback null, lines, version, ops, ranges, pathname, projectHistoryId + return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) { + if (error != null) { return callback(error); } + if (fromVersion === -1) { + return callback(null, lines, version, [], ranges, pathname, projectHistoryId); + } else { + return RedisManager.getPreviousDocOps(doc_id, fromVersion, version, function(error, ops) { + if (error != null) { return callback(error); } + return callback(null, lines, version, ops, ranges, pathname, projectHistoryId); + }); + } + }); + }, - setDoc: (project_id, doc_id, newLines, source, user_id, undoing, _callback = (error) ->) -> - timer = new Metrics.Timer("docManager.setDoc") - callback = (args...) -> - timer.done() - _callback(args...) + setDoc(project_id, doc_id, newLines, source, user_id, undoing, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("docManager.setDoc"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - if !newLines? - return callback(new Error("No lines were provided to setDoc")) + if ((newLines == null)) { + return callback(new Error("No lines were provided to setDoc")); + } - UpdateManager = require "./UpdateManager" - DocumentManager.getDoc project_id, doc_id, (error, oldLines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> - return callback(error) if error? + const UpdateManager = require("./UpdateManager"); + return DocumentManager.getDoc(project_id, doc_id, function(error, oldLines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) { + if (error != null) { return callback(error); } - if oldLines? and oldLines.length > 0 and oldLines[0].text? - logger.log doc_id: doc_id, project_id: project_id, oldLines: oldLines, newLines: newLines, "document is JSON so not updating" - return callback(null) + if ((oldLines != null) && (oldLines.length > 0) && (oldLines[0].text != null)) { + logger.log({doc_id, project_id, oldLines, newLines}, "document is JSON so not updating"); + return callback(null); + } - logger.log doc_id: doc_id, project_id: project_id, oldLines: oldLines, newLines: newLines, "setting a document via http" - DiffCodec.diffAsShareJsOp oldLines, newLines, (error, op) -> - return callback(error) if error? - if undoing - for o in op or [] - o.u = true # Turn on undo flag for each op for track changes - update = - doc: doc_id - op: op - v: version - meta: - type: "external" - source: source - user_id: user_id - UpdateManager.applyUpdate project_id, doc_id, update, (error) -> - return callback(error) if error? - # If the document was loaded already, then someone has it open - # in a project, and the usual flushing mechanism will happen. - # Otherwise we should remove it immediately since nothing else - # is using it. - if alreadyLoaded - DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> - return callback(error) if error? 
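The `fromVersion` contract in the `getDocAndRecentOps` conversion above: `-1` means the caller wants only the current snapshot, so no ops are fetched; any other value asks for the ops needed to catch that version up to the current one via `RedisManager.getPreviousDocOps`. Illustrated for a doc currently at version 5 (hypothetical values):

    // getDocAndRecentOps(project_id, doc_id, -1, cb)
    //   -> cb(null, lines, 5, [], ranges, pathname, projectHistoryId)
    // getDocAndRecentOps(project_id, doc_id, 3, cb)
    //   -> cb(null, lines, 5, opsSinceV3, ranges, pathname, projectHistoryId)
    //      where opsSinceV3 = RedisManager.getPreviousDocOps(doc_id, 3, 5, ...)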
- callback null - else - DocumentManager.flushAndDeleteDoc project_id, doc_id, {}, (error) -> - # There is no harm in flushing project history if the previous - # call failed and sometimes it is required - HistoryManager.flushProjectChangesAsync project_id + logger.log({doc_id, project_id, oldLines, newLines}, "setting a document via http"); + return DiffCodec.diffAsShareJsOp(oldLines, newLines, function(error, op) { + if (error != null) { return callback(error); } + if (undoing) { + for (let o of Array.from(op || [])) { + o.u = true; + } // Turn on undo flag for each op for track changes + } + const update = { + doc: doc_id, + op, + v: version, + meta: { + type: "external", + source, + user_id + } + }; + return UpdateManager.applyUpdate(project_id, doc_id, update, function(error) { + if (error != null) { return callback(error); } + // If the document was loaded already, then someone has it open + // in a project, and the usual flushing mechanism will happen. + // Otherwise we should remove it immediately since nothing else + // is using it. + if (alreadyLoaded) { + return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) { + if (error != null) { return callback(error); } + return callback(null); + }); + } else { + return DocumentManager.flushAndDeleteDoc(project_id, doc_id, {}, function(error) { + // There is no harm in flushing project history if the previous + // call failed and sometimes it is required + HistoryManager.flushProjectChangesAsync(project_id); - return callback(error) if error? - callback null + if (error != null) { return callback(error); } + return callback(null); + }); + } + }); + }); + }); + }, - flushDocIfLoaded: (project_id, doc_id, _callback = (error) ->) -> - timer = new Metrics.Timer("docManager.flushDocIfLoaded") - callback = (args...) -> - timer.done() - _callback(args...) - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy) -> - return callback(error) if error? - if !lines? or !version? - logger.log project_id: project_id, doc_id: doc_id, "doc is not loaded so not flushing" - callback null # TODO: return a flag to bail out, as we go on to remove doc from memory? - else - logger.log project_id: project_id, doc_id: doc_id, version: version, "flushing doc" - PersistenceManager.setDoc project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, (error) -> - return callback(error) if error? - RedisManager.clearUnflushedTime doc_id, callback + flushDocIfLoaded(project_id, doc_id, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("docManager.flushDocIfLoaded"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; + return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy) { + if (error != null) { return callback(error); } + if ((lines == null) || (version == null)) { + logger.log({project_id, doc_id}, "doc is not loaded so not flushing"); + return callback(null); // TODO: return a flag to bail out, as we go on to remove doc from memory? 
+ } else { + logger.log({project_id, doc_id, version}, "flushing doc"); + return PersistenceManager.setDoc(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, function(error) { + if (error != null) { return callback(error); } + return RedisManager.clearUnflushedTime(doc_id, callback); + }); + } + }); + }, - flushAndDeleteDoc: (project_id, doc_id, options, _callback) -> - timer = new Metrics.Timer("docManager.flushAndDeleteDoc") - callback = (args...) -> - timer.done() - _callback(args...) + flushAndDeleteDoc(project_id, doc_id, options, _callback) { + const timer = new Metrics.Timer("docManager.flushAndDeleteDoc"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> - if error? - if options.ignoreFlushErrors - logger.warn {project_id: project_id, doc_id: doc_id, err: error}, "ignoring flush error while deleting document" - else - return callback(error) + return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) { + if (error != null) { + if (options.ignoreFlushErrors) { + logger.warn({project_id, doc_id, err: error}, "ignoring flush error while deleting document"); + } else { + return callback(error); + } + } - # Flush in the background since it requires a http request - HistoryManager.flushDocChangesAsync project_id, doc_id + // Flush in the background since it requires a http request + HistoryManager.flushDocChangesAsync(project_id, doc_id); - RedisManager.removeDocFromMemory project_id, doc_id, (error) -> - return callback(error) if error? - callback null + return RedisManager.removeDocFromMemory(project_id, doc_id, function(error) { + if (error != null) { return callback(error); } + return callback(null); + }); + }); + }, - acceptChanges: (project_id, doc_id, change_ids = [], _callback = (error) ->) -> - timer = new Metrics.Timer("docManager.acceptChanges") - callback = (args...) -> - timer.done() - _callback(args...) + acceptChanges(project_id, doc_id, change_ids, _callback) { + if (change_ids == null) { change_ids = []; } + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("docManager.acceptChanges"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> - return callback(error) if error? - if !lines? or !version? - return callback(new Errors.NotFoundError("document not found: #{doc_id}")) - RangesManager.acceptChanges change_ids, ranges, (error, new_ranges) -> - return callback(error) if error? - RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, {}, (error) -> - return callback(error) if error? 
- callback() + return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges) { + if (error != null) { return callback(error); } + if ((lines == null) || (version == null)) { + return callback(new Errors.NotFoundError(`document not found: ${doc_id}`)); + } + return RangesManager.acceptChanges(change_ids, ranges, function(error, new_ranges) { + if (error != null) { return callback(error); } + return RedisManager.updateDocument(project_id, doc_id, lines, version, [], new_ranges, {}, function(error) { + if (error != null) { return callback(error); } + return callback(); + }); + }); + }); + }, - deleteComment: (project_id, doc_id, comment_id, _callback = (error) ->) -> - timer = new Metrics.Timer("docManager.deleteComment") - callback = (args...) -> - timer.done() - _callback(args...) + deleteComment(project_id, doc_id, comment_id, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("docManager.deleteComment"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges) -> - return callback(error) if error? - if !lines? or !version? - return callback(new Errors.NotFoundError("document not found: #{doc_id}")) - RangesManager.deleteComment comment_id, ranges, (error, new_ranges) -> - return callback(error) if error? - RedisManager.updateDocument project_id, doc_id, lines, version, [], new_ranges, {}, (error) -> - return callback(error) if error? - callback() + return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges) { + if (error != null) { return callback(error); } + if ((lines == null) || (version == null)) { + return callback(new Errors.NotFoundError(`document not found: ${doc_id}`)); + } + return RangesManager.deleteComment(comment_id, ranges, function(error, new_ranges) { + if (error != null) { return callback(error); } + return RedisManager.updateDocument(project_id, doc_id, lines, version, [], new_ranges, {}, function(error) { + if (error != null) { return callback(error); } + return callback(); + }); + }); + }); + }, - renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, _callback = (error) ->) -> - timer = new Metrics.Timer("docManager.updateProject") - callback = (args...) -> - timer.done() - _callback(args...) + renameDoc(project_id, doc_id, user_id, update, projectHistoryId, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("docManager.updateProject"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - RedisManager.renameDoc project_id, doc_id, user_id, update, projectHistoryId, callback + return RedisManager.renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback); + }, - getDocAndFlushIfOld: (project_id, doc_id, callback = (error, doc) ->) -> - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) -> - return callback(error) if error? - # if doc was already loaded see if it needs to be flushed - if alreadyLoaded and unflushedTime? and (Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE - DocumentManager.flushDocIfLoaded project_id, doc_id, (error) -> - return callback(error) if error? 
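`getDocAndFlushIfOld`, whose conversion starts here, flushes a loaded doc whose unflushed changes are older than `MAX_UNFLUSHED_AGE` (five minutes, defined at the top of this file). The test it applies, pulled out as a hypothetical predicate for illustration:

    // Should this doc be flushed to mongo before being returned?
    const MAX_UNFLUSHED_AGE = 300 * 1000; // 5 mins, as defined above
    function needsFlush(alreadyLoaded, unflushedTime) {
        return alreadyLoaded &&
            (unflushedTime != null) &&
            ((Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE);
    }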
- callback(null, lines, version) - else - callback(null, lines, version) + getDocAndFlushIfOld(project_id, doc_id, callback) { + if (callback == null) { callback = function(error, doc) {}; } + return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) { + if (error != null) { return callback(error); } + // if doc was already loaded see if it needs to be flushed + if (alreadyLoaded && (unflushedTime != null) && ((Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE)) { + return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) { + if (error != null) { return callback(error); } + return callback(null, lines, version); + }); + } else { + return callback(null, lines, version); + } + }); + }, - resyncDocContents: (project_id, doc_id, callback) -> - logger.log {project_id: project_id, doc_id: doc_id}, "start resyncing doc contents" - RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> - return callback(error) if error? + resyncDocContents(project_id, doc_id, callback) { + logger.log({project_id, doc_id}, "start resyncing doc contents"); + return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) { + if (error != null) { return callback(error); } - if !lines? or !version? - logger.log {project_id: project_id, doc_id: doc_id}, "resyncing doc contents - not found in redis - retrieving from web" - PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> - if error? - logger.error {project_id: project_id, doc_id: doc_id, getDocError: error}, "resyncing doc contents - error retrieving from web" - return callback(error) - ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback - else - logger.log {project_id: project_id, doc_id: doc_id}, "resyncing doc contents - doc in redis - will queue in redis" - ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback + if ((lines == null) || (version == null)) { + logger.log({project_id, doc_id}, "resyncing doc contents - not found in redis - retrieving from web"); + return PersistenceManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) { + if (error != null) { + logger.error({project_id, doc_id, getDocError: error}, "resyncing doc contents - error retrieving from web"); + return callback(error); + } + return ProjectHistoryRedisManager.queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback); + }); + } else { + logger.log({project_id, doc_id}, "resyncing doc contents - doc in redis - will queue in redis"); + return ProjectHistoryRedisManager.queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback); + } + }); + }, - getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.getDoc, project_id, doc_id, callback + getDocWithLock(project_id, doc_id, callback) { + if (callback == null) { callback = function(error, lines, version) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.getDoc, project_id, doc_id, callback); + }, - getDocAndRecentOpsWithLock: (project_id, doc_id, 
fromVersion, callback = (error, lines, version, ops, ranges, pathname, projectHistoryId) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback + getDocAndRecentOpsWithLock(project_id, doc_id, fromVersion, callback) { + if (callback == null) { callback = function(error, lines, version, ops, ranges, pathname, projectHistoryId) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback); + }, - getDocAndFlushIfOldWithLock: (project_id, doc_id, callback = (error, doc) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.getDocAndFlushIfOld, project_id, doc_id, callback + getDocAndFlushIfOldWithLock(project_id, doc_id, callback) { + if (callback == null) { callback = function(error, doc) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.getDocAndFlushIfOld, project_id, doc_id, callback); + }, - setDocWithLock: (project_id, doc_id, lines, source, user_id, undoing, callback = (error) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, undoing, callback + setDocWithLock(project_id, doc_id, lines, source, user_id, undoing, callback) { + if (callback == null) { callback = function(error) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, undoing, callback); + }, - flushDocIfLoadedWithLock: (project_id, doc_id, callback = (error) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.flushDocIfLoaded, project_id, doc_id, callback + flushDocIfLoadedWithLock(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.flushDocIfLoaded, project_id, doc_id, callback); + }, - flushAndDeleteDocWithLock: (project_id, doc_id, options, callback) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, options, callback + flushAndDeleteDocWithLock(project_id, doc_id, options, callback) { + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.flushAndDeleteDoc, project_id, doc_id, options, callback); + }, - acceptChangesWithLock: (project_id, doc_id, change_ids, callback = (error) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.acceptChanges, project_id, doc_id, change_ids, callback + acceptChangesWithLock(project_id, doc_id, change_ids, callback) { + if (callback == null) { callback = function(error) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.acceptChanges, project_id, doc_id, change_ids, callback); + }, - deleteCommentWithLock: (project_id, doc_id, thread_id, callback = (error) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.deleteComment, project_id, doc_id, thread_id, callback + deleteCommentWithLock(project_id, doc_id, thread_id, callback) { + if (callback == null) { callback = 
function(error) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.deleteComment, project_id, doc_id, thread_id, callback); + }, - renameDocWithLock: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.renameDoc, project_id, doc_id, user_id, update, projectHistoryId, callback + renameDocWithLock(project_id, doc_id, user_id, update, projectHistoryId, callback) { + if (callback == null) { callback = function(error) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.renameDoc, project_id, doc_id, user_id, update, projectHistoryId, callback); + }, - resyncDocContentsWithLock: (project_id, doc_id, callback = (error) ->) -> - UpdateManager = require "./UpdateManager" - UpdateManager.lockUpdatesAndDo DocumentManager.resyncDocContents, project_id, doc_id, callback + resyncDocContentsWithLock(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + const UpdateManager = require("./UpdateManager"); + return UpdateManager.lockUpdatesAndDo(DocumentManager.resyncDocContents, project_id, doc_id, callback); + } +}); diff --git a/services/document-updater/app/coffee/Errors.js b/services/document-updater/app/coffee/Errors.js index e3d08e7641..a8cb2efb1d 100644 --- a/services/document-updater/app/coffee/Errors.js +++ b/services/document-updater/app/coffee/Errors.js @@ -1,33 +1,39 @@ -NotFoundError = (message) -> - error = new Error(message) - error.name = "NotFoundError" - error.__proto__ = NotFoundError.prototype - return error -NotFoundError.prototype.__proto__ = Error.prototype +let Errors; +var NotFoundError = function(message) { + const error = new Error(message); + error.name = "NotFoundError"; + error.__proto__ = NotFoundError.prototype; + return error; +}; +NotFoundError.prototype.__proto__ = Error.prototype; -OpRangeNotAvailableError = (message) -> - error = new Error(message) - error.name = "OpRangeNotAvailableError" - error.__proto__ = OpRangeNotAvailableError.prototype - return error -OpRangeNotAvailableError.prototype.__proto__ = Error.prototype +var OpRangeNotAvailableError = function(message) { + const error = new Error(message); + error.name = "OpRangeNotAvailableError"; + error.__proto__ = OpRangeNotAvailableError.prototype; + return error; +}; +OpRangeNotAvailableError.prototype.__proto__ = Error.prototype; -ProjectStateChangedError = (message) -> - error = new Error(message) - error.name = "ProjectStateChangedError" - error.__proto__ = ProjectStateChangedError.prototype - return error -ProjectStateChangedError.prototype.__proto__ = Error.prototype +var ProjectStateChangedError = function(message) { + const error = new Error(message); + error.name = "ProjectStateChangedError"; + error.__proto__ = ProjectStateChangedError.prototype; + return error; +}; +ProjectStateChangedError.prototype.__proto__ = Error.prototype; -DeleteMismatchError = (message) -> - error = new Error(message) - error.name = "DeleteMismatchError" - error.__proto__ = DeleteMismatchError.prototype - return error -DeleteMismatchError.prototype.__proto__ = Error.prototype +var DeleteMismatchError = function(message) { + const error = new Error(message); + error.name = "DeleteMismatchError"; + error.__proto__ = DeleteMismatchError.prototype; + return error; +}; +DeleteMismatchError.prototype.__proto__ = Error.prototype; -module.exports = Errors = 
- NotFoundError: NotFoundError - OpRangeNotAvailableError: OpRangeNotAvailableError - ProjectStateChangedError: ProjectStateChangedError - DeleteMismatchError: DeleteMismatchError +module.exports = (Errors = { + NotFoundError, + OpRangeNotAvailableError, + ProjectStateChangedError, + DeleteMismatchError +}); diff --git a/services/document-updater/app/coffee/HistoryManager.js b/services/document-updater/app/coffee/HistoryManager.js index 183ac268f3..ac9ba9a706 100644 --- a/services/document-updater/app/coffee/HistoryManager.js +++ b/services/document-updater/app/coffee/HistoryManager.js @@ -1,107 +1,144 @@ -async = require "async" -logger = require "logger-sharelatex" -request = require "request" -Settings = require "settings-sharelatex" -HistoryRedisManager = require "./HistoryRedisManager" -ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" -RedisManager = require "./RedisManager" -metrics = require "./Metrics" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let HistoryManager; +const async = require("async"); +const logger = require("logger-sharelatex"); +const request = require("request"); +const Settings = require("settings-sharelatex"); +const HistoryRedisManager = require("./HistoryRedisManager"); +const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager"); +const RedisManager = require("./RedisManager"); +const metrics = require("./Metrics"); -module.exports = HistoryManager = - flushDocChangesAsync: (project_id, doc_id) -> - if !Settings.apis?.trackchanges? - logger.warn { doc_id }, "track changes API is not configured, so not flushing" - return - RedisManager.getHistoryType doc_id, (err, projectHistoryType) -> - if err? - logger.warn {err, doc_id}, "error getting history type" - # if there's an error continue and flush to track-changes for safety - if Settings.disableDoubleFlush and projectHistoryType is "project-history" - logger.debug {doc_id, projectHistoryType}, "skipping track-changes flush" - else - metrics.inc 'history-flush', 1, { status: 'track-changes'} - url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush" - logger.log { project_id, doc_id, url, projectHistoryType }, "flushing doc in track changes api" - request.post url, (error, res, body)-> - if error? - logger.error { error, doc_id, project_id}, "track changes doc to track changes api" - else if res.statusCode < 200 and res.statusCode >= 300 - logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}" +module.exports = (HistoryManager = { + flushDocChangesAsync(project_id, doc_id) { + if (((Settings.apis != null ? 
Settings.apis.trackchanges : undefined) == null)) {
+ logger.warn({ doc_id }, "track changes API is not configured, so not flushing");
+ return;
+ }
+ return RedisManager.getHistoryType(doc_id, function(err, projectHistoryType) {
+ if (err != null) {
+ logger.warn({err, doc_id}, "error getting history type");
+ }
+ // if there's an error continue and flush to track-changes for safety
+ if (Settings.disableDoubleFlush && (projectHistoryType === "project-history")) {
+ return logger.debug({doc_id, projectHistoryType}, "skipping track-changes flush");
+ } else {
+ metrics.inc('history-flush', 1, { status: 'track-changes'});
+ const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush`;
+ logger.log({ project_id, doc_id, url, projectHistoryType }, "flushing doc in track changes api");
+ return request.post(url, function(error, res, body){
+ if (error != null) {
+ return logger.error({ error, doc_id, project_id}, "error flushing doc to track changes api");
+ } else if ((res.statusCode < 200) || (res.statusCode >= 300)) {
+ return logger.error({ doc_id, project_id }, `track changes api returned a failure status code: ${res.statusCode}`);
+ }
+ });
+ }
+ });
+ },
- # flush changes in the background
- flushProjectChangesAsync: (project_id) ->
- return if !Settings.apis?.project_history?.enabled
- HistoryManager.flushProjectChanges project_id, {background:true}, ->
+ // flush changes in the background
+ flushProjectChangesAsync(project_id) {
+ if (!__guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) { return; }
+ return HistoryManager.flushProjectChanges(project_id, {background:true}, function() {});
+ },
- # flush changes and callback (for when we need to know the queue is flushed)
- flushProjectChanges: (project_id, options, callback = (error) ->) ->
- return callback() if !Settings.apis?.project_history?.enabled
- if options.skip_history_flush
- logger.log {project_id}, "skipping flush of project history"
- return callback()
- metrics.inc 'history-flush', 1, { status: 'project-history'}
- url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush"
- qs = {}
- qs.background = true if options.background # pass on the background flush option if present
- logger.log { project_id, url, qs }, "flushing doc in project history api"
- request.post {url: url, qs: qs}, (error, res, body)->
- if error?
- logger.error { error, project_id}, "project history doc to track changes api"
- return callback(error)
- else if res.statusCode < 200 and res.statusCode >= 300
- logger.error { project_id }, "project history api returned a failure status code: #{res.statusCode}"
- return callback(error)
- else
- return callback()
+ // flush changes and callback (for when we need to know the queue is flushed)
+ flushProjectChanges(project_id, options, callback) {
+ if (callback == null) { callback = function(error) {}; }
+ if (!__guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) { return callback(); }
+ if (options.skip_history_flush) {
+ logger.log({project_id}, "skipping flush of project history");
+ return callback();
+ }
+ metrics.inc('history-flush', 1, { status: 'project-history'});
+ const url = `${Settings.apis.project_history.url}/project/${project_id}/flush`;
+ const qs = {};
+ if (options.background) { qs.background = true; } // pass on the background flush option if present
+ logger.log({ project_id, url, qs }, "flushing doc in project history api");
+ return request.post({url, qs}, function(error, res, body){
+ if (error != null) {
+ logger.error({ error, project_id}, "error flushing project in project history api");
+ return callback(error);
+ } else if ((res.statusCode < 200) || (res.statusCode >= 300)) {
+ logger.error({ project_id }, `project history api returned a failure status code: ${res.statusCode}`);
+ return callback(new Error(`project history api returned a failure status code: ${res.statusCode}`));
+ } else {
+ return callback();
+ }
+ });
+ },
- FLUSH_DOC_EVERY_N_OPS: 100
- FLUSH_PROJECT_EVERY_N_OPS: 500
+ FLUSH_DOC_EVERY_N_OPS: 100,
+ FLUSH_PROJECT_EVERY_N_OPS: 500,
- recordAndFlushHistoryOps: (project_id, doc_id, ops = [], doc_ops_length, project_ops_length, callback = (error) ->) ->
- if ops.length == 0
- return callback()
+ recordAndFlushHistoryOps(project_id, doc_id, ops, doc_ops_length, project_ops_length, callback) {
+ if (ops == null) { ops = []; }
+ if (callback == null) { callback = function(error) {}; }
+ if (ops.length === 0) {
+ return callback();
+ }
- # record updates for project history
- if Settings.apis?.project_history?.enabled
- if HistoryManager.shouldFlushHistoryOps(project_ops_length, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
- # Do this in the background since it uses HTTP and so may be too
- # slow to wait for when processing a doc update.
- logger.log { project_ops_length, project_id }, "flushing project history api"
- HistoryManager.flushProjectChangesAsync project_id
+ // record updates for project history
+ if (__guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) {
+ if (HistoryManager.shouldFlushHistoryOps(project_ops_length, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)) {
+ // Do this in the background since it uses HTTP and so may be too
+ // slow to wait for when processing a doc update.
+ logger.log({ project_ops_length, project_id }, "flushing project history api");
+ HistoryManager.flushProjectChangesAsync(project_id);
+ }
+ }
- # if the doc_ops_length is undefined it means the project is not using track-changes
- # so we can bail out here
- if typeof(doc_ops_length) is 'undefined'
- logger.debug { project_id, doc_id}, "skipping flush to track-changes, only using project-history"
- return callback()
+ // if the doc_ops_length is undefined it means the project is not using track-changes
+ // so we can bail out here
+ if (typeof(doc_ops_length) === 'undefined') {
+ logger.debug({ project_id, doc_id}, "skipping flush to track-changes, only using project-history");
+ return callback();
+ }
- # record updates for track-changes
- HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) ->
- return callback(error) if error?
- if HistoryManager.shouldFlushHistoryOps(doc_ops_length, ops.length, HistoryManager.FLUSH_DOC_EVERY_N_OPS)
- # Do this in the background since it uses HTTP and so may be too
- # slow to wait for when processing a doc update.
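The `shouldFlushHistoryOps` test that follows decides whether appending this batch of ops crossed a multiple of the flush threshold. A worked case using the `FLUSH_DOC_EVERY_N_OPS` value of 100 from this file:

    // 95 ops were queued before this batch; pushing 10 more gives length 105.
    const length = 105, ops_length = 10, threshold = 100;
    const previousLength = length - ops_length; // 95
    const prevBlock = Math.floor(previousLength / threshold); // 0, i.e. block 0-99
    const newBlock = Math.floor(length / threshold); // 1, i.e. block 100-199
    // prevBlock !== newBlock, so the batch crossed the 100-op boundary: flush.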
- logger.log { doc_ops_length, doc_id, project_id }, "flushing track changes api" - HistoryManager.flushDocChangesAsync project_id, doc_id - callback() + // record updates for track-changes + return HistoryRedisManager.recordDocHasHistoryOps(project_id, doc_id, ops, function(error) { + if (error != null) { return callback(error); } + if (HistoryManager.shouldFlushHistoryOps(doc_ops_length, ops.length, HistoryManager.FLUSH_DOC_EVERY_N_OPS)) { + // Do this in the background since it uses HTTP and so may be too + // slow to wait for when processing a doc update. + logger.log({ doc_ops_length, doc_id, project_id }, "flushing track changes api"); + HistoryManager.flushDocChangesAsync(project_id, doc_id); + } + return callback(); + }); + }, - shouldFlushHistoryOps: (length, ops_length, threshold) -> - return false if !length # don't flush unless we know the length - # We want to flush every 100 ops, i.e. 100, 200, 300, etc - # Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these - # ops. If we've changed, then we've gone over a multiple of 100 and should flush. - # (Most of the time, we will only hit 100 and then flushing will put us back to 0) - previousLength = length - ops_length - prevBlock = Math.floor(previousLength / threshold) - newBlock = Math.floor(length / threshold) - return newBlock != prevBlock + shouldFlushHistoryOps(length, ops_length, threshold) { + if (!length) { return false; } // don't flush unless we know the length + // We want to flush every 100 ops, i.e. 100, 200, 300, etc + // Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these + // ops. If we've changed, then we've gone over a multiple of 100 and should flush. + // (Most of the time, we will only hit 100 and then flushing will put us back to 0) + const previousLength = length - ops_length; + const prevBlock = Math.floor(previousLength / threshold); + const newBlock = Math.floor(length / threshold); + return newBlock !== prevBlock; + }, - MAX_PARALLEL_REQUESTS: 4 + MAX_PARALLEL_REQUESTS: 4, - resyncProjectHistory: (project_id, projectHistoryId, docs, files, callback) -> - ProjectHistoryRedisManager.queueResyncProjectStructure project_id, projectHistoryId, docs, files, (error) -> - return callback(error) if error? - DocumentManager = require "./DocumentManager" - resyncDoc = (doc, cb) -> - DocumentManager.resyncDocContentsWithLock project_id, doc.doc, cb - async.eachLimit docs, HistoryManager.MAX_PARALLEL_REQUESTS, resyncDoc, callback + resyncProjectHistory(project_id, projectHistoryId, docs, files, callback) { + return ProjectHistoryRedisManager.queueResyncProjectStructure(project_id, projectHistoryId, docs, files, function(error) { + if (error != null) { return callback(error); } + const DocumentManager = require("./DocumentManager"); + const resyncDoc = (doc, cb) => DocumentManager.resyncDocContentsWithLock(project_id, doc.doc, cb); + return async.eachLimit(docs, HistoryManager.MAX_PARALLEL_REQUESTS, resyncDoc, callback); + }); + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/HistoryRedisManager.js b/services/document-updater/app/coffee/HistoryRedisManager.js index d9a99a09aa..6e2aba403c 100644 --- a/services/document-updater/app/coffee/HistoryRedisManager.js +++ b/services/document-updater/app/coffee/HistoryRedisManager.js @@ -1,13 +1,26 @@ -Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.history) -Keys = Settings.redis.history.key_schema -logger = require('logger-sharelatex') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let HistoryRedisManager; +const Settings = require('settings-sharelatex'); +const rclient = require("redis-sharelatex").createClient(Settings.redis.history); +const Keys = Settings.redis.history.key_schema; +const logger = require('logger-sharelatex'); -module.exports = HistoryRedisManager = - recordDocHasHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) -> - if ops.length == 0 - return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush - logger.log project_id: project_id, doc_id: doc_id, "marking doc in project for history ops" - rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) -> - return callback(error) if error? - callback() +module.exports = (HistoryRedisManager = { + recordDocHasHistoryOps(project_id, doc_id, ops, callback) { + if (ops == null) { ops = []; } + if (callback == null) { callback = function(error) {}; } + if (ops.length === 0) { + return callback(new Error("cannot push no ops")); // This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush + } + logger.log({project_id, doc_id}, "marking doc in project for history ops"); + return rclient.sadd(Keys.docsWithHistoryOps({project_id}), doc_id, function(error) { + if (error != null) { return callback(error); } + return callback(); + }); + } +}); diff --git a/services/document-updater/app/coffee/HttpController.js b/services/document-updater/app/coffee/HttpController.js index 67d247ab97..dfc749eeb9 100644 --- a/services/document-updater/app/coffee/HttpController.js +++ b/services/document-updater/app/coffee/HttpController.js @@ -1,231 +1,336 @@ -DocumentManager = require "./DocumentManager" -HistoryManager = require "./HistoryManager" -ProjectManager = require "./ProjectManager" -Errors = require "./Errors" -logger = require "logger-sharelatex" -Metrics = require "./Metrics" -ProjectFlusher = require("./ProjectFlusher") -DeleteQueueManager = require("./DeleteQueueManager") -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let HttpController; +const DocumentManager = require("./DocumentManager"); +const HistoryManager = require("./HistoryManager"); +const ProjectManager = require("./ProjectManager"); +const Errors = require("./Errors"); +const logger = require("logger-sharelatex"); +const Metrics 
= require("./Metrics"); +const ProjectFlusher = require("./ProjectFlusher"); +const DeleteQueueManager = require("./DeleteQueueManager"); +const async = require("async"); -TWO_MEGABYTES = 2 * 1024 * 1024 +const TWO_MEGABYTES = 2 * 1024 * 1024; -module.exports = HttpController = - getDoc: (req, res, next = (error) ->) -> - doc_id = req.params.doc_id - project_id = req.params.project_id - logger.log project_id: project_id, doc_id: doc_id, "getting doc via http" - timer = new Metrics.Timer("http.getDoc") +module.exports = (HttpController = { + getDoc(req, res, next) { + let fromVersion; + if (next == null) { next = function(error) {}; } + const { + doc_id + } = req.params; + const { + project_id + } = req.params; + logger.log({project_id, doc_id}, "getting doc via http"); + const timer = new Metrics.Timer("http.getDoc"); - if req.query?.fromVersion? - fromVersion = parseInt(req.query.fromVersion, 10) - else - fromVersion = -1 + if ((req.query != null ? req.query.fromVersion : undefined) != null) { + fromVersion = parseInt(req.query.fromVersion, 10); + } else { + fromVersion = -1; + } - DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges, pathname) -> - timer.done() - return next(error) if error? - logger.log project_id: project_id, doc_id: doc_id, "got doc via http" - if !lines? or !version? - return next(new Errors.NotFoundError("document not found")) - res.json - id: doc_id - lines: lines - version: version - ops: ops - ranges: ranges - pathname: pathname + return DocumentManager.getDocAndRecentOpsWithLock(project_id, doc_id, fromVersion, function(error, lines, version, ops, ranges, pathname) { + timer.done(); + if (error != null) { return next(error); } + logger.log({project_id, doc_id}, "got doc via http"); + if ((lines == null) || (version == null)) { + return next(new Errors.NotFoundError("document not found")); + } + return res.json({ + id: doc_id, + lines, + version, + ops, + ranges, + pathname + }); + }); + }, - _getTotalSizeOfLines: (lines) -> - size = 0 - for line in lines - size += (line.length + 1) - return size + _getTotalSizeOfLines(lines) { + let size = 0; + for (let line of Array.from(lines)) { + size += (line.length + 1); + } + return size; + }, - getProjectDocsAndFlushIfOld: (req, res, next = (error) ->) -> - project_id = req.params.project_id - projectStateHash = req.query?.state - # exclude is string of existing docs "id:version,id:version,..." - excludeItems = req.query?.exclude?.split(',') or [] - logger.log project_id: project_id, exclude: excludeItems, "getting docs via http" - timer = new Metrics.Timer("http.getAllDocs") - excludeVersions = {} - for item in excludeItems - [id,version] = item?.split(':') - excludeVersions[id] = version - logger.log {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions" - ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, excludeVersions, (error, result) -> - timer.done() - if error instanceof Errors.ProjectStateChangedError - res.sendStatus 409 # conflict - else if error? - return next(error) - else - logger.log project_id: project_id, result: ("#{doc._id}:#{doc.v}" for doc in result), "got docs via http" - res.send result + getProjectDocsAndFlushIfOld(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + const projectStateHash = req.query != null ? 
req.query.state : undefined; + // exclude is string of existing docs "id:version,id:version,..." + const excludeItems = __guard__(req.query != null ? req.query.exclude : undefined, x => x.split(',')) || []; + logger.log({project_id, exclude: excludeItems}, "getting docs via http"); + const timer = new Metrics.Timer("http.getAllDocs"); + const excludeVersions = {}; + for (let item of Array.from(excludeItems)) { + const [id,version] = Array.from(item != null ? item.split(':') : undefined); + excludeVersions[id] = version; + } + logger.log({project_id, projectStateHash, excludeVersions}, "excluding versions"); + return ProjectManager.getProjectDocsAndFlushIfOld(project_id, projectStateHash, excludeVersions, function(error, result) { + timer.done(); + if (error instanceof Errors.ProjectStateChangedError) { + return res.sendStatus(409); // conflict + } else if (error != null) { + return next(error); + } else { + logger.log({project_id, result: ((Array.from(result).map((doc) => `${doc._id}:${doc.v}`)))}, "got docs via http"); + return res.send(result); + } + }); + }, - clearProjectState: (req, res, next = (error) ->) -> - project_id = req.params.project_id - timer = new Metrics.Timer("http.clearProjectState") - logger.log project_id: project_id, "clearing project state via http" - ProjectManager.clearProjectState project_id, (error) -> - timer.done() - if error? - return next(error) - else - res.sendStatus 200 + clearProjectState(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + const timer = new Metrics.Timer("http.clearProjectState"); + logger.log({project_id}, "clearing project state via http"); + return ProjectManager.clearProjectState(project_id, function(error) { + timer.done(); + if (error != null) { + return next(error); + } else { + return res.sendStatus(200); + } + }); + }, - setDoc: (req, res, next = (error) ->) -> - doc_id = req.params.doc_id - project_id = req.params.project_id - {lines, source, user_id, undoing} = req.body - lineSize = HttpController._getTotalSizeOfLines(lines) - if lineSize > TWO_MEGABYTES - logger.log {project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response" - return res.sendStatus 406 - logger.log {project_id, doc_id, lines, source, user_id, undoing}, "setting doc via http" - timer = new Metrics.Timer("http.setDoc") - DocumentManager.setDocWithLock project_id, doc_id, lines, source, user_id, undoing, (error) -> - timer.done() - return next(error) if error? 
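The `setDoc` handler converted here refuses oversized bodies with a 406, sizing the document as the sum of line lengths plus one character per line for the newline (`_getTotalSizeOfLines` above). A small worked case (hypothetical body):

    // TWO_MEGABYTES = 2 * 1024 * 1024 = 2097152, as defined in this file.
    const lines = ["hello", "world"]; // hypothetical request body
    let size = 0;
    for (let line of lines) { size += line.length + 1; } // count each newline too
    // size === 12 (5+1 + 5+1), far below 2097152, so no 406 is returned.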
- logger.log project_id: project_id, doc_id: doc_id, "set doc via http" - res.sendStatus 204 # No Content + setDoc(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + doc_id + } = req.params; + const { + project_id + } = req.params; + const {lines, source, user_id, undoing} = req.body; + const lineSize = HttpController._getTotalSizeOfLines(lines); + if (lineSize > TWO_MEGABYTES) { + logger.log({project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response"); + return res.sendStatus(406); + } + logger.log({project_id, doc_id, lines, source, user_id, undoing}, "setting doc via http"); + const timer = new Metrics.Timer("http.setDoc"); + return DocumentManager.setDocWithLock(project_id, doc_id, lines, source, user_id, undoing, function(error) { + timer.done(); + if (error != null) { return next(error); } + logger.log({project_id, doc_id}, "set doc via http"); + return res.sendStatus(204); + }); + }, // No Content - flushDocIfLoaded: (req, res, next = (error) ->) -> - doc_id = req.params.doc_id - project_id = req.params.project_id - logger.log project_id: project_id, doc_id: doc_id, "flushing doc via http" - timer = new Metrics.Timer("http.flushDoc") - DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) -> - timer.done() - return next(error) if error? - logger.log project_id: project_id, doc_id: doc_id, "flushed doc via http" - res.sendStatus 204 # No Content + flushDocIfLoaded(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + doc_id + } = req.params; + const { + project_id + } = req.params; + logger.log({project_id, doc_id}, "flushing doc via http"); + const timer = new Metrics.Timer("http.flushDoc"); + return DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function(error) { + timer.done(); + if (error != null) { return next(error); } + logger.log({project_id, doc_id}, "flushed doc via http"); + return res.sendStatus(204); + }); + }, // No Content - deleteDoc: (req, res, next = (error) ->) -> - doc_id = req.params.doc_id - project_id = req.params.project_id - ignoreFlushErrors = req.query.ignore_flush_errors == 'true' - timer = new Metrics.Timer("http.deleteDoc") - logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http" - DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, { ignoreFlushErrors: ignoreFlushErrors }, (error) -> - timer.done() - # There is no harm in flushing project history if the previous call - # failed and sometimes it is required - HistoryManager.flushProjectChangesAsync project_id + deleteDoc(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + doc_id + } = req.params; + const { + project_id + } = req.params; + const ignoreFlushErrors = req.query.ignore_flush_errors === 'true'; + const timer = new Metrics.Timer("http.deleteDoc"); + logger.log({project_id, doc_id}, "deleting doc via http"); + return DocumentManager.flushAndDeleteDocWithLock(project_id, doc_id, { ignoreFlushErrors }, function(error) { + timer.done(); + // There is no harm in flushing project history if the previous call + // failed and sometimes it is required + HistoryManager.flushProjectChangesAsync(project_id); - return next(error) if error? 
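One detail in the `deleteDoc` conversion here: query-string values arrive as strings, so the `ignore_flush_errors` flag is compared against the literal `'true'` rather than used as a boolean directly. In isolation:

    // "?ignore_flush_errors=true" yields the string "true", not a boolean.
    const ignoreFlushErrors = req.query.ignore_flush_errors === 'true';
    // Any other value, including absence (undefined), leaves this false.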
- logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http" - res.sendStatus 204 # No Content + if (error != null) { return next(error); } + logger.log({project_id, doc_id}, "deleted doc via http"); + return res.sendStatus(204); + }); + }, // No Content - flushProject: (req, res, next = (error) ->) -> - project_id = req.params.project_id - logger.log project_id: project_id, "flushing project via http" - timer = new Metrics.Timer("http.flushProject") - ProjectManager.flushProjectWithLocks project_id, (error) -> - timer.done() - return next(error) if error? - logger.log project_id: project_id, "flushed project via http" - res.sendStatus 204 # No Content + flushProject(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + logger.log({project_id}, "flushing project via http"); + const timer = new Metrics.Timer("http.flushProject"); + return ProjectManager.flushProjectWithLocks(project_id, function(error) { + timer.done(); + if (error != null) { return next(error); } + logger.log({project_id}, "flushed project via http"); + return res.sendStatus(204); + }); + }, // No Content - deleteProject: (req, res, next = (error) ->) -> - project_id = req.params.project_id - logger.log project_id: project_id, "deleting project via http" - options = {} - options.background = true if req.query?.background # allow non-urgent flushes to be queued - options.skip_history_flush = true if req.query?.shutdown # don't flush history when realtime shuts down - if req.query?.background - ProjectManager.queueFlushAndDeleteProject project_id, (error) -> - return next(error) if error? - logger.log project_id: project_id, "queue delete of project via http" - res.sendStatus 204 # No Content - else - timer = new Metrics.Timer("http.deleteProject") - ProjectManager.flushAndDeleteProjectWithLocks project_id, options, (error) -> - timer.done() - return next(error) if error? - logger.log project_id: project_id, "deleted project via http" - res.sendStatus 204 # No Content + deleteProject(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + logger.log({project_id}, "deleting project via http"); + const options = {}; + if (req.query != null ? req.query.background : undefined) { options.background = true; } // allow non-urgent flushes to be queued + if (req.query != null ? req.query.shutdown : undefined) { options.skip_history_flush = true; } // don't flush history when realtime shuts down + if (req.query != null ? 
req.query.background : undefined) { + return ProjectManager.queueFlushAndDeleteProject(project_id, function(error) { + if (error != null) { return next(error); } + logger.log({project_id}, "queue delete of project via http"); + return res.sendStatus(204); + }); // No Content + } else { + const timer = new Metrics.Timer("http.deleteProject"); + return ProjectManager.flushAndDeleteProjectWithLocks(project_id, options, function(error) { + timer.done(); + if (error != null) { return next(error); } + logger.log({project_id}, "deleted project via http"); + return res.sendStatus(204); + }); + } + }, // No Content - deleteMultipleProjects: (req, res, next = (error) ->) -> - project_ids = req.body?.project_ids || [] - logger.log project_ids: project_ids, "deleting multiple projects via http" - async.eachSeries project_ids, (project_id, cb) -> - logger.log project_id: project_id, "queue delete of project via http" - ProjectManager.queueFlushAndDeleteProject project_id, cb - , (error) -> - return next(error) if error? - res.sendStatus 204 # No Content + deleteMultipleProjects(req, res, next) { + if (next == null) { next = function(error) {}; } + const project_ids = (req.body != null ? req.body.project_ids : undefined) || []; + logger.log({project_ids}, "deleting multiple projects via http"); + return async.eachSeries(project_ids, function(project_id, cb) { + logger.log({project_id}, "queue delete of project via http"); + return ProjectManager.queueFlushAndDeleteProject(project_id, cb); + } + , function(error) { + if (error != null) { return next(error); } + return res.sendStatus(204); + }); + }, // No Content - acceptChanges: (req, res, next = (error) ->) -> - {project_id, doc_id} = req.params - change_ids = req.body?.change_ids - if !change_ids? - change_ids = [ req.params.change_id ] - logger.log {project_id, doc_id}, "accepting #{ change_ids.length } changes via http" - timer = new Metrics.Timer("http.acceptChanges") - DocumentManager.acceptChangesWithLock project_id, doc_id, change_ids, (error) -> - timer.done() - return next(error) if error? - logger.log {project_id, doc_id}, "accepted #{ change_ids.length } changes via http" - res.sendStatus 204 # No Content + acceptChanges(req, res, next) { + if (next == null) { next = function(error) {}; } + const {project_id, doc_id} = req.params; + let change_ids = req.body != null ? req.body.change_ids : undefined; + if ((change_ids == null)) { + change_ids = [ req.params.change_id ]; + } + logger.log({project_id, doc_id}, `accepting ${ change_ids.length } changes via http`); + const timer = new Metrics.Timer("http.acceptChanges"); + return DocumentManager.acceptChangesWithLock(project_id, doc_id, change_ids, function(error) { + timer.done(); + if (error != null) { return next(error); } + logger.log({project_id, doc_id}, `accepted ${ change_ids.length } changes via http`); + return res.sendStatus(204); + }); + }, // No Content - deleteComment: (req, res, next = (error) ->) -> - {project_id, doc_id, comment_id} = req.params - logger.log {project_id, doc_id, comment_id}, "deleting comment via http" - timer = new Metrics.Timer("http.deleteComment") - DocumentManager.deleteCommentWithLock project_id, doc_id, comment_id, (error) -> - timer.done() - return next(error) if error? 
- logger.log {project_id, doc_id, comment_id}, "deleted comment via http" - res.sendStatus 204 # No Content + deleteComment(req, res, next) { + if (next == null) { next = function(error) {}; } + const {project_id, doc_id, comment_id} = req.params; + logger.log({project_id, doc_id, comment_id}, "deleting comment via http"); + const timer = new Metrics.Timer("http.deleteComment"); + return DocumentManager.deleteCommentWithLock(project_id, doc_id, comment_id, function(error) { + timer.done(); + if (error != null) { return next(error); } + logger.log({project_id, doc_id, comment_id}, "deleted comment via http"); + return res.sendStatus(204); + }); + }, // No Content - updateProject: (req, res, next = (error) ->) -> - timer = new Metrics.Timer("http.updateProject") - project_id = req.params.project_id - {projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body - logger.log {project_id, docUpdates, fileUpdates, version}, "updating project via http" + updateProject(req, res, next) { + if (next == null) { next = function(error) {}; } + const timer = new Metrics.Timer("http.updateProject"); + const { + project_id + } = req.params; + const {projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body; + logger.log({project_id, docUpdates, fileUpdates, version}, "updating project via http"); - ProjectManager.updateProjectWithLocks project_id, projectHistoryId, userId, docUpdates, fileUpdates, version, (error) -> - timer.done() - return next(error) if error? - logger.log project_id: project_id, "updated project via http" - res.sendStatus 204 # No Content + return ProjectManager.updateProjectWithLocks(project_id, projectHistoryId, userId, docUpdates, fileUpdates, version, function(error) { + timer.done(); + if (error != null) { return next(error); } + logger.log({project_id}, "updated project via http"); + return res.sendStatus(204); + }); + }, // No Content - resyncProjectHistory: (req, res, next = (error) ->) -> - project_id = req.params.project_id - {projectHistoryId, docs, files} = req.body + resyncProjectHistory(req, res, next) { + if (next == null) { next = function(error) {}; } + const { + project_id + } = req.params; + const {projectHistoryId, docs, files} = req.body; - logger.log {project_id, docs, files}, "queuing project history resync via http" - HistoryManager.resyncProjectHistory project_id, projectHistoryId, docs, files, (error) -> - return next(error) if error? - logger.log {project_id}, "queued project history resync via http" - res.sendStatus 204 + logger.log({project_id, docs, files}, "queuing project history resync via http"); + return HistoryManager.resyncProjectHistory(project_id, projectHistoryId, docs, files, function(error) { + if (error != null) { return next(error); } + logger.log({project_id}, "queued project history resync via http"); + return res.sendStatus(204); + }); + }, - flushAllProjects: (req, res, next = (error)-> )-> - res.setTimeout(5 * 60 * 1000) - options = - limit : req.query.limit || 1000 - concurrency : req.query.concurrency || 5 + flushAllProjects(req, res, next ){ + if (next == null) { next = function(error){}; } + res.setTimeout(5 * 60 * 1000); + const options = { + limit : req.query.limit || 1000, + concurrency : req.query.concurrency || 5, dryRun : req.query.dryRun || false - ProjectFlusher.flushAllProjects options, (err, project_ids)-> - if err? 
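
Nearly every method in this patch starts a `Metrics.Timer` and stops it on every exit path; the manager modules later in the patch do this by wrapping the caller's callback exactly once. A reduced sketch, with a stub `Timer` standing in for metrics-sharelatex's `Metrics.Timer` (assumption: only the `done()` contract matters here):

```js
// Stub Timer; metrics-sharelatex's real Timer records a histogram instead.
class Timer {
  constructor(name) { this.name = name; this.start = Date.now(); }
  done() { console.log(`${this.name} took ${Date.now() - this.start}ms`); }
}

// Wrap the caller's callback once so the timer stops on every exit path.
function withTimer(name, _callback) {
  const timer = new Timer(name);
  return (...args) => {
    timer.done();
    return _callback(...args);
  };
}

// Usage, mirroring the handlers above:
const callback = withTimer('http.updateProject', (err) =>
  console.log(err ? 'next(error)' : 'res.sendStatus(204)'));
setTimeout(() => callback(null), 25);
```
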
- logger.err err:err, "error bulk flushing projects" - res.sendStatus 500 - else - res.send project_ids + }; + return ProjectFlusher.flushAllProjects(options, function(err, project_ids){ + if (err != null) { + logger.err({err}, "error bulk flushing projects"); + return res.sendStatus(500); + } else { + return res.send(project_ids); + } + }); + }, - flushQueuedProjects: (req, res, next = (error) ->) -> - res.setTimeout(10 * 60 * 1000) - options = - limit : req.query.limit || 1000 - timeout: 5 * 60 * 1000 - min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 - DeleteQueueManager.flushAndDeleteOldProjects options, (err, flushed)-> - if err? - logger.err err:err, "error flushing old projects" - res.sendStatus 500 - else - logger.log {flushed: flushed}, "flush of queued projects completed" - res.send {flushed: flushed} + flushQueuedProjects(req, res, next) { + if (next == null) { next = function(error) {}; } + res.setTimeout(10 * 60 * 1000); + const options = { + limit : req.query.limit || 1000, + timeout: 5 * 60 * 1000, + min_delete_age: req.query.min_delete_age || (5 * 60 * 1000) + }; + return DeleteQueueManager.flushAndDeleteOldProjects(options, function(err, flushed){ + if (err != null) { + logger.err({err}, "error flushing old projects"); + return res.sendStatus(500); + } else { + logger.log({flushed}, "flush of queued projects completed"); + return res.send({flushed}); + } + }); + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/LockManager.js b/services/document-updater/app/coffee/LockManager.js index 8f62e46ccb..2b278c31e4 100644 --- a/services/document-updater/app/coffee/LockManager.js +++ b/services/document-updater/app/coffee/LockManager.js @@ -1,102 +1,131 @@ -metrics = require('./Metrics') -Settings = require('settings-sharelatex') -redis = require("redis-sharelatex") -rclient = redis.createClient(Settings.redis.lock) -keys = Settings.redis.lock.key_schema -logger = require "logger-sharelatex" -os = require "os" -crypto = require "crypto" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let LockManager; +const metrics = require('./Metrics'); +const Settings = require('settings-sharelatex'); +const redis = require("redis-sharelatex"); +const rclient = redis.createClient(Settings.redis.lock); +const keys = Settings.redis.lock.key_schema; +const logger = require("logger-sharelatex"); +const os = require("os"); +const crypto = require("crypto"); -Profiler = require "./Profiler" +const Profiler = require("./Profiler"); -HOST = os.hostname() -PID = process.pid -RND = crypto.randomBytes(4).toString('hex') -COUNT = 0 +const HOST = os.hostname(); +const PID = process.pid; +const RND = crypto.randomBytes(4).toString('hex'); +let COUNT = 0; -MAX_REDIS_REQUEST_LENGTH = 5000 # 5 seconds +const MAX_REDIS_REQUEST_LENGTH = 5000; // 5 seconds -module.exports = LockManager = - LOCK_TEST_INTERVAL: 50 # 50ms between each test of the lock - MAX_TEST_INTERVAL: 1000 # back off to 1s between each test of the lock - MAX_LOCK_WAIT_TIME: 10000 # 10s maximum time to spend trying to get the lock - LOCK_TTL: 30 # seconds. Time until lock auto expires in redis. 
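
The `__guard__` helper decaffeinate appends to each converted file emulates CoffeeScript's soak operator. A minimal sketch of the equivalence; on runtimes with optional chaining (Node 14+) the same access collapses to `?.`:

```js
// decaffeinate's __guard__, as emitted above.
function __guard__(value, transform) {
  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}

const req = { query: { exclude: 'a:1,b:2' } };

// CoffeeScript: req.query?.exclude?.split(',')
const viaGuard = __guard__(req.query != null ? req.query.exclude : undefined, (x) => x.split(','));

// On Node 14+ the same soak collapses to optional chaining:
const viaChaining = req.query?.exclude?.split(',');

console.log(viaGuard);    // [ 'a:1', 'b:2' ]
console.log(viaChaining); // [ 'a:1', 'b:2' ]
```
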
+module.exports = (LockManager = { + LOCK_TEST_INTERVAL: 50, // 50ms between each test of the lock + MAX_TEST_INTERVAL: 1000, // back off to 1s between each test of the lock + MAX_LOCK_WAIT_TIME: 10000, // 10s maximum time to spend trying to get the lock + LOCK_TTL: 30, // seconds. Time until lock auto expires in redis. - # Use a signed lock value as described in - # http://redis.io/topics/distlock#correct-implementation-with-a-single-instance - # to prevent accidental unlocking by multiple processes - randomLock : () -> - time = Date.now() - return "locked:host=#{HOST}:pid=#{PID}:random=#{RND}:time=#{time}:count=#{COUNT++}" + // Use a signed lock value as described in + // http://redis.io/topics/distlock#correct-implementation-with-a-single-instance + // to prevent accidental unlocking by multiple processes + randomLock() { + const time = Date.now(); + return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}`; + }, - unlockScript: 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end'; + unlockScript: 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end', - tryLock : (doc_id, callback = (err, isFree)->)-> - lockValue = LockManager.randomLock() - key = keys.blockingKey(doc_id:doc_id) - profile = new Profiler("tryLock", {doc_id, key, lockValue}) - rclient.set key, lockValue, "EX", @LOCK_TTL, "NX", (err, gotLock)-> - return callback(err) if err? - if gotLock == "OK" - metrics.inc "doc-not-blocking" - timeTaken = profile.log("got lock").end() - if timeTaken > MAX_REDIS_REQUEST_LENGTH - # took too long, so try to free the lock - LockManager.releaseLock doc_id, lockValue, (err, result) -> - return callback(err) if err? # error freeing lock - callback null, false # tell caller they didn't get the lock - else - callback null, true, lockValue - else - metrics.inc "doc-blocking" - profile.log("doc is locked").end() - callback null, false + tryLock(doc_id, callback){ + if (callback == null) { callback = function(err, isFree){}; } + const lockValue = LockManager.randomLock(); + const key = keys.blockingKey({doc_id}); + const profile = new Profiler("tryLock", {doc_id, key, lockValue}); + return rclient.set(key, lockValue, "EX", this.LOCK_TTL, "NX", function(err, gotLock){ + if (err != null) { return callback(err); } + if (gotLock === "OK") { + metrics.inc("doc-not-blocking"); + const timeTaken = profile.log("got lock").end(); + if (timeTaken > MAX_REDIS_REQUEST_LENGTH) { + // took too long, so try to free the lock + return LockManager.releaseLock(doc_id, lockValue, function(err, result) { + if (err != null) { return callback(err); } // error freeing lock + return callback(null, false); + }); // tell caller they didn't get the lock + } else { + return callback(null, true, lockValue); + } + } else { + metrics.inc("doc-blocking"); + profile.log("doc is locked").end(); + return callback(null, false); + } + }); + }, - getLock: (doc_id, callback = (error, lockValue) ->) -> - startTime = Date.now() - testInterval = LockManager.LOCK_TEST_INTERVAL - profile = new Profiler("getLock", {doc_id}) - do attempt = () -> - if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME - e = new Error("Timeout") - e.doc_id = doc_id - profile.log("timeout").end() - return callback(e) + getLock(doc_id, callback) { + let attempt; + if (callback == null) { callback = function(error, lockValue) {}; } + const startTime = Date.now(); + let testInterval = LockManager.LOCK_TEST_INTERVAL; + const profile = new 
Profiler("getLock", {doc_id}); + return (attempt = function() { + if ((Date.now() - startTime) > LockManager.MAX_LOCK_WAIT_TIME) { + const e = new Error("Timeout"); + e.doc_id = doc_id; + profile.log("timeout").end(); + return callback(e); + } - LockManager.tryLock doc_id, (error, gotLock, lockValue) -> - return callback(error) if error? - profile.log("tryLock") - if gotLock - profile.end() - callback(null, lockValue) - else - setTimeout attempt, testInterval - # back off when the lock is taken to avoid overloading - testInterval = Math.min(testInterval * 2, LockManager.MAX_TEST_INTERVAL) + return LockManager.tryLock(doc_id, function(error, gotLock, lockValue) { + if (error != null) { return callback(error); } + profile.log("tryLock"); + if (gotLock) { + profile.end(); + return callback(null, lockValue); + } else { + setTimeout(attempt, testInterval); + // back off when the lock is taken to avoid overloading + return testInterval = Math.min(testInterval * 2, LockManager.MAX_TEST_INTERVAL); + } + }); + })(); + }, - checkLock: (doc_id, callback = (err, isFree)->)-> - key = keys.blockingKey(doc_id:doc_id) - rclient.exists key, (err, exists) -> - return callback(err) if err? - exists = parseInt exists - if exists == 1 - metrics.inc "doc-blocking" - callback null, false - else - metrics.inc "doc-not-blocking" - callback null, true + checkLock(doc_id, callback){ + if (callback == null) { callback = function(err, isFree){}; } + const key = keys.blockingKey({doc_id}); + return rclient.exists(key, function(err, exists) { + if (err != null) { return callback(err); } + exists = parseInt(exists); + if (exists === 1) { + metrics.inc("doc-blocking"); + return callback(null, false); + } else { + metrics.inc("doc-not-blocking"); + return callback(null, true); + } + }); + }, - releaseLock: (doc_id, lockValue, callback)-> - key = keys.blockingKey(doc_id:doc_id) - profile = new Profiler("releaseLock", {doc_id, key, lockValue}) - rclient.eval LockManager.unlockScript, 1, key, lockValue, (err, result) -> - if err? - return callback(err) - else if result? 
and result isnt 1 # successful unlock should release exactly one key - profile.log("unlockScript:expired-lock").end() - logger.error {doc_id:doc_id, key:key, lockValue:lockValue, redis_err:err, redis_result:result}, "unlocking error" - metrics.inc "unlock-error" - return callback(new Error("tried to release timed out lock")) - else - profile.log("unlockScript:ok").end() - callback(null,result) + releaseLock(doc_id, lockValue, callback){ + const key = keys.blockingKey({doc_id}); + const profile = new Profiler("releaseLock", {doc_id, key, lockValue}); + return rclient.eval(LockManager.unlockScript, 1, key, lockValue, function(err, result) { + if (err != null) { + return callback(err); + } else if ((result != null) && (result !== 1)) { // successful unlock should release exactly one key + profile.log("unlockScript:expired-lock").end(); + logger.error({doc_id, key, lockValue, redis_err:err, redis_result:result}, "unlocking error"); + metrics.inc("unlock-error"); + return callback(new Error("tried to release timed out lock")); + } else { + profile.log("unlockScript:ok").end(); + return callback(null,result); + } + }); + } +}); diff --git a/services/document-updater/app/coffee/LoggerSerializers.js b/services/document-updater/app/coffee/LoggerSerializers.js index 437f49e074..87696abf3a 100644 --- a/services/document-updater/app/coffee/LoggerSerializers.js +++ b/services/document-updater/app/coffee/LoggerSerializers.js @@ -1,25 +1,41 @@ -_ = require('lodash') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const _ = require('lodash'); -showLength = (thing) -> - if thing?.length then thing.length else thing +const showLength = function(thing) { + if ((thing != null ? thing.length : undefined)) { return thing.length; } else { return thing; } +}; -showUpdateLength = (update) -> - if update?.op instanceof Array - copy = _.cloneDeep(update) - copy.op.forEach (element, index) -> - copy.op[index].i = element.i.length if element?.i?.length? - copy.op[index].d = element.d.length if element?.d?.length? - copy.op[index].c = element.c.length if element?.c?.length? - copy - else - update +const showUpdateLength = function(update) { + if ((update != null ? update.op : undefined) instanceof Array) { + const copy = _.cloneDeep(update); + copy.op.forEach(function(element, index) { + if (__guard__(element != null ? element.i : undefined, x => x.length) != null) { copy.op[index].i = element.i.length; } + if (__guard__(element != null ? element.d : undefined, x1 => x1.length) != null) { copy.op[index].d = element.d.length; } + if (__guard__(element != null ? element.c : undefined, x2 => x2.length) != null) { return copy.op[index].c = element.c.length; } + }); + return copy; + } else { + return update; + } +}; -module.exports = - # replace long values with their length - lines: showLength - oldLines: showLength - newLines: showLength - docLines: showLength - newDocLines: showLength - ranges: showLength +module.exports = { + // replace long values with their length + lines: showLength, + oldLines: showLength, + newLines: showLength, + docLines: showLength, + newDocLines: showLength, + ranges: showLength, update: showUpdateLength +}; + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/Metrics.js b/services/document-updater/app/coffee/Metrics.js index 4bf5c6dba5..8a46f7aa83 100644 --- a/services/document-updater/app/coffee/Metrics.js +++ b/services/document-updater/app/coffee/Metrics.js @@ -1 +1 @@ -module.exports = require "metrics-sharelatex" \ No newline at end of file +module.exports = require("metrics-sharelatex"); \ No newline at end of file diff --git a/services/document-updater/app/coffee/PersistenceManager.js b/services/document-updater/app/coffee/PersistenceManager.js index 88b44fd1de..f981f6bf90 100644 --- a/services/document-updater/app/coffee/PersistenceManager.js +++ b/services/document-updater/app/coffee/PersistenceManager.js @@ -1,100 +1,134 @@ -Settings = require "settings-sharelatex" -Errors = require "./Errors" -Metrics = require "./Metrics" -logger = require "logger-sharelatex" -request = (require("requestretry")).defaults({ - maxAttempts: 2 +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let PersistenceManager; +const Settings = require("settings-sharelatex"); +const Errors = require("./Errors"); +const Metrics = require("./Metrics"); +const logger = require("logger-sharelatex"); +const request = (require("requestretry")).defaults({ + maxAttempts: 2, retryDelay: 10 -}) +}); -# We have to be quick with HTTP calls because we're holding a lock that -# expires after 30 seconds. We can't let any errors in the rest of the stack -# hold us up, and need to bail out quickly if there is a problem. -MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds +// We have to be quick with HTTP calls because we're holding a lock that +// expires after 30 seconds. We can't let any errors in the rest of the stack +// hold us up, and need to bail out quickly if there is a problem. +const MAX_HTTP_REQUEST_LENGTH = 5000; // 5 seconds -updateMetric = (method, error, response) -> - # find the status, with special handling for connection timeouts - # https://github.com/request/request#timeouts - status = if error?.connect is true - "#{error.code} (connect)" - else if error? - error.code - else if response? - response.statusCode - Metrics.inc method, 1, {status: status} - if error?.attempts > 1 - Metrics.inc "#{method}-retries", 1, {status: 'error'} - if response?.attempts > 1 - Metrics.inc "#{method}-retries", 1, {status: 'success'} +const updateMetric = function(method, error, response) { + // find the status, with special handling for connection timeouts + // https://github.com/request/request#timeouts + const status = (() => { + if ((error != null ? error.connect : undefined) === true) { + return `${error.code} (connect)`; + } else if (error != null) { + return error.code; + } else if (response != null) { + return response.statusCode; + } + })(); + Metrics.inc(method, 1, {status}); + if ((error != null ? error.attempts : undefined) > 1) { + Metrics.inc(`${method}-retries`, 1, {status: 'error'}); + } + if ((response != null ? 
response.attempts : undefined) > 1) { + return Metrics.inc(`${method}-retries`, 1, {status: 'success'}); + } +}; -module.exports = PersistenceManager = - getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) ->) -> - timer = new Metrics.Timer("persistenceManager.getDoc") - callback = (args...) -> - timer.done() - _callback(args...) +module.exports = (PersistenceManager = { + getDoc(project_id, doc_id, _callback) { + if (_callback == null) { _callback = function(error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) {}; } + const timer = new Metrics.Timer("persistenceManager.getDoc"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - url = "#{Settings.apis.web.url}/project/#{project_id}/doc/#{doc_id}" - request { - url: url - method: "GET" - headers: + const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}`; + return request({ + url, + method: "GET", + headers: { "accept": "application/json" - auth: - user: Settings.apis.web.user - pass: Settings.apis.web.pass + }, + auth: { + user: Settings.apis.web.user, + pass: Settings.apis.web.pass, sendImmediately: true - jar: false + }, + jar: false, timeout: MAX_HTTP_REQUEST_LENGTH - }, (error, res, body) -> - updateMetric('getDoc', error, res) - return callback(error) if error? - if res.statusCode >= 200 and res.statusCode < 300 - try - body = JSON.parse body - catch e - return callback(e) - if !body.lines? - return callback(new Error("web API response had no doc lines")) - if !body.version? or not body.version instanceof Number - return callback(new Error("web API response had no valid doc version")) - if !body.pathname? - return callback(new Error("web API response had no valid doc pathname")) - return callback null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId, body.projectHistoryType - else if res.statusCode == 404 - return callback(new Errors.NotFoundError("doc not not found: #{url}")) - else - return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) + }, function(error, res, body) { + updateMetric('getDoc', error, res); + if (error != null) { return callback(error); } + if ((res.statusCode >= 200) && (res.statusCode < 300)) { + try { + body = JSON.parse(body); + } catch (e) { + return callback(e); + } + if ((body.lines == null)) { + return callback(new Error("web API response had no doc lines")); + } + if ((body.version == null) || !(body.version instanceof Number)) { + return callback(new Error("web API response had no valid doc version")); + } + if ((body.pathname == null)) { + return callback(new Error("web API response had no valid doc pathname")); + } + return callback(null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId, body.projectHistoryType); + } else if (res.statusCode === 404) { + return callback(new Errors.NotFoundError(`doc not found: ${url}`)); + } else { + return callback(new Error(`error accessing web API: ${url} ${res.statusCode}`)); + } + }); + }, - setDoc: (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy,_callback = (error) ->) -> - timer = new Metrics.Timer("persistenceManager.setDoc") - callback = (args...) -> - timer.done() - _callback(args...)
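
Stepping back to the LockManager conversion above: it is the standard single-instance Redis lock from redis.io/topics/distlock. Acquisition is one SET with NX (only if free) and EX (auto-expiry), release is an atomic Lua compare-and-delete keyed on a per-acquisition lock value, and getLock polls with exponential backoff. A reduced sketch of all three, assuming an ioredis-style client (the set/eval call shapes match the converted code):

```js
const Redis = require('ioredis'); // assumption: any client with this set/eval shape works
const rclient = new Redis();

const UNLOCK_SCRIPT =
  'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end';

function tryLock(key, lockValue, ttlSeconds, callback) {
  // NX: acquire only if absent; EX: auto-expire so a crashed holder cannot wedge the doc
  rclient.set(key, lockValue, 'EX', ttlSeconds, 'NX', (err, gotLock) => {
    if (err) return callback(err);
    callback(null, gotLock === 'OK');
  });
}

function releaseLock(key, lockValue, callback) {
  // Atomic compare-and-delete: only the holder of lockValue can release.
  rclient.eval(UNLOCK_SCRIPT, 1, key, lockValue, callback);
}

function getLock(key, lockValue, callback) {
  const MAX_LOCK_WAIT_TIME = 10000; // give up after 10s, as above
  const MAX_TEST_INTERVAL = 1000;   // back off to at most 1s between attempts
  const startTime = Date.now();
  let testInterval = 50;            // LOCK_TEST_INTERVAL

  const attempt = () => {
    if (Date.now() - startTime > MAX_LOCK_WAIT_TIME) {
      return callback(new Error('Timeout'));
    }
    tryLock(key, lockValue, 30, (error, gotLock) => {
      if (error) return callback(error);
      if (gotLock) return callback(null);
      setTimeout(attempt, testInterval);
      testInterval = Math.min(testInterval * 2, MAX_TEST_INTERVAL);
    });
  };
  attempt();
}
```
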
+ setDoc(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("persistenceManager.setDoc"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - url = "#{Settings.apis.web.url}/project/#{project_id}/doc/#{doc_id}" - request { - url: url - method: "POST" - json: - lines: lines - ranges: ranges - version: version - lastUpdatedBy: lastUpdatedBy - lastUpdatedAt: lastUpdatedAt - auth: - user: Settings.apis.web.user - pass: Settings.apis.web.pass + const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}`; + return request({ + url, + method: "POST", + json: { + lines, + ranges, + version, + lastUpdatedBy, + lastUpdatedAt + }, + auth: { + user: Settings.apis.web.user, + pass: Settings.apis.web.pass, sendImmediately: true - jar: false + }, + jar: false, timeout: MAX_HTTP_REQUEST_LENGTH - }, (error, res, body) -> - updateMetric('setDoc', error, res) - return callback(error) if error? - if res.statusCode >= 200 and res.statusCode < 300 - return callback null - else if res.statusCode == 404 - return callback(new Errors.NotFoundError("doc not not found: #{url}")) - else - return callback(new Error("error accessing web API: #{url} #{res.statusCode}")) + }, function(error, res, body) { + updateMetric('setDoc', error, res); + if (error != null) { return callback(error); } + if ((res.statusCode >= 200) && (res.statusCode < 300)) { + return callback(null); + } else if (res.statusCode === 404) { + return callback(new Errors.NotFoundError(`doc not found: ${url}`)); + } else { + return callback(new Error(`error accessing web API: ${url} ${res.statusCode}`)); + } + }); + } +}); diff --git a/services/document-updater/app/coffee/Profiler.js b/services/document-updater/app/coffee/Profiler.js index 1d85f9bd98..2ca3484496 100644 --- a/services/document-updater/app/coffee/Profiler.js +++ b/services/document-updater/app/coffee/Profiler.js @@ -1,34 +1,56 @@ -Settings = require('settings-sharelatex') -logger = require('logger-sharelatex') +/* + * decaffeinate suggestions: + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let Profiler; +const Settings = require('settings-sharelatex'); +const logger = require('logger-sharelatex'); -deltaMs = (ta, tb) -> - nanoSeconds = (ta[0]-tb[0])*1e9 + (ta[1]-tb[1]) - milliSeconds = Math.floor(nanoSeconds*1e-6) - return milliSeconds +const deltaMs = function(ta, tb) { + const nanoSeconds = ((ta[0]-tb[0])*1e9) + (ta[1]-tb[1]); + const milliSeconds = Math.floor(nanoSeconds*1e-6); + return milliSeconds; +}; -module.exports = class Profiler - LOG_CUTOFF_TIME: 1000 +module.exports = (Profiler = (function() { + Profiler = class Profiler { + static initClass() { + this.prototype.LOG_CUTOFF_TIME = 1000; + } - constructor: (@name, @args) -> - @t0 = @t = process.hrtime() - @start = new Date() - @updateTimes = [] + constructor(name, args) { + this.name = name; + this.args = args; + this.t0 = (this.t = process.hrtime()); + this.start = new Date(); + this.updateTimes = []; + } - log: (label) -> - t1 = process.hrtime() - dtMilliSec = deltaMs(t1, @t) - @t = t1 - @updateTimes.push [label, dtMilliSec] # timings in ms - return @ # make it chainable + log(label) { + const t1 = process.hrtime(); + const dtMilliSec = deltaMs(t1, this.t); + this.t = t1; + this.updateTimes.push([label, dtMilliSec]); //
timings in ms + return this; // make it chainable + } - end: (message) -> - totalTime = deltaMs(@t, @t0) - if totalTime > @LOG_CUTOFF_TIME # log anything greater than cutoff - args = {} - for k,v of @args - args[k] = v - args.updateTimes = @updateTimes - args.start = @start - args.end = new Date() - logger.log args, @name - return totalTime + end(message) { + const totalTime = deltaMs(this.t, this.t0); + if (totalTime > this.LOG_CUTOFF_TIME) { // log anything greater than cutoff + const args = {}; + for (let k in this.args) { + const v = this.args[k]; + args[k] = v; + } + args.updateTimes = this.updateTimes; + args.start = this.start; + args.end = new Date(); + logger.log(args, this.name); + } + return totalTime; + } + }; + Profiler.initClass(); + return Profiler; +})()); diff --git a/services/document-updater/app/coffee/ProjectFlusher.js b/services/document-updater/app/coffee/ProjectFlusher.js index e1ead3759c..d42eb59531 100644 --- a/services/document-updater/app/coffee/ProjectFlusher.js +++ b/services/document-updater/app/coffee/ProjectFlusher.js @@ -1,73 +1,101 @@ -request = require("request") -Settings = require('settings-sharelatex') -RedisManager = require("./RedisManager") -rclient = RedisManager.rclient -docUpdaterKeys = Settings.redis.documentupdater.key_schema -async = require("async") -ProjectManager = require("./ProjectManager") -_ = require("lodash") -logger = require("logger-sharelatex") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const request = require("request"); +const Settings = require('settings-sharelatex'); +const RedisManager = require("./RedisManager"); +const { + rclient +} = RedisManager; +const docUpdaterKeys = Settings.redis.documentupdater.key_schema; +const async = require("async"); +const ProjectManager = require("./ProjectManager"); +const _ = require("lodash"); +const logger = require("logger-sharelatex"); -ProjectFlusher = +var ProjectFlusher = { - # iterate over keys asynchronously using redis scan (non-blocking) - # handle all the cluster nodes or single redis server - _getKeys: (pattern, limit, callback) -> - nodes = rclient.nodes?('master') || [ rclient ]; - doKeyLookupForNode = (node, cb) -> - ProjectFlusher._getKeysFromNode node, pattern, limit, cb - async.concatSeries nodes, doKeyLookupForNode, callback + // iterate over keys asynchronously using redis scan (non-blocking) + // handle all the cluster nodes or single redis server + _getKeys(pattern, limit, callback) { + const nodes = (typeof rclient.nodes === 'function' ? rclient.nodes('master') : undefined) || [ rclient ]; + const doKeyLookupForNode = (node, cb) => ProjectFlusher._getKeysFromNode(node, pattern, limit, cb); + return async.concatSeries(nodes, doKeyLookupForNode, callback); + }, - _getKeysFromNode: (node, pattern, limit = 1000, callback) -> - cursor = 0 # redis iterator - keySet = {} # use hash to avoid duplicate results - batchSize = if limit? then Math.min(limit, 1000) else 1000 - # scan over all keys looking for pattern - doIteration = (cb) -> - node.scan cursor, "MATCH", pattern, "COUNT", batchSize, (error, reply) -> - return callback(error) if error? 
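
Back in Profiler above, `deltaMs` converts a pair of `process.hrtime()` tuples (`[seconds, nanoseconds]`) into whole milliseconds. A quick standalone check of the arithmetic:

```js
// Same arithmetic as Profiler's deltaMs above.
const deltaMs = (ta, tb) =>
  Math.floor(((ta[0] - tb[0]) * 1e9 + (ta[1] - tb[1])) * 1e-6);

const t0 = process.hrtime();
setTimeout(() => {
  const t1 = process.hrtime();
  console.log(deltaMs(t1, t0)); // ~50
}, 50);
```
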
- [cursor, keys] = reply - for key in keys - keySet[key] = true - keys = Object.keys(keySet) - noResults = cursor == "0" # redis returns string results not numeric - limitReached = (limit? && keys.length >= limit) - if noResults || limitReached - return callback(null, keys) - else - setTimeout doIteration, 10 # avoid hitting redis too hard - doIteration() + _getKeysFromNode(node, pattern, limit, callback) { + if (limit == null) { limit = 1000; } + let cursor = 0; // redis iterator + const keySet = {}; // use hash to avoid duplicate results + const batchSize = (limit != null) ? Math.min(limit, 1000) : 1000; + // scan over all keys looking for pattern + var doIteration = cb => // avoid hitting redis too hard + node.scan(cursor, "MATCH", pattern, "COUNT", batchSize, function(error, reply) { + let keys; + if (error != null) { return callback(error); } + [cursor, keys] = Array.from(reply); + for (let key of Array.from(keys)) { + keySet[key] = true; + } + keys = Object.keys(keySet); + const noResults = cursor === "0"; // redis returns string results not numeric + const limitReached = ((limit != null) && (keys.length >= limit)); + if (noResults || limitReached) { + return callback(null, keys); + } else { + return setTimeout(doIteration, 10); + } + }); + return doIteration(); + }, - # extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b - # or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster) - _extractIds: (keyList) -> - ids = for key in keyList - m = key.match(/:\{?([0-9a-f]{24})\}?/) # extract object id - m[1] - return ids + // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b + // or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster) + _extractIds(keyList) { + const ids = (() => { + const result = []; + for (let key of Array.from(keyList)) { + const m = key.match(/:\{?([0-9a-f]{24})\}?/); // extract object id + result.push(m[1]); + } + return result; + })(); + return ids; + }, - flushAllProjects: (options, callback)-> - logger.log options:options, "flushing all projects" - ProjectFlusher._getKeys docUpdaterKeys.docsInProject({project_id:"*"}), options.limit, (error, project_keys) -> - if error? - logger.err err:error, "error getting keys for flushing" - return callback(error) - project_ids = ProjectFlusher._extractIds(project_keys) - if options.dryRun - return callback(null, project_ids) - jobs = _.map project_ids, (project_id)-> - return (cb)-> - ProjectManager.flushAndDeleteProjectWithLocks project_id, {background:true}, cb - async.parallelLimit async.reflectAll(jobs), options.concurrency, (error, results)-> - success = [] - failure = [] - _.each results, (result, i)-> - if result.error? 
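
_getKeysFromNode above iterates the keyspace with SCAN rather than KEYS so Redis is never blocked: follow the cursor, dedupe results in a hash, and stop when the cursor comes back as the string "0". The same loop in isolation, assuming an ioredis-style `scan(cursor, 'MATCH', ..., 'COUNT', ..., cb)` signature as used in the patch:

```js
// Usage sketch: getKeys(new (require('ioredis'))(), 'docsInProject:*', console.log)
function getKeys(node, pattern, callback) {
  let cursor = 0;      // redis returns the next cursor as a string
  const keySet = {};   // hash avoids duplicates across iterations

  const doIteration = () => {
    node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, (error, reply) => {
      if (error) return callback(error);
      const [nextCursor, keys] = reply;
      cursor = nextCursor;
      for (const key of keys) keySet[key] = true;
      if (cursor === '0') return callback(null, Object.keys(keySet));
      setTimeout(doIteration, 10); // breathe between batches, as above
    });
  };
  doIteration();
}
```
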
- failure.push(project_ids[i]) - else - success.push(project_ids[i]) - logger.log success:success, failure:failure, "finished flushing all projects" - return callback(error, {success:success, failure:failure}) + flushAllProjects(options, callback){ + logger.log({options}, "flushing all projects"); + return ProjectFlusher._getKeys(docUpdaterKeys.docsInProject({project_id:"*"}), options.limit, function(error, project_keys) { + if (error != null) { + logger.err({err:error}, "error getting keys for flushing"); + return callback(error); + } + const project_ids = ProjectFlusher._extractIds(project_keys); + if (options.dryRun) { + return callback(null, project_ids); + } + const jobs = _.map(project_ids, project_id => cb => ProjectManager.flushAndDeleteProjectWithLocks(project_id, {background:true}, cb)); + return async.parallelLimit(async.reflectAll(jobs), options.concurrency, function(error, results){ + const success = []; + const failure = []; + _.each(results, function(result, i){ + if (result.error != null) { + return failure.push(project_ids[i]); + } else { + return success.push(project_ids[i]); + } + }); + logger.log({success, failure}, "finished flushing all projects"); + return callback(error, {success, failure}); + }); + }); + } +}; -module.exports = ProjectFlusher \ No newline at end of file +module.exports = ProjectFlusher; \ No newline at end of file diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.js b/services/document-updater/app/coffee/ProjectHistoryRedisManager.js index af75487a90..cccacba2d2 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.js @@ -1,79 +1,111 @@ -Settings = require('settings-sharelatex') -projectHistoryKeys = Settings.redis?.project_history?.key_schema -rclient = require("redis-sharelatex").createClient(Settings.redis.project_history) -logger = require('logger-sharelatex') -metrics = require('./Metrics') +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS201: Simplify complex destructure assignments + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ProjectHistoryRedisManager; +const Settings = require('settings-sharelatex'); +const projectHistoryKeys = __guard__(Settings.redis != null ? Settings.redis.project_history : undefined, x => x.key_schema); +const rclient = require("redis-sharelatex").createClient(Settings.redis.project_history); +const logger = require('logger-sharelatex'); +const metrics = require('./Metrics'); -module.exports = ProjectHistoryRedisManager = - queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) -> - # Record metric for ops pushed onto queue - for op in ops - metrics.summary "redis.projectHistoryOps", op.length, {status: "push"} - multi = rclient.multi() - # Push the ops onto the project history queue - multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops... - # To record the age of the oldest op on the queue set a timestamp if not - # already present (SETNX). - multi.setnx projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now() - multi.exec (error, result) -> - return callback(error) if error? 
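
flushAllProjects above uses `async.reflectAll` so one failing project cannot abort the whole batch: every job settles with either `{value}` or `{error}`, and the results are partitioned afterwards. A minimal reproduction with the async library:

```js
const async = require('async');

const ids = ['a', 'b', 'c'];
const jobs = ids.map((id) => (cb) =>
  id === 'b' ? cb(new Error('boom')) : cb(null, id));

async.parallelLimit(async.reflectAll(jobs), 2, (error, results) => {
  const success = [];
  const failure = [];
  results.forEach((result, i) =>
    (result.error ? failure : success).push(ids[i]));
  console.log({ success, failure }); // { success: ['a','c'], failure: ['b'] }
});
```
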
- # return the number of entries pushed onto the project history queue - callback null, result[0] +module.exports = (ProjectHistoryRedisManager = { + queueOps(project_id, ...rest) { + // Record metric for ops pushed onto queue + const adjustedLength = Math.max(rest.length, 1), ops = rest.slice(0, adjustedLength - 1), val = rest[adjustedLength - 1], callback = val != null ? val : function(error, projectUpdateCount) {}; + for (let op of Array.from(ops)) { + metrics.summary("redis.projectHistoryOps", op.length, {status: "push"}); + } + const multi = rclient.multi(); + // Push the ops onto the project history queue + multi.rpush(projectHistoryKeys.projectHistoryOps({project_id}), ...Array.from(ops)); + // To record the age of the oldest op on the queue set a timestamp if not + // already present (SETNX). + multi.setnx(projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now()); + return multi.exec(function(error, result) { + if (error != null) { return callback(error); } + // return the number of entries pushed onto the project history queue + return callback(null, result[0]);}); + }, - queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) -> - projectUpdate = - pathname: projectUpdate.pathname - new_pathname: projectUpdate.newPathname - meta: - user_id: user_id + queueRenameEntity(project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) { + projectUpdate = { + pathname: projectUpdate.pathname, + new_pathname: projectUpdate.newPathname, + meta: { + user_id, ts: new Date() - version: projectUpdate.version - projectHistoryId: projectHistoryId - projectUpdate[entity_type] = entity_id + }, + version: projectUpdate.version, + projectHistoryId + }; + projectUpdate[entity_type] = entity_id; - logger.log {project_id, projectUpdate}, "queue rename operation to project-history" - jsonUpdate = JSON.stringify(projectUpdate) + logger.log({project_id, projectUpdate}, "queue rename operation to project-history"); + const jsonUpdate = JSON.stringify(projectUpdate); - ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback + return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback); + }, - queueAddEntity: (project_id, projectHistoryId, entity_type, entitiy_id, user_id, projectUpdate, callback = (error) ->) -> - projectUpdate = - pathname: projectUpdate.pathname - docLines: projectUpdate.docLines - url: projectUpdate.url - meta: - user_id: user_id + queueAddEntity(project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) { + if (callback == null) { callback = function(error) {}; } + projectUpdate = { + pathname: projectUpdate.pathname, + docLines: projectUpdate.docLines, + url: projectUpdate.url, + meta: { + user_id, ts: new Date() - version: projectUpdate.version - projectHistoryId: projectHistoryId - projectUpdate[entity_type] = entitiy_id + }, + version: projectUpdate.version, + projectHistoryId + }; + projectUpdate[entity_type] = entity_id; - logger.log {project_id, projectUpdate}, "queue add operation to project-history" - jsonUpdate = JSON.stringify(projectUpdate) + logger.log({project_id, projectUpdate}, "queue add operation to project-history"); + const jsonUpdate = JSON.stringify(projectUpdate); - ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback + return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback); + }, - queueResyncProjectStructure: (project_id, projectHistoryId, docs, files, callback) -> -
logger.log {project_id, docs, files}, "queue project structure resync" - projectUpdate = - resyncProjectStructure: { docs, files } - projectHistoryId: projectHistoryId - meta: + queueResyncProjectStructure(project_id, projectHistoryId, docs, files, callback) { + logger.log({project_id, docs, files}, "queue project structure resync"); + const projectUpdate = { + resyncProjectStructure: { docs, files }, + projectHistoryId, + meta: { ts: new Date() - jsonUpdate = JSON.stringify projectUpdate - ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback + } + }; + const jsonUpdate = JSON.stringify(projectUpdate); + return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback); + }, - queueResyncDocContent: (project_id, projectHistoryId, doc_id, lines, version, pathname, callback) -> - logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync" - projectUpdate = - resyncDocContent: + queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback) { + logger.log({project_id, doc_id, lines, version, pathname}, "queue doc content resync"); + const projectUpdate = { + resyncDocContent: { content: lines.join("\n"), - version: version - projectHistoryId: projectHistoryId - path: pathname - doc: doc_id - meta: + version + }, + projectHistoryId, + path: pathname, + doc: doc_id, + meta: { ts: new Date() - jsonUpdate = JSON.stringify projectUpdate - ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback + } + }; + const jsonUpdate = JSON.stringify(projectUpdate); + return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback); + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/ProjectManager.js b/services/document-updater/app/coffee/ProjectManager.js index b60bb98d5e..8b45b7d32d 100644 --- a/services/document-updater/app/coffee/ProjectManager.js +++ b/services/document-updater/app/coffee/ProjectManager.js @@ -1,168 +1,225 @@ -RedisManager = require "./RedisManager" -ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" -DocumentManager = require "./DocumentManager" -HistoryManager = require "./HistoryManager" -async = require "async" -logger = require "logger-sharelatex" -Metrics = require "./Metrics" -Errors = require "./Errors" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ProjectManager; +const RedisManager = require("./RedisManager"); +const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager"); +const DocumentManager = require("./DocumentManager"); +const HistoryManager = require("./HistoryManager"); +const async = require("async"); +const logger = require("logger-sharelatex"); +const Metrics = require("./Metrics"); +const Errors = require("./Errors"); -module.exports = ProjectManager = - flushProjectWithLocks: (project_id, _callback = (error) ->) -> - timer = new Metrics.Timer("projectManager.flushProjectWithLocks") - callback = (args...) -> - timer.done() - _callback(args...) 
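
Two things worth unpacking in the queueOps conversion above. First, decaffeinate turns the CoffeeScript middle splat `(project_id, ops..., callback) ->` into rest-parameter slicing. Second, the MULTI pairs RPUSH with SETNX on a timestamp key: since SETNX writes only when the key is absent, it always holds the enqueue time of the oldest unflushed op, so queue lag is simply `Date.now()` minus that value. A sketch of both, assuming a node_redis-style client whose `multi().exec(cb)` yields the replies array directly, as in the patch; the key names are invented for illustration:

```js
// rclient: node_redis-style client; key names below are illustrative only.
function queueOps(rclient, project_id, ...rest) {
  // CoffeeScript `(project_id, ops..., callback) ->`: everything but the
  // last argument is an op, the last argument is the callback.
  const adjustedLength = Math.max(rest.length, 1);
  const ops = rest.slice(0, adjustedLength - 1);
  const callback = rest[adjustedLength - 1] || function () {};

  rclient.multi()
    .rpush(`ProjectHistory:Ops:{${project_id}}`, ...ops)
    .setnx(`ProjectHistory:FirstOpTimestamp:{${project_id}}`, Date.now())
    .exec((error, result) => {
      if (error) return callback(error);
      callback(null, result[0]); // RPUSH reply: queue length after the push
    });
}
```
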
+module.exports = (ProjectManager = { + flushProjectWithLocks(project_id, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("projectManager.flushProjectWithLocks"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> - return callback(error) if error? - jobs = [] - errors = [] - for doc_id in (doc_ids or []) - do (doc_id) -> - jobs.push (callback) -> - DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) -> - if error? and error instanceof Errors.NotFoundError - logger.warn err: error, project_id: project_id, doc_id: doc_id, "found deleted doc when flushing" - callback() - else if error? - logger.error err: error, project_id: project_id, doc_id: doc_id, "error flushing doc" - errors.push(error) - callback() - else - callback() + return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) { + if (error != null) { return callback(error); } + const jobs = []; + const errors = []; + for (let doc_id of Array.from((doc_ids || []))) { + ((doc_id => jobs.push(callback => DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function(error) { + if ((error != null) && error instanceof Errors.NotFoundError) { + logger.warn({err: error, project_id, doc_id}, "found deleted doc when flushing"); + return callback(); + } else if (error != null) { + logger.error({err: error, project_id, doc_id}, "error flushing doc"); + errors.push(error); + return callback(); + } else { + return callback(); + } + }))))(doc_id); + } - logger.log project_id: project_id, doc_ids: doc_ids, "flushing docs" - async.series jobs, () -> - if errors.length > 0 - callback new Error("Errors flushing docs. See log for details") - else - callback(null) + logger.log({project_id, doc_ids}, "flushing docs"); + return async.series(jobs, function() { + if (errors.length > 0) { + return callback(new Error("Errors flushing docs. See log for details")); + } else { + return callback(null); + } + }); + }); + }, - flushAndDeleteProjectWithLocks: (project_id, options, _callback = (error) ->) -> - timer = new Metrics.Timer("projectManager.flushAndDeleteProjectWithLocks") - callback = (args...) -> - timer.done() - _callback(args...) + flushAndDeleteProjectWithLocks(project_id, options, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("projectManager.flushAndDeleteProjectWithLocks"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> - return callback(error) if error? - jobs = [] - errors = [] - for doc_id in (doc_ids or []) - do (doc_id) -> - jobs.push (callback) -> - DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, {}, (error) -> - if error? 
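
The `((doc_id => ...))(doc_id)` wrappers above are decaffeinate's rendering of CoffeeScript's `do (doc_id) ->`, which was needed to capture the loop variable under `var` semantics. With block-scoped `let`/`const` the capture is automatic, so the equivalent job-building loop needs no IIFE:

```js
// Equivalent job-building loop without the per-iteration IIFE.
const jobs = [];
for (const doc_id of ['d1', 'd2', 'd3']) {
  jobs.push((callback) => {
    console.log('flushing', doc_id); // each job sees its own doc_id
    callback(null);
  });
}
jobs.forEach((job) => job(() => {}));
```
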
- logger.error err: error, project_id: project_id, doc_id: doc_id, "error deleting doc" - errors.push(error) - callback() + return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) { + if (error != null) { return callback(error); } + const jobs = []; + const errors = []; + for (let doc_id of Array.from((doc_ids || []))) { + ((doc_id => jobs.push(callback => DocumentManager.flushAndDeleteDocWithLock(project_id, doc_id, {}, function(error) { + if (error != null) { + logger.error({err: error, project_id, doc_id}, "error deleting doc"); + errors.push(error); + } + return callback(); + }))))(doc_id); + } - logger.log project_id: project_id, doc_ids: doc_ids, "deleting docs" - async.series jobs, () -> - # When deleting the project here we want to ensure that project - # history is completely flushed because the project may be - # deleted in web after this call completes, and so further - # attempts to flush would fail after that. - HistoryManager.flushProjectChanges project_id, options, (error) -> - if errors.length > 0 - callback new Error("Errors deleting docs. See log for details") - else if error? - callback(error) - else - callback(null) + logger.log({project_id, doc_ids}, "deleting docs"); + return async.series(jobs, () => // When deleting the project here we want to ensure that project + // history is completely flushed because the project may be + // deleted in web after this call completes, and so further + // attempts to flush would fail after that. + HistoryManager.flushProjectChanges(project_id, options, function(error) { + if (errors.length > 0) { + return callback(new Error("Errors deleting docs. See log for details")); + } else if (error != null) { + return callback(error); + } else { + return callback(null); + } + })); + }); + }, - queueFlushAndDeleteProject: (project_id, callback = (error) ->) -> - RedisManager.queueFlushAndDeleteProject project_id, (error) -> - if error? - logger.error {project_id: project_id, error:error}, "error adding project to flush and delete queue" - return callback(error) - Metrics.inc "queued-delete" - callback() + queueFlushAndDeleteProject(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + return RedisManager.queueFlushAndDeleteProject(project_id, function(error) { + if (error != null) { + logger.error({project_id, error}, "error adding project to flush and delete queue"); + return callback(error); + } + Metrics.inc("queued-delete"); + return callback(); + }); + }, - getProjectDocsTimestamps: (project_id, callback = (error) ->) -> - RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> - return callback(error) if error? - return callback(null, []) if !doc_ids?.length - RedisManager.getDocTimestamps doc_ids, (error, timestamps) -> - return callback(error) if error? - callback(null, timestamps) + getProjectDocsTimestamps(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) { + if (error != null) { return callback(error); } + if (!(doc_ids != null ? 
doc_ids.length : undefined)) { return callback(null, []); } + return RedisManager.getDocTimestamps(doc_ids, function(error, timestamps) { + if (error != null) { return callback(error); } + return callback(null, timestamps); + }); + }); + }, - getProjectDocsAndFlushIfOld: (project_id, projectStateHash, excludeVersions = {}, _callback = (error, docs) ->) -> - timer = new Metrics.Timer("projectManager.getProjectDocsAndFlushIfOld") - callback = (args...) -> - timer.done() - _callback(args...) + getProjectDocsAndFlushIfOld(project_id, projectStateHash, excludeVersions, _callback) { + if (excludeVersions == null) { excludeVersions = {}; } + if (_callback == null) { _callback = function(error, docs) {}; } + const timer = new Metrics.Timer("projectManager.getProjectDocsAndFlushIfOld"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - RedisManager.checkOrSetProjectState project_id, projectStateHash, (error, projectStateChanged) -> - if error? - logger.error err: error, project_id: project_id, "error getting/setting project state in getProjectDocsAndFlushIfOld" - return callback(error) - # we can't return docs if project structure has changed - if projectStateChanged - return callback Errors.ProjectStateChangedError("project state changed") - # project structure hasn't changed, return doc content from redis - RedisManager.getDocIdsInProject project_id, (error, doc_ids) -> - if error? - logger.error err: error, project_id: project_id, "error getting doc ids in getProjectDocs" - return callback(error) - jobs = [] - for doc_id in doc_ids or [] - do (doc_id) -> - jobs.push (cb) -> - # get the doc lines from redis - DocumentManager.getDocAndFlushIfOldWithLock project_id, doc_id, (err, lines, version) -> - if err? - logger.error err:err, project_id: project_id, doc_id: doc_id, "error getting project doc lines in getProjectDocsAndFlushIfOld" - return cb(err) - doc = {_id:doc_id, lines:lines, v:version} # create a doc object to return - cb(null, doc) - async.series jobs, (error, docs) -> - return callback(error) if error? 
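
getProjectDocsAndFlushIfOld pairs with the 409 branch in HttpController's getAllDocs: the caller sends a hash of its view of the project structure, and a ProjectStateChangedError (HTTP 409) tells it that view is stale and its cached doc list must be invalidated. A hypothetical caller; Node 18+ global fetch and the route path are assumptions, not part of this patch:

```js
// Hypothetical client of the getAllDocs endpoint above (route path assumed).
async function getProjectDocs(baseUrl, projectId, stateHash, excludeParam) {
  const url = `${baseUrl}/project/${projectId}/doc?state=${stateHash}` +
    (excludeParam ? `&exclude=${encodeURIComponent(excludeParam)}` : '');
  const res = await fetch(url);
  if (res.status === 409) {
    // Conflict: the project structure changed since stateHash was computed.
    // The caller should drop its cached doc list and request again.
    throw new Error('ProjectStateChangedError');
  }
  if (!res.ok) throw new Error(`doc-updater returned ${res.status}`);
  return res.json(); // [{_id, lines, v}, ...] per the handler's response
}
```
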
- callback(null, docs) + return RedisManager.checkOrSetProjectState(project_id, projectStateHash, function(error, projectStateChanged) { + if (error != null) { + logger.error({err: error, project_id}, "error getting/setting project state in getProjectDocsAndFlushIfOld"); + return callback(error); + } + // we can't return docs if project structure has changed + if (projectStateChanged) { + return callback(Errors.ProjectStateChangedError("project state changed")); + } + // project structure hasn't changed, return doc content from redis + return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) { + if (error != null) { + logger.error({err: error, project_id}, "error getting doc ids in getProjectDocs"); + return callback(error); + } + const jobs = []; + for (let doc_id of Array.from(doc_ids || [])) { + ((doc_id => jobs.push(cb => // get the doc lines from redis + DocumentManager.getDocAndFlushIfOldWithLock(project_id, doc_id, function(err, lines, version) { + if (err != null) { + logger.error({err, project_id, doc_id}, "error getting project doc lines in getProjectDocsAndFlushIfOld"); + return cb(err); + } + const doc = {_id:doc_id, lines, v:version}; // create a doc object to return + return cb(null, doc); + }))))(doc_id); + } + return async.series(jobs, function(error, docs) { + if (error != null) { return callback(error); } + return callback(null, docs); + }); + }); + }); + }, - clearProjectState: (project_id, callback = (error) ->) -> - RedisManager.clearProjectState project_id, callback + clearProjectState(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + return RedisManager.clearProjectState(project_id, callback); + }, - updateProjectWithLocks: (project_id, projectHistoryId, user_id, docUpdates, fileUpdates, version, _callback = (error) ->) -> - timer = new Metrics.Timer("projectManager.updateProject") - callback = (args...) -> - timer.done() - _callback(args...) + updateProjectWithLocks(project_id, projectHistoryId, user_id, docUpdates, fileUpdates, version, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const timer = new Metrics.Timer("projectManager.updateProject"); + const callback = function(...args) { + timer.done(); + return _callback(...Array.from(args || [])); + }; - project_version = version - project_subversion = 0 # project versions can have multiple operations + const project_version = version; + let project_subversion = 0; // project versions can have multiple operations - project_ops_length = 0 + let project_ops_length = 0; - handleDocUpdate = (projectUpdate, cb) -> - doc_id = projectUpdate.id - projectUpdate.version = "#{project_version}.#{project_subversion++}" - if projectUpdate.docLines? 
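
updateProjectWithLocks stamps each operation with `${version}.${subversion++}` because a single incoming project version can carry several operations, and the suffix keeps a total order within the batch. The scheme in isolation:

```js
// Stamp each update in a batch with version.subversion, as above.
function stampUpdates(version, updates) {
  let subversion = 0;
  return updates.map((update) => ({ ...update, version: `${version}.${subversion++}` }));
}

console.log(stampUpdates(42, [{ id: 'doc1' }, { id: 'file1' }]));
// [ { id: 'doc1', version: '42.0' }, { id: 'file1', version: '42.1' } ]
```
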
- ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'doc', doc_id, user_id, projectUpdate, (error, count) -> - project_ops_length = count - cb(error) - else - DocumentManager.renameDocWithLock project_id, doc_id, user_id, projectUpdate, projectHistoryId, (error, count) -> - project_ops_length = count - cb(error) + const handleDocUpdate = function(projectUpdate, cb) { + const doc_id = projectUpdate.id; + projectUpdate.version = `${project_version}.${project_subversion++}`; + if (projectUpdate.docLines != null) { + return ProjectHistoryRedisManager.queueAddEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, projectUpdate, function(error, count) { + project_ops_length = count; + return cb(error); + }); + } else { + return DocumentManager.renameDocWithLock(project_id, doc_id, user_id, projectUpdate, projectHistoryId, function(error, count) { + project_ops_length = count; + return cb(error); + }); + } + }; - handleFileUpdate = (projectUpdate, cb) -> - file_id = projectUpdate.id - projectUpdate.version = "#{project_version}.#{project_subversion++}" - if projectUpdate.url? - ProjectHistoryRedisManager.queueAddEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) -> - project_ops_length = count - cb(error) - else - ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, (error, count) -> - project_ops_length = count - cb(error) + const handleFileUpdate = function(projectUpdate, cb) { + const file_id = projectUpdate.id; + projectUpdate.version = `${project_version}.${project_subversion++}`; + if (projectUpdate.url != null) { + return ProjectHistoryRedisManager.queueAddEntity(project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, function(error, count) { + project_ops_length = count; + return cb(error); + }); + } else { + return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, function(error, count) { + project_ops_length = count; + return cb(error); + }); + } + }; - async.eachSeries docUpdates, handleDocUpdate, (error) -> - return callback(error) if error? - async.eachSeries fileUpdates, handleFileUpdate, (error) -> - return callback(error) if error? 
- if HistoryManager.shouldFlushHistoryOps(project_ops_length, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS) - HistoryManager.flushProjectChangesAsync project_id - callback() + return async.eachSeries(docUpdates, handleDocUpdate, function(error) { + if (error != null) { return callback(error); } + return async.eachSeries(fileUpdates, handleFileUpdate, function(error) { + if (error != null) { return callback(error); } + if (HistoryManager.shouldFlushHistoryOps(project_ops_length, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)) { + HistoryManager.flushProjectChangesAsync(project_id); + } + return callback(); + }); + }); + } +}); diff --git a/services/document-updater/app/coffee/RangesManager.js b/services/document-updater/app/coffee/RangesManager.js index bcb16a39c9..83523f33b5 100644 --- a/services/document-updater/app/coffee/RangesManager.js +++ b/services/document-updater/app/coffee/RangesManager.js @@ -1,76 +1,112 @@ -RangesTracker = require "./RangesTracker" -logger = require "logger-sharelatex" -_ = require "lodash" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RangesManager; +const RangesTracker = require("./RangesTracker"); +const logger = require("logger-sharelatex"); +const _ = require("lodash"); -module.exports = RangesManager = - MAX_COMMENTS: 500 - MAX_CHANGES: 2000 +module.exports = (RangesManager = { + MAX_COMMENTS: 500, + MAX_CHANGES: 2000, - applyUpdate: (project_id, doc_id, entries = {}, updates = [], newDocLines, callback = (error, new_entries, ranges_were_collapsed) ->) -> - {changes, comments} = _.cloneDeep(entries) - rangesTracker = new RangesTracker(changes, comments) - emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker) - for update in updates - rangesTracker.track_changes = !!update.meta.tc - if !!update.meta.tc - rangesTracker.setIdSeed(update.meta.tc) - for op in update.op - try - rangesTracker.applyOp(op, { user_id: update.meta?.user_id }) - catch error - return callback(error) + applyUpdate(project_id, doc_id, entries, updates, newDocLines, callback) { + let error; + if (entries == null) { entries = {}; } + if (updates == null) { updates = []; } + if (callback == null) { callback = function(error, new_entries, ranges_were_collapsed) {}; } + const {changes, comments} = _.cloneDeep(entries); + const rangesTracker = new RangesTracker(changes, comments); + const emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker); + for (let update of Array.from(updates)) { + rangesTracker.track_changes = !!update.meta.tc; + if (!!update.meta.tc) { + rangesTracker.setIdSeed(update.meta.tc); + } + for (let op of Array.from(update.op)) { + try { + rangesTracker.applyOp(op, { user_id: (update.meta != null ? update.meta.user_id : undefined) }); + } catch (error1) { + error = error1; + return callback(error); + } + } + } - if rangesTracker.changes?.length > RangesManager.MAX_CHANGES or rangesTracker.comments?.length > RangesManager.MAX_COMMENTS - return callback new Error("too many comments or tracked changes") + if (((rangesTracker.changes != null ? rangesTracker.changes.length : undefined) > RangesManager.MAX_CHANGES) || ((rangesTracker.comments != null ? 
rangesTracker.comments.length : undefined) > RangesManager.MAX_COMMENTS)) { + return callback(new Error("too many comments or tracked changes")); + } - try - # This is a consistency check that all of our ranges and - # comments still match the corresponding text - rangesTracker.validate(newDocLines.join("\n")) - catch error - logger.error {err: error, project_id, doc_id, newDocLines, updates}, "error validating ranges" - return callback(error) + try { + // This is a consistency check that all of our ranges and + // comments still match the corresponding text + rangesTracker.validate(newDocLines.join("\n")); + } catch (error2) { + error = error2; + logger.error({err: error, project_id, doc_id, newDocLines, updates}, "error validating ranges"); + return callback(error); + } - emptyRangeCountAfter = RangesManager._emptyRangesCount(rangesTracker) - rangesWereCollapsed = emptyRangeCountAfter > emptyRangeCountBefore - response = RangesManager._getRanges rangesTracker - logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length, rangesWereCollapsed}, "applied updates to ranges" - callback null, response, rangesWereCollapsed + const emptyRangeCountAfter = RangesManager._emptyRangesCount(rangesTracker); + const rangesWereCollapsed = emptyRangeCountAfter > emptyRangeCountBefore; + const response = RangesManager._getRanges(rangesTracker); + logger.log({project_id, doc_id, changesCount: (response.changes != null ? response.changes.length : undefined), commentsCount: (response.comments != null ? response.comments.length : undefined), rangesWereCollapsed}, "applied updates to ranges"); + return callback(null, response, rangesWereCollapsed); + }, - acceptChanges: (change_ids, ranges, callback = (error, ranges) ->) -> - {changes, comments} = ranges - logger.log "accepting #{ change_ids.length } changes in ranges" - rangesTracker = new RangesTracker(changes, comments) - rangesTracker.removeChangeIds(change_ids) - response = RangesManager._getRanges(rangesTracker) - callback null, response + acceptChanges(change_ids, ranges, callback) { + if (callback == null) { callback = function(error, ranges) {}; } + const {changes, comments} = ranges; + logger.log(`accepting ${ change_ids.length } changes in ranges`); + const rangesTracker = new RangesTracker(changes, comments); + rangesTracker.removeChangeIds(change_ids); + const response = RangesManager._getRanges(rangesTracker); + return callback(null, response); + }, - deleteComment: (comment_id, ranges, callback = (error, ranges) ->) -> - {changes, comments} = ranges - logger.log {comment_id}, "deleting comment in ranges" - rangesTracker = new RangesTracker(changes, comments) - rangesTracker.removeCommentId(comment_id) - response = RangesManager._getRanges(rangesTracker) - callback null, response + deleteComment(comment_id, ranges, callback) { + if (callback == null) { callback = function(error, ranges) {}; } + const {changes, comments} = ranges; + logger.log({comment_id}, "deleting comment in ranges"); + const rangesTracker = new RangesTracker(changes, comments); + rangesTracker.removeCommentId(comment_id); + const response = RangesManager._getRanges(rangesTracker); + return callback(null, response); + }, - _getRanges: (rangesTracker) -> - # Return the minimal data structure needed, since most documents won't have any - # changes or comments - response = {} - if rangesTracker.changes?.length > 0 - response ?= {} - response.changes = rangesTracker.changes - if rangesTracker.comments?.length > 0 - response ?= {} 
- response.comments = rangesTracker.comments - return response + _getRanges(rangesTracker) { + // Return the minimal data structure needed, since most documents won't have any + // changes or comments + let response = {}; + if ((rangesTracker.changes != null ? rangesTracker.changes.length : undefined) > 0) { + if (response == null) { response = {}; } + response.changes = rangesTracker.changes; + } + if ((rangesTracker.comments != null ? rangesTracker.comments.length : undefined) > 0) { + if (response == null) { response = {}; } + response.comments = rangesTracker.comments; + } + return response; + }, - _emptyRangesCount: (ranges) -> - count = 0 - for comment in (ranges.comments or []) - if comment.op.c == "" - count++ - for change in (ranges.changes or []) when change.op.i? - if change.op.i == "" - count++ - return count \ No newline at end of file + _emptyRangesCount(ranges) { + let count = 0; + for (let comment of Array.from((ranges.comments || []))) { + if (comment.op.c === "") { + count++; + } + } + for (let change of Array.from((ranges.changes || []))) { + if (change.op.i != null) { + if (change.op.i === "") { + count++; + } + } + } + return count; + } +}); \ No newline at end of file diff --git a/services/document-updater/app/coffee/RangesTracker.js b/services/document-updater/app/coffee/RangesTracker.js index 869d63159b..de7e885c5c 100644 --- a/services/document-updater/app/coffee/RangesTracker.js +++ b/services/document-updater/app/coffee/RangesTracker.js @@ -1,576 +1,717 @@ -# This file is shared between document-updater and web, so that the server and client share -# an identical track changes implementation. Do not edit it directly in web or document-updater, -# instead edit it at https://github.com/sharelatex/ranges-tracker, where it has a suite of tests -load = () -> - class RangesTracker - # The purpose of this class is to track a set of inserts and deletes to a document, like - # track changes in Word. We store these as a set of ShareJs style ranges: - # {i: "foo", p: 42} # Insert 'foo' at offset 42 - # {d: "bar", p: 37} # Delete 'bar' at offset 37 - # We only track the inserts and deletes, not the whole document, but by being given all - # updates that are applied to a document, we can update these appropriately. - # - # Note that the set of inserts and deletes we store applies to the document as-is at the moment. - # So inserts correspond to text which is in the document, while deletes correspond to text which - # is no longer there, so their lengths do not affect the position of later offsets. - # E.g. - # this is the current text of the document - # |-----| | - # {i: "current ", p:12} -^ ^- {d: "old ", p: 31} - # - # Track changes rules (should be consistent with Word): - # * When text is inserted at a delete, the text goes to the left of the delete - # I.e. "foo|bar" -> "foobaz|bar", where | is the delete, and 'baz' is inserted - # * Deleting content flagged as 'inserted' does not create a new delete marker, it only - # removes the insert marker. E.g. - # * "abdefghijkl" -> "abfghijkl" when 'de' is deleted. No delete marker added - # |---| <- inserted |-| <- inserted - # * Deletes overlapping regular text and inserted text will insert a delete marker for the - # regular text: - # "abcdefghijkl" -> "abcdejkl" when 'fghi' is deleted - # |----| |--|| - # ^- inserted 'bcdefg' \ ^- deleted 'hi' - # \--inserted 'bcde' - # * Deletes overlapping other deletes are merged. E.g. 
-      #   "abcghijkl" -> "ahijkl" when 'bcg is deleted'
-      #      |  <- delete 'def'   |   <- delete 'bcdefg'
-      # * Deletes by another user will consume deletes by the first user
-      # * Inserts by another user will not combine with inserts by the first user. If they are in the
-      #   middle of a previous insert by the first user, the original insert will be split into two.
-    constructor: (@changes = [], @comments = []) ->
-      @setIdSeed(RangesTracker.generateIdSeed())
-      @resetDirtyState()
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This file is shared between document-updater and web, so that the server and client share
+// an identical track changes implementation. Do not edit it directly in web or document-updater,
+// instead edit it at https://github.com/sharelatex/ranges-tracker, where it has a suite of tests
+const load = function() {
+  let RangesTracker;
+  return RangesTracker = class RangesTracker {
+    // The purpose of this class is to track a set of inserts and deletes to a document, like
+    // track changes in Word. We store these as a set of ShareJs style ranges:
+    //   {i: "foo", p: 42} # Insert 'foo' at offset 42
+    //   {d: "bar", p: 37} # Delete 'bar' at offset 37
+    // We only track the inserts and deletes, not the whole document, but by being given all
+    // updates that are applied to a document, we can update these appropriately.
+    //
+    // Note that the set of inserts and deletes we store applies to the document as-is at the moment.
+    // So inserts correspond to text which is in the document, while deletes correspond to text which
+    // is no longer there, so their lengths do not affect the position of later offsets.
+    // E.g.
+    //   this is the current text of the document
+    //               |-----|       |
+    //   {i: "current ", p:12} -^  ^- {d: "old ", p: 31}
+    //
+    // Track changes rules (should be consistent with Word):
+    //   * When text is inserted at a delete, the text goes to the left of the delete
+    //       I.e. "foo|bar" -> "foobaz|bar", where | is the delete, and 'baz' is inserted
+    //   * Deleting content flagged as 'inserted' does not create a new delete marker, it only
+    //     removes the insert marker. E.g.
+    //       * "abdefghijkl" -> "abfghijkl" when 'de' is deleted. No delete marker added
+    //           |---| <- inserted        |-| <- inserted
+    //   * Deletes overlapping regular text and inserted text will insert a delete marker for the
+    //     regular text:
+    //       "abcdefghijkl" -> "abcdejkl" when 'fghi' is deleted
+    //         |----|             |--||
+    //         ^- inserted 'bcdefg'   \ ^- deleted 'hi'
+    //                                 \--inserted 'bcde'
+    //   * Deletes overlapping other deletes are merged. E.g.
+    //       "abcghijkl" -> "ahijkl" when 'bcg' is deleted
+    //          |  <- delete 'def'    |  <- delete 'bcdefg'
+    //   * Deletes by another user will consume deletes by the first user
+    //   * Inserts by another user will not combine with inserts by the first user. If they are in the
+    //     middle of a previous insert by the first user, the original insert will be split into two.
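+    //
+    // A small worked illustration (added as a sketch; not part of the upstream
+    // ranges-tracker comments): starting from the document "the current text" with
+    //   {i: "current ", p: 4}   <- 'current ' is tracked as inserted
+    //   {d: "old ", p: 12}      <- 'old ' was deleted just before 'text'
+    // applying the op {i: "new ", p: 0} shifts both markers right by its length (4),
+    // giving {i: "current ", p: 8} and {d: "old ", p: 16}, which is what
+    // applyInsertToChanges below does for each tracked change.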
+    constructor(changes, comments) {
+      if (changes == null) { changes = []; }
+      this.changes = changes;
+      if (comments == null) { comments = []; }
+      this.comments = comments;
+      this.setIdSeed(RangesTracker.generateIdSeed());
+      this.resetDirtyState();
+    }

-    getIdSeed: () ->
-      return @id_seed
+    getIdSeed() {
+      return this.id_seed;
+    }

-    setIdSeed: (seed) ->
-      @id_seed = seed
-      @id_increment = 0
+    setIdSeed(seed) {
+      this.id_seed = seed;
+      return this.id_increment = 0;
+    }

-    @generateIdSeed: () ->
-      # Generate a the first 18 characters of Mongo ObjectId, leaving 6 for the increment part
-      # Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js
-      pid = Math.floor(Math.random() * (32767)).toString(16)
-      machine = Math.floor(Math.random() * (16777216)).toString(16)
-      timestamp = Math.floor(new Date().valueOf() / 1000).toString(16)
+    static generateIdSeed() {
+      // Generate the first 18 characters of a Mongo ObjectId, leaving 6 for the increment part
+      // Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js
+      const pid = Math.floor(Math.random() * (32767)).toString(16);
+      const machine = Math.floor(Math.random() * (16777216)).toString(16);
+      const timestamp = Math.floor(new Date().valueOf() / 1000).toString(16);
       return '00000000'.substr(0, 8 - timestamp.length) + timestamp +
              '000000'.substr(0, 6 - machine.length) + machine +
-             '0000'.substr(0, 4 - pid.length) + pid
+             '0000'.substr(0, 4 - pid.length) + pid;
+    }

-    @generateId: () ->
-      @generateIdSeed() + "000001"
+    static generateId() {
+      return this.generateIdSeed() + "000001";
+    }

-    newId: () ->
-      @id_increment++
-      increment = @id_increment.toString(16)
-      id = @id_seed + '000000'.substr(0, 6 - increment.length) + increment;
-      return id
+    newId() {
+      this.id_increment++;
+      const increment = this.id_increment.toString(16);
+      const id = this.id_seed + '000000'.substr(0, 6 - increment.length) + increment;
+      return id;
+    }

-    getComment: (comment_id) ->
-      comment = null
-      for c in @comments
-        if c.id == comment_id
-          comment = c
-          break
-      return comment
+    getComment(comment_id) {
+      let comment = null;
+      for (let c of Array.from(this.comments)) {
+        if (c.id === comment_id) {
+          comment = c;
+          break;
+        }
+      }
+      return comment;
+    }

-    removeCommentId: (comment_id) ->
-      comment = @getComment(comment_id)
-      return if !comment?
- @comments = @comments.filter (c) -> c.id != comment_id - @_markAsDirty comment, "comment", "removed" + removeCommentId(comment_id) { + const comment = this.getComment(comment_id); + if ((comment == null)) { return; } + this.comments = this.comments.filter(c => c.id !== comment_id); + return this._markAsDirty(comment, "comment", "removed"); + } - moveCommentId: (comment_id, position, text) -> - for comment in @comments - if comment.id == comment_id - comment.op.p = position - comment.op.c = text - @_markAsDirty comment, "comment", "moved" + moveCommentId(comment_id, position, text) { + return (() => { + const result = []; + for (let comment of Array.from(this.comments)) { + if (comment.id === comment_id) { + comment.op.p = position; + comment.op.c = text; + result.push(this._markAsDirty(comment, "comment", "moved")); + } else { + result.push(undefined); + } + } + return result; + })(); + } - getChange: (change_id) -> - change = null - for c in @changes - if c.id == change_id - change = c - break - return change + getChange(change_id) { + let change = null; + for (let c of Array.from(this.changes)) { + if (c.id === change_id) { + change = c; + break; + } + } + return change; + } - getChanges: (change_ids) -> - changes_response = [] - ids_map = {} + getChanges(change_ids) { + const changes_response = []; + const ids_map = {}; - for change_id in change_ids - ids_map[change_id] = true + for (let change_id of Array.from(change_ids)) { + ids_map[change_id] = true; + } - for change in @changes - if ids_map[change.id] - delete ids_map[change.id] - changes_response.push change + for (let change of Array.from(this.changes)) { + if (ids_map[change.id]) { + delete ids_map[change.id]; + changes_response.push(change); + } + } - return changes_response + return changes_response; + } - removeChangeId: (change_id) -> - change = @getChange(change_id) - return if !change? - @_removeChange(change) + removeChangeId(change_id) { + const change = this.getChange(change_id); + if ((change == null)) { return; } + return this._removeChange(change); + } - removeChangeIds: (change_to_remove_ids) -> - return if !change_to_remove_ids?.length > 0 - i = @changes.length - remove_change_id = {} - for change_id in change_to_remove_ids - remove_change_id[change_id] = true + removeChangeIds(change_to_remove_ids) { + if (!(change_to_remove_ids != null ? change_to_remove_ids.length : undefined) > 0) { return; } + const i = this.changes.length; + const remove_change_id = {}; + for (let change_id of Array.from(change_to_remove_ids)) { + remove_change_id[change_id] = true; + } - remaining_changes = [] + const remaining_changes = []; - for change in @changes - if remove_change_id[change.id] - delete remove_change_id[change.id] - @_markAsDirty change, "change", "removed" - else - remaining_changes.push change + for (let change of Array.from(this.changes)) { + if (remove_change_id[change.id]) { + delete remove_change_id[change.id]; + this._markAsDirty(change, "change", "removed"); + } else { + remaining_changes.push(change); + } + } - @changes = remaining_changes + return this.changes = remaining_changes; + } - validate: (text) -> - for change in @changes - if change.op.i? 
- content = text.slice(change.op.p, change.op.p + change.op.i.length) - if content != change.op.i - throw new Error("Change (#{JSON.stringify(change)}) doesn't match text (#{JSON.stringify(content)})") - for comment in @comments - content = text.slice(comment.op.p, comment.op.p + comment.op.c.length) - if content != comment.op.c - throw new Error("Comment (#{JSON.stringify(comment)}) doesn't match text (#{JSON.stringify(content)})") - return true + validate(text) { + let content; + for (let change of Array.from(this.changes)) { + if (change.op.i != null) { + content = text.slice(change.op.p, change.op.p + change.op.i.length); + if (content !== change.op.i) { + throw new Error(`Change (${JSON.stringify(change)}) doesn't match text (${JSON.stringify(content)})`); + } + } + } + for (let comment of Array.from(this.comments)) { + content = text.slice(comment.op.p, comment.op.p + comment.op.c.length); + if (content !== comment.op.c) { + throw new Error(`Comment (${JSON.stringify(comment)}) doesn't match text (${JSON.stringify(content)})`); + } + } + return true; + } - applyOp: (op, metadata = {}) -> - metadata.ts ?= new Date() - # Apply an op that has been applied to the document to our changes to keep them up to date - if op.i? - @applyInsertToChanges(op, metadata) - @applyInsertToComments(op) - else if op.d? - @applyDeleteToChanges(op, metadata) - @applyDeleteToComments(op) - else if op.c? - @addComment(op, metadata) - else - throw new Error("unknown op type") + applyOp(op, metadata) { + if (metadata == null) { metadata = {}; } + if (metadata.ts == null) { metadata.ts = new Date(); } + // Apply an op that has been applied to the document to our changes to keep them up to date + if (op.i != null) { + this.applyInsertToChanges(op, metadata); + return this.applyInsertToComments(op); + } else if (op.d != null) { + this.applyDeleteToChanges(op, metadata); + return this.applyDeleteToComments(op); + } else if (op.c != null) { + return this.addComment(op, metadata); + } else { + throw new Error("unknown op type"); + } + } - applyOps: (ops, metadata = {}) -> - for op in ops - @applyOp(op, metadata) + applyOps(ops, metadata) { + if (metadata == null) { metadata = {}; } + return Array.from(ops).map((op) => + this.applyOp(op, metadata)); + } - addComment: (op, metadata) -> - existing = @getComment(op.t) - if existing? - @moveCommentId(op.t, op.p, op.c) - return existing - else - @comments.push comment = { - id: op.t or @newId() - op: # Copy because we'll modify in place - c: op.c - p: op.p + addComment(op, metadata) { + const existing = this.getComment(op.t); + if (existing != null) { + this.moveCommentId(op.t, op.p, op.c); + return existing; + } else { + let comment; + this.comments.push(comment = { + id: op.t || this.newId(), + op: { // Copy because we'll modify in place + c: op.c, + p: op.p, t: op.t + }, metadata - } - @_markAsDirty comment, "comment", "added" - return comment + }); + this._markAsDirty(comment, "comment", "added"); + return comment; + } + } - applyInsertToComments: (op) -> - for comment in @comments - if op.p <= comment.op.p - comment.op.p += op.i.length - @_markAsDirty comment, "comment", "moved" - else if op.p < comment.op.p + comment.op.c.length - offset = op.p - comment.op.p - comment.op.c = comment.op.c[0..(offset-1)] + op.i + comment.op.c[offset...] 
- @_markAsDirty comment, "comment", "moved" + applyInsertToComments(op) { + return (() => { + const result = []; + for (let comment of Array.from(this.comments)) { + if (op.p <= comment.op.p) { + comment.op.p += op.i.length; + result.push(this._markAsDirty(comment, "comment", "moved")); + } else if (op.p < (comment.op.p + comment.op.c.length)) { + const offset = op.p - comment.op.p; + comment.op.c = comment.op.c.slice(0, +(offset-1) + 1 || undefined) + op.i + comment.op.c.slice(offset); + result.push(this._markAsDirty(comment, "comment", "moved")); + } else { + result.push(undefined); + } + } + return result; + })(); + } - applyDeleteToComments: (op) -> - op_start = op.p - op_length = op.d.length - op_end = op.p + op_length - for comment in @comments - comment_start = comment.op.p - comment_end = comment.op.p + comment.op.c.length - comment_length = comment_end - comment_start - if op_end <= comment_start - # delete is fully before comment - comment.op.p -= op_length - @_markAsDirty comment, "comment", "moved" - else if op_start >= comment_end - # delete is fully after comment, nothing to do - else - # delete and comment overlap - if op_start <= comment_start - remaining_before = "" - else - remaining_before = comment.op.c.slice(0, op_start - comment_start) - if op_end >= comment_end - remaining_after = "" - else - remaining_after = comment.op.c.slice(op_end - comment_start) + applyDeleteToComments(op) { + const op_start = op.p; + const op_length = op.d.length; + const op_end = op.p + op_length; + return (() => { + const result = []; + for (let comment of Array.from(this.comments)) { + const comment_start = comment.op.p; + const comment_end = comment.op.p + comment.op.c.length; + const comment_length = comment_end - comment_start; + if (op_end <= comment_start) { + // delete is fully before comment + comment.op.p -= op_length; + result.push(this._markAsDirty(comment, "comment", "moved")); + } else if (op_start >= comment_end) { + // delete is fully after comment, nothing to do + } else { + // delete and comment overlap + var remaining_after, remaining_before; + if (op_start <= comment_start) { + remaining_before = ""; + } else { + remaining_before = comment.op.c.slice(0, op_start - comment_start); + } + if (op_end >= comment_end) { + remaining_after = ""; + } else { + remaining_after = comment.op.c.slice(op_end - comment_start); + } - # Check deleted content matches delete op - deleted_comment = comment.op.c.slice(remaining_before.length, comment_length - remaining_after.length) - offset = Math.max(0, comment_start - op_start) - deleted_op_content = op.d.slice(offset).slice(0, deleted_comment.length) - if deleted_comment != deleted_op_content - throw new Error("deleted content does not match comment content") + // Check deleted content matches delete op + const deleted_comment = comment.op.c.slice(remaining_before.length, comment_length - remaining_after.length); + const offset = Math.max(0, comment_start - op_start); + const deleted_op_content = op.d.slice(offset).slice(0, deleted_comment.length); + if (deleted_comment !== deleted_op_content) { + throw new Error("deleted content does not match comment content"); + } - comment.op.p = Math.min(comment_start, op_start) - comment.op.c = remaining_before + remaining_after - @_markAsDirty comment, "comment", "moved" + comment.op.p = Math.min(comment_start, op_start); + comment.op.c = remaining_before + remaining_after; + result.push(this._markAsDirty(comment, "comment", "moved")); + } + } + return result; + })(); + } - applyInsertToChanges: 
(op, metadata) -> - op_start = op.p - op_length = op.i.length - op_end = op.p + op_length - undoing = !!op.u + applyInsertToChanges(op, metadata) { + let change; + const op_start = op.p; + const op_length = op.i.length; + const op_end = op.p + op_length; + const undoing = !!op.u; - already_merged = false - previous_change = null - moved_changes = [] - remove_changes = [] - new_changes = [] + let already_merged = false; + let previous_change = null; + const moved_changes = []; + const remove_changes = []; + const new_changes = []; - for change, i in @changes - change_start = change.op.p + for (let i = 0; i < this.changes.length; i++) { + change = this.changes[i]; + const change_start = change.op.p; - if change.op.d? - # Shift any deletes after this along by the length of this insert - if op_start < change_start - change.op.p += op_length - moved_changes.push change - else if op_start == change_start - # If we are undoing, then we want to cancel any existing delete ranges if we can. - # Check if the insert matches the start of the delete, and just remove it from the delete instead if so. - if undoing and change.op.d.length >= op.i.length and change.op.d.slice(0, op.i.length) == op.i - change.op.d = change.op.d.slice(op.i.length) - change.op.p += op.i.length - if change.op.d == "" - remove_changes.push change - else - moved_changes.push change - already_merged = true - else - change.op.p += op_length - moved_changes.push change - else if change.op.i? - change_end = change_start + change.op.i.length - is_change_overlapping = (op_start >= change_start and op_start <= change_end) - - # Only merge inserts if they are from the same user - is_same_user = metadata.user_id == change.metadata.user_id - - # If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also - # an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. - # E.g. - # foo|<--- about to insert 'b' here - # inserted 'foo' --^ ^-- deleted 'bar' - # should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . - next_change = @changes[i+1] - is_op_adjacent_to_next_delete = next_change? and next_change.op.d? and op.p == change_end and next_change.op.p == op.p - will_op_cancel_next_delete = undoing and is_op_adjacent_to_next_delete and next_change.op.d.slice(0, op.i.length) == op.i - - # If there is a delete at the start of the insert, and we're inserting - # at the start, we SHOULDN'T merge since the delete acts as a partition. - # The previous op will be the delete, but it's already been shifted by this insert - # - # I.e. - # Originally: |-- existing insert --| - # | <- existing delete at same offset - # - # Now: |-- existing insert --| <- not shifted yet - # |-- this insert --|| <- existing delete shifted along to end of this op - # - # After: |-- existing insert --| - # |-- this insert --|| <- existing delete - # - # Without the delete, the inserts would be merged. - is_insert_blocked_by_delete = (previous_change? and previous_change.op.d? and previous_change.op.p == op_end) - - # If the insert is overlapping another insert, either at the beginning in the middle or touching the end, - # then we merge them into one. 
-        if @track_changes and
-            is_change_overlapping and
-            !is_insert_blocked_by_delete and
-            !already_merged and
-            !will_op_cancel_next_delete and
-            is_same_user
-          offset = op_start - change_start
-          change.op.i = change.op.i.slice(0, offset) + op.i + change.op.i.slice(offset)
-          change.metadata.ts = metadata.ts
-          already_merged = true
-          moved_changes.push change
-        else if op_start <= change_start
-          # If we're fully before the other insert we can just shift the other insert by our length.
-          # If they are touching, and should have been merged, they will have been above.
-          # If not merged above, then it must be blocked by a delete, and will be after this insert, so we shift it along as well
-          change.op.p += op_length
-          moved_changes.push change
-        else if (!is_same_user or !@track_changes) and change_start < op_start < change_end
-          # This user is inserting inside a change by another user, so we need to split the
-          # other user's change into one before and after this one.
-          offset = op_start - change_start
-          before_content = change.op.i.slice(0, offset)
-          after_content = change.op.i.slice(offset)
-
-          # The existing change can become the 'before' change
-          change.op.i = before_content
-          moved_changes.push change
-
-          # Create a new op afterwards
-          after_change = {
-            op: {
-              i: after_content
-              p: change_start + offset + op_length
+      if (change.op.d != null) {
+        // Shift any deletes after this along by the length of this insert
+        if (op_start < change_start) {
+          change.op.p += op_length;
+          moved_changes.push(change);
+        } else if (op_start === change_start) {
+          // If we are undoing, then we want to cancel any existing delete ranges if we can.
+          // Check if the insert matches the start of the delete, and just remove it from the delete instead if so.
+          if (undoing && (change.op.d.length >= op.i.length) && (change.op.d.slice(0, op.i.length) === op.i)) {
+            change.op.d = change.op.d.slice(op.i.length);
+            change.op.p += op.i.length;
+            if (change.op.d === "") {
+              remove_changes.push(change);
+            } else {
+              moved_changes.push(change);
            }
+            already_merged = true;
+          } else {
+            change.op.p += op_length;
+            moved_changes.push(change);
          }
+        }
+      } else if (change.op.i != null) {
+        var offset;
+        const change_end = change_start + change.op.i.length;
+        const is_change_overlapping = ((op_start >= change_start) && (op_start <= change_end));
+
+        // Only merge inserts if they are from the same user
+        const is_same_user = metadata.user_id === change.metadata.user_id;
+
+        // If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also
+        // an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete.
+        // E.g.
+        //   foo|<--- about to insert 'b' here
+        //   inserted 'foo' --^ ^-- deleted 'bar'
+        // should become just 'foo' not 'foob' (with the delete marker becoming just 'ar').
+        const next_change = this.changes[i+1];
+        const is_op_adjacent_to_next_delete = (next_change != null) && (next_change.op.d != null) && (op.p === change_end) && (next_change.op.p === op.p);
+        const will_op_cancel_next_delete = undoing && is_op_adjacent_to_next_delete && (next_change.op.d.slice(0, op.i.length) === op.i);
+
+        // If there is a delete at the start of the insert, and we're inserting
+        // at the start, we SHOULDN'T merge since the delete acts as a partition.
+        // The previous op will be the delete, but it's already been shifted by this insert
+        //
+        // I.e.
+        // Originally: |-- existing insert --|
+        //                                   | <- existing delete at same offset
+        //
+        // Now:        |-- existing insert --| <- not shifted yet
+        //             |-- this insert --|| <- existing delete shifted along to end of this op
+        //
+        // After:      |-- existing insert --|
+        //             |-- this insert --|| <- existing delete
+        //
+        // Without the delete, the inserts would be merged.
+        const is_insert_blocked_by_delete = ((previous_change != null) && (previous_change.op.d != null) && (previous_change.op.p === op_end));
+
+        // If the insert is overlapping another insert, either at the beginning, in the middle, or touching the end,
+        // then we merge them into one.
+        if (this.track_changes &&
+            is_change_overlapping &&
+            !is_insert_blocked_by_delete &&
+            !already_merged &&
+            !will_op_cancel_next_delete &&
+            is_same_user) {
+          offset = op_start - change_start;
+          change.op.i = change.op.i.slice(0, offset) + op.i + change.op.i.slice(offset);
+          change.metadata.ts = metadata.ts;
+          already_merged = true;
+          moved_changes.push(change);
+        } else if (op_start <= change_start) {
+          // If we're fully before the other insert we can just shift the other insert by our length.
+          // If they are touching, and should have been merged, they will have been above.
+          // If not merged above, then it must be blocked by a delete, and will be after this insert, so we shift it along as well
+          change.op.p += op_length;
+          moved_changes.push(change);
+        } else if ((!is_same_user || !this.track_changes) && (change_start < op_start && op_start < change_end)) {
+          // This user is inserting inside a change by another user, so we need to split the
+          // other user's change into one before and after this one.
+          offset = op_start - change_start;
+          const before_content = change.op.i.slice(0, offset);
+          const after_content = change.op.i.slice(offset);
-          previous_change = change
+          // The existing change can become the 'before' change
+          change.op.i = before_content;
+          moved_changes.push(change);
+
+          // Create a new op afterwards
+          const after_change = {
+            op: {
+              i: after_content,
+              p: change_start + offset + op_length
+            },
+            metadata: {}
+          };
+          for (let key in change.metadata) { const value = change.metadata[key]; after_change.metadata[key] = value; }
+          new_changes.push(after_change);
+        }
+      }
+
+      previous_change = change;
+    }
-      if @track_changes and !already_merged
-        @_addOp op, metadata
-      for {op, metadata} in new_changes
-        @_addOp op, metadata
+    if (this.track_changes && !already_merged) {
+      this._addOp(op, metadata);
+    }
+    for ({op, metadata} of Array.from(new_changes)) {
+      this._addOp(op, metadata);
+    }
-      for change in remove_changes
-        @_removeChange change
+    for (change of Array.from(remove_changes)) {
+      this._removeChange(change);
+    }
-      for change in moved_changes
-        @_markAsDirty change, "change", "moved"
+    return (() => {
+      const result = [];
+      for (change of Array.from(moved_changes)) {
+        result.push(this._markAsDirty(change, "change", "moved"));
+      }
+      return result;
+    })();
+  }
-    applyDeleteToChanges: (op, metadata) ->
-      op_start = op.p
-      op_length = op.d.length
-      op_end = op.p + op_length
-      remove_changes = []
-      moved_changes = []
+  applyDeleteToChanges(op, metadata) {
+    let change;
+    const op_start = op.p;
+    const op_length = op.d.length;
+    const op_end = op.p + op_length;
+    const remove_changes = [];
+    let moved_changes = [];
-      # We might end up modifying our delete op if it merges with existing deletes, or cancels out
-      # with an existing insert.
Since we might do multiple modifications, we record them and do - # all the modifications after looping through the existing changes, so as not to mess up the - # offset indexes as we go. - op_modifications = [] - for change in @changes - if change.op.i? - change_start = change.op.p - change_end = change_start + change.op.i.length - if op_end <= change_start - # Shift ops after us back by our length - change.op.p -= op_length - moved_changes.push change - else if op_start >= change_end - # Delete is after insert, nothing to do - else - # When the new delete overlaps an insert, we should remove the part of the insert that - # is now deleted, and also remove the part of the new delete that overlapped. I.e. - # the two cancel out where they overlap. - if op_start >= change_start - # |-- existing insert --| - # insert_remaining_before -> |.....||-- new delete --| - delete_remaining_before = "" - insert_remaining_before = change.op.i.slice(0, op_start - change_start) - else - # delete_remaining_before -> |.....||-- existing insert --| - # |-- new delete --| - delete_remaining_before = op.d.slice(0, change_start - op_start) - insert_remaining_before = "" - - if op_end <= change_end - # |-- existing insert --| - # |-- new delete --||.....| <- insert_remaining_after - delete_remaining_after = "" - insert_remaining_after = change.op.i.slice(op_end - change_start) - else - # |-- existing insert --||.....| <- delete_remaining_after - # |-- new delete --| - delete_remaining_after = op.d.slice(change_end - op_start) - insert_remaining_after = "" - - insert_remaining = insert_remaining_before + insert_remaining_after - if insert_remaining.length > 0 - change.op.i = insert_remaining - change.op.p = Math.min(change_start, op_start) - change.metadata.ts = metadata.ts - moved_changes.push change - else - remove_changes.push change - - # We know what we want to preserve of our delete op before (delete_remaining_before) and what we want to preserve - # afterwards (delete_remaining_before). Now we need to turn that into a modification which deletes the - # chunk in the middle not covered by these. - delete_removed_length = op.d.length - delete_remaining_before.length - delete_remaining_after.length - delete_removed_start = delete_remaining_before.length - modification = { - d: op.d.slice(delete_removed_start, delete_removed_start + delete_removed_length) - p: delete_removed_start + // We might end up modifying our delete op if it merges with existing deletes, or cancels out + // with an existing insert. Since we might do multiple modifications, we record them and do + // all the modifications after looping through the existing changes, so as not to mess up the + // offset indexes as we go. + const op_modifications = []; + for (change of Array.from(this.changes)) { + var change_start; + if (change.op.i != null) { + change_start = change.op.p; + const change_end = change_start + change.op.i.length; + if (op_end <= change_start) { + // Shift ops after us back by our length + change.op.p -= op_length; + moved_changes.push(change); + } else if (op_start >= change_end) { + // Delete is after insert, nothing to do + } else { + // When the new delete overlaps an insert, we should remove the part of the insert that + // is now deleted, and also remove the part of the new delete that overlapped. I.e. + // the two cancel out where they overlap. 
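+            // For example (an added sketch, not an original comment): with an existing
+            // insert {i: "abc", p: 5} and a new delete {d: "bc", p: 6}, the overlapping
+            // "bc" cancels out -- the insert shrinks to {i: "a", p: 5} and no part of
+            // the delete is left over to track.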
+            var delete_remaining_after, delete_remaining_before, insert_remaining_after, insert_remaining_before;
+            if (op_start >= change_start) {
+              //                         |-- existing insert --|
+              // insert_remaining_before -> |.....||-- new delete --|
+              delete_remaining_before = "";
+              insert_remaining_before = change.op.i.slice(0, op_start - change_start);
+            } else {
+              // delete_remaining_before -> |.....||-- existing insert --|
+              //                            |-- new delete --|
+              delete_remaining_before = op.d.slice(0, change_start - op_start);
+              insert_remaining_before = "";
            }
+
+            if (op_end <= change_end) {
+              // |-- existing insert --|
+              // |-- new delete --||.....| <- insert_remaining_after
+              delete_remaining_after = "";
+              insert_remaining_after = change.op.i.slice(op_end - change_start);
+            } else {
+              // |-- existing insert --||.....| <- delete_remaining_after
+              // |-- new delete --|
+              delete_remaining_after = op.d.slice(change_end - op_start);
+              insert_remaining_after = "";
+            }
+
+            const insert_remaining = insert_remaining_before + insert_remaining_after;
+            if (insert_remaining.length > 0) {
+              change.op.i = insert_remaining;
+              change.op.p = Math.min(change_start, op_start);
+              change.metadata.ts = metadata.ts;
+              moved_changes.push(change);
+            } else {
+              remove_changes.push(change);
+            }
+
+            // We know what we want to preserve of our delete op before (delete_remaining_before) and what we want to preserve
+            // afterwards (delete_remaining_after). Now we need to turn that into a modification which deletes the
+            // chunk in the middle not covered by these.
+            const delete_removed_length = op.d.length - delete_remaining_before.length - delete_remaining_after.length;
+            const delete_removed_start = delete_remaining_before.length;
+            const modification = {
+              d: op.d.slice(delete_removed_start, delete_removed_start + delete_removed_length),
+              p: delete_removed_start
+            };
+            if (modification.d.length > 0) {
+              op_modifications.push(modification);
+            }
+          }
+        } else if (change.op.d != null) {
+          change_start = change.op.p;
+          if ((op_end < change_start) || (!this.track_changes && (op_end === change_start))) {
+            // Shift ops after us back by our length.
+            // If we're tracking changes, it must be strictly before, since we'll merge
+            // below if they are touching. Otherwise, touching is fine.
+            change.op.p -= op_length;
+            moved_changes.push(change);
+          } else if (op_start <= change_start && change_start <= op_end) {
+            if (this.track_changes) {
+              // If we overlap a delete, add it in our content, and delete the existing change.
+              // It's easier to do it this way, rather than modifying the existing delete in case
+              // we overlap many deletes and we'd need to track that. We have a workaround to
+              // update the delete in place if possible below.
+              const offset = change_start - op_start;
+              op_modifications.push({ i: change.op.d, p: offset });
+              remove_changes.push(change);
+            } else {
+              change.op.p = op_start;
+              moved_changes.push(change);
+            }
+          }
+        }
+      }
+
+      // Copy rather than modify because we still need to apply it to comments
       op = {
+        p: op.p,
+        d: this._applyOpModifications(op.d, op_modifications)
+      };
+
+      for (change of Array.from(remove_changes)) {
+        // This is a bit of a hack to avoid removing one delete and replacing it with another.
+        // If we don't do this, it causes the UI to flicker
+        if ((op.d.length > 0) && (change.op.d != null) && (op.p <= change.op.p && change.op.p <= op.p + op.d.length)) {
+          change.op.p = op.p;
+          change.op.d = op.d;
+          change.metadata = metadata;
+          moved_changes.push(change);
+          op.d = ""; // stop it being added
+        } else {
+          this._removeChange(change);
+        }
       }
-      for change in remove_changes
-        # This is a bit of hack to avoid removing one delete and replacing it with another.
-        # If we don't do this, it causes the UI to flicker
-        if op.d.length > 0 and change.op.d? and op.p <= change.op.p <= op.p + op.d.length
-          change.op.p = op.p
-          change.op.d = op.d
-          change.metadata = metadata
-          moved_changes.push change
-          op.d = "" # stop it being added
-        else
-          @_removeChange change
-
-      if @track_changes and op.d.length > 0
-        @_addOp op, metadata
-      else
-        # It's possible that we deleted an insert between two other inserts. I.e.
-        # If we delete 'user_2 insert' in:
-        #   |-- user_1 insert --||-- user_2 insert --||-- user_1 insert --|
-        # it becomes:
-        #   |-- user_1 insert --||-- user_1 insert --|
-        # We need to merge these together again
-        results = @_scanAndMergeAdjacentUpdates()
-        moved_changes = moved_changes.concat(results.moved_changes)
-        for change in results.remove_changes
-          @_removeChange change
-          moved_changes = moved_changes.filter (c) -> c != change
-
-      for change in moved_changes
-        @_markAsDirty change, "change", "moved"
-
-    _addOp: (op, metadata) ->
-      change = {
-        id: @newId()
-        op: @_clone(op) # Don't take a reference to the existing op since we'll modify this in place with future changes
-        metadata: @_clone(metadata)
+      if (this.track_changes && (op.d.length > 0)) {
+        this._addOp(op, metadata);
+      } else {
+        // It's possible that we deleted an insert between two other inserts. I.e.
+        // If we delete 'user_2 insert' in:
+        //   |-- user_1 insert --||-- user_2 insert --||-- user_1 insert --|
+        // it becomes:
+        //   |-- user_1 insert --||-- user_1 insert --|
+        // We need to merge these together again
+        const results = this._scanAndMergeAdjacentUpdates();
+        moved_changes = moved_changes.concat(results.moved_changes);
+        for (change of Array.from(results.remove_changes)) {
+          this._removeChange(change);
+          moved_changes = moved_changes.filter(c => c !== change);
+        }
       }
-      @changes.push change
-
-      # Keep ops in order of offset, with deletes before inserts
-      @changes.sort (c1, c2) ->
-        result = c1.op.p - c2.op.p
-        if result != 0
-          return result
-        else if c1.op.i? and c2.op.d?
- return 1 - else - return -1 - - @_markAsDirty(change, "change", "added") - - _removeChange: (change) -> - @changes = @changes.filter (c) -> c.id != change.id - @_markAsDirty change, "change", "removed" - _applyOpModifications: (content, op_modifications) -> - # Put in descending position order, with deleting first if at the same offset - # (Inserting first would modify the content that the delete will delete) - op_modifications.sort (a, b) -> - result = b.p - a.p - if result != 0 - return result - else if a.i? and b.d? - return 1 - else - return -1 + return (() => { + const result = []; + for (change of Array.from(moved_changes)) { + result.push(this._markAsDirty(change, "change", "moved")); + } + return result; + })(); + } - for modification in op_modifications - if modification.i? - content = content.slice(0, modification.p) + modification.i + content.slice(modification.p) - else if modification.d? - if content.slice(modification.p, modification.p + modification.d.length) != modification.d - throw new Error("deleted content does not match. content: #{JSON.stringify(content)}; modification: #{JSON.stringify(modification)}") - content = content.slice(0, modification.p) + content.slice(modification.p + modification.d.length) - return content + _addOp(op, metadata) { + const change = { + id: this.newId(), + op: this._clone(op), // Don't take a reference to the existing op since we'll modify this in place with future changes + metadata: this._clone(metadata) + }; + this.changes.push(change); + + // Keep ops in order of offset, with deletes before inserts + this.changes.sort(function(c1, c2) { + const result = c1.op.p - c2.op.p; + if (result !== 0) { + return result; + } else if ((c1.op.i != null) && (c2.op.d != null)) { + return 1; + } else { + return -1; + } + }); + + return this._markAsDirty(change, "change", "added"); + } - _scanAndMergeAdjacentUpdates: () -> - # This should only need calling when deleting an update between two - # other updates. There's no other way to get two adjacent updates from the - # same user, since they would be merged on insert. - previous_change = null - remove_changes = [] - moved_changes = [] - for change in @changes - if previous_change?.op.i? and change.op.i? - previous_change_end = previous_change.op.p + previous_change.op.i.length - previous_change_user_id = previous_change.metadata.user_id - change_start = change.op.p - change_user_id = change.metadata.user_id - if previous_change_end == change_start and previous_change_user_id == change_user_id - remove_changes.push change - previous_change.op.i += change.op.i - moved_changes.push previous_change - else if previous_change?.op.d? and change.op.d? and previous_change.op.p == change.op.p - # Merge adjacent deletes - previous_change.op.d += change.op.d - remove_changes.push change - moved_changes.push previous_change - else # Only update to the current change if we haven't removed it. 
- previous_change = change - return { moved_changes, remove_changes } + _removeChange(change) { + this.changes = this.changes.filter(c => c.id !== change.id); + return this._markAsDirty(change, "change", "removed"); + } + + _applyOpModifications(content, op_modifications) { + // Put in descending position order, with deleting first if at the same offset + // (Inserting first would modify the content that the delete will delete) + op_modifications.sort(function(a, b) { + const result = b.p - a.p; + if (result !== 0) { + return result; + } else if ((a.i != null) && (b.d != null)) { + return 1; + } else { + return -1; + } + }); + + for (let modification of Array.from(op_modifications)) { + if (modification.i != null) { + content = content.slice(0, modification.p) + modification.i + content.slice(modification.p); + } else if (modification.d != null) { + if (content.slice(modification.p, modification.p + modification.d.length) !== modification.d) { + throw new Error(`deleted content does not match. content: ${JSON.stringify(content)}; modification: ${JSON.stringify(modification)}`); + } + content = content.slice(0, modification.p) + content.slice(modification.p + modification.d.length); + } + } + return content; + } - resetDirtyState: () -> - @_dirtyState = { + _scanAndMergeAdjacentUpdates() { + // This should only need calling when deleting an update between two + // other updates. There's no other way to get two adjacent updates from the + // same user, since they would be merged on insert. + let previous_change = null; + const remove_changes = []; + const moved_changes = []; + for (let change of Array.from(this.changes)) { + if (((previous_change != null ? previous_change.op.i : undefined) != null) && (change.op.i != null)) { + const previous_change_end = previous_change.op.p + previous_change.op.i.length; + const previous_change_user_id = previous_change.metadata.user_id; + const change_start = change.op.p; + const change_user_id = change.metadata.user_id; + if ((previous_change_end === change_start) && (previous_change_user_id === change_user_id)) { + remove_changes.push(change); + previous_change.op.i += change.op.i; + moved_changes.push(previous_change); + } + } else if (((previous_change != null ? previous_change.op.d : undefined) != null) && (change.op.d != null) && (previous_change.op.p === change.op.p)) { + // Merge adjacent deletes + previous_change.op.d += change.op.d; + remove_changes.push(change); + moved_changes.push(previous_change); + } else { // Only update to the current change if we haven't removed it. + previous_change = change; + } + } + return { moved_changes, remove_changes }; + } + + resetDirtyState() { + return this._dirtyState = { comment: { - moved: {} - removed: {} + moved: {}, + removed: {}, added: {} - } + }, change: { - moved: {} - removed: {} + moved: {}, + removed: {}, added: {} } - } + }; + } - getDirtyState: () -> - return @_dirtyState + getDirtyState() { + return this._dirtyState; + } - _markAsDirty: (object, type, action) -> - @_dirtyState[type][action][object.id] = object + _markAsDirty(object, type, action) { + return this._dirtyState[type][action][object.id] = object; + } - _clone: (object) -> - clone = {} - (clone[k] = v for k,v of object) - return clone + _clone(object) { + const clone = {}; + for (let k in object) { const v = object[k]; clone[k] = v; } + return clone; + } + }; +}; -if define? 
- define [], load -else - module.exports = load() +if (typeof define !== 'undefined' && define !== null) { + define([], load); +} else { + module.exports = load(); +} diff --git a/services/document-updater/app/coffee/RateLimitManager.js b/services/document-updater/app/coffee/RateLimitManager.js index 7128b5d988..534fdade92 100644 --- a/services/document-updater/app/coffee/RateLimitManager.js +++ b/services/document-updater/app/coffee/RateLimitManager.js @@ -1,39 +1,58 @@ -Settings = require('settings-sharelatex') -logger = require('logger-sharelatex') -Metrics = require('./Metrics') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RateLimiter; +const Settings = require('settings-sharelatex'); +const logger = require('logger-sharelatex'); +const Metrics = require('./Metrics'); -module.exports = class RateLimiter +module.exports = (RateLimiter = class RateLimiter { - constructor: (number = 10) -> - @ActiveWorkerCount = 0 - @CurrentWorkerLimit = number - @BaseWorkerCount = number + constructor(number) { + if (number == null) { number = 10; } + this.ActiveWorkerCount = 0; + this.CurrentWorkerLimit = number; + this.BaseWorkerCount = number; + } - _adjustLimitUp: () -> - @CurrentWorkerLimit += 0.1 # allow target worker limit to increase gradually - Metrics.gauge "currentLimit", Math.ceil(@CurrentWorkerLimit) + _adjustLimitUp() { + this.CurrentWorkerLimit += 0.1; // allow target worker limit to increase gradually + return Metrics.gauge("currentLimit", Math.ceil(this.CurrentWorkerLimit)); + } - _adjustLimitDown: () -> - @CurrentWorkerLimit = Math.max @BaseWorkerCount, (@CurrentWorkerLimit * 0.9) - logger.log {currentLimit: Math.ceil(@CurrentWorkerLimit)}, "reducing rate limit" - Metrics.gauge "currentLimit", Math.ceil(@CurrentWorkerLimit) + _adjustLimitDown() { + this.CurrentWorkerLimit = Math.max(this.BaseWorkerCount, (this.CurrentWorkerLimit * 0.9)); + logger.log({currentLimit: Math.ceil(this.CurrentWorkerLimit)}, "reducing rate limit"); + return Metrics.gauge("currentLimit", Math.ceil(this.CurrentWorkerLimit)); + } - _trackAndRun: (task, callback = () ->) -> - @ActiveWorkerCount++ - Metrics.gauge "processingUpdates", @ActiveWorkerCount - task (err) => - @ActiveWorkerCount-- - Metrics.gauge "processingUpdates", @ActiveWorkerCount - callback(err) + _trackAndRun(task, callback) { + if (callback == null) { callback = function() {}; } + this.ActiveWorkerCount++; + Metrics.gauge("processingUpdates", this.ActiveWorkerCount); + return task(err => { + this.ActiveWorkerCount--; + Metrics.gauge("processingUpdates", this.ActiveWorkerCount); + return callback(err); + }); + } - run: (task, callback) -> - if @ActiveWorkerCount < @CurrentWorkerLimit - @_trackAndRun task # below the limit, just put the task in the background - callback() # return immediately - if @CurrentWorkerLimit > @BaseWorkerCount - @_adjustLimitDown() - else - logger.log {active: @ActiveWorkerCount, currentLimit: Math.ceil(@CurrentWorkerLimit)}, "hit rate limit" - @_trackAndRun task, (err) => - @_adjustLimitUp() if !err? 
# don't increment rate limit if there was an error - callback(err) # only return after task completes + run(task, callback) { + if (this.ActiveWorkerCount < this.CurrentWorkerLimit) { + this._trackAndRun(task); // below the limit, just put the task in the background + callback(); // return immediately + if (this.CurrentWorkerLimit > this.BaseWorkerCount) { + return this._adjustLimitDown(); + } + } else { + logger.log({active: this.ActiveWorkerCount, currentLimit: Math.ceil(this.CurrentWorkerLimit)}, "hit rate limit"); + return this._trackAndRun(task, err => { + if ((err == null)) { this._adjustLimitUp(); } // don't increment rate limit if there was an error + return callback(err); + }); // only return after task completes + } + } +}); diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.js b/services/document-updater/app/coffee/RealTimeRedisManager.js index d26bf8ff8f..b3d7a65680 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.js +++ b/services/document-updater/app/coffee/RealTimeRedisManager.js @@ -1,52 +1,73 @@ -Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub) -Keys = Settings.redis.documentupdater.key_schema -logger = require('logger-sharelatex') -os = require "os" -crypto = require "crypto" -metrics = require('./Metrics') +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RealTimeRedisManager; +const Settings = require('settings-sharelatex'); +const rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater); +const pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub); +const Keys = Settings.redis.documentupdater.key_schema; +const logger = require('logger-sharelatex'); +const os = require("os"); +const crypto = require("crypto"); +const metrics = require('./Metrics'); -HOST = os.hostname() -RND = crypto.randomBytes(4).toString('hex') # generate a random key for this process -COUNT = 0 +const HOST = os.hostname(); +const RND = crypto.randomBytes(4).toString('hex'); // generate a random key for this process +let COUNT = 0; -MAX_OPS_PER_ITERATION = 8 # process a limited number of ops for safety +const MAX_OPS_PER_ITERATION = 8; // process a limited number of ops for safety -module.exports = RealTimeRedisManager = - getPendingUpdatesForDoc : (doc_id, callback)-> - multi = rclient.multi() - multi.lrange Keys.pendingUpdates({doc_id}), 0, (MAX_OPS_PER_ITERATION-1) - multi.ltrim Keys.pendingUpdates({doc_id}), MAX_OPS_PER_ITERATION, -1 - multi.exec (error, replys) -> - return callback(error) if error? 
- jsonUpdates = replys[0] - for jsonUpdate in jsonUpdates - # record metric for each update removed from queue - metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"} - updates = [] - for jsonUpdate in jsonUpdates - try - update = JSON.parse jsonUpdate - catch e - return callback e - updates.push update - callback error, updates +module.exports = (RealTimeRedisManager = { + getPendingUpdatesForDoc(doc_id, callback){ + const multi = rclient.multi(); + multi.lrange(Keys.pendingUpdates({doc_id}), 0, (MAX_OPS_PER_ITERATION-1)); + multi.ltrim(Keys.pendingUpdates({doc_id}), MAX_OPS_PER_ITERATION, -1); + return multi.exec(function(error, replys) { + let jsonUpdate; + if (error != null) { return callback(error); } + const jsonUpdates = replys[0]; + for (jsonUpdate of Array.from(jsonUpdates)) { + // record metric for each update removed from queue + metrics.summary("redis.pendingUpdates", jsonUpdate.length, {status: "pop"}); + } + const updates = []; + for (jsonUpdate of Array.from(jsonUpdates)) { + var update; + try { + update = JSON.parse(jsonUpdate); + } catch (e) { + return callback(e); + } + updates.push(update); + } + return callback(error, updates); + }); + }, - getUpdatesLength: (doc_id, callback)-> - rclient.llen Keys.pendingUpdates({doc_id}), callback + getUpdatesLength(doc_id, callback){ + return rclient.llen(Keys.pendingUpdates({doc_id}), callback); + }, - sendData: (data) -> - # create a unique message id using a counter - message_id = "doc:#{HOST}:#{RND}-#{COUNT++}" - data?._id = message_id + sendData(data) { + // create a unique message id using a counter + const message_id = `doc:${HOST}:${RND}-${COUNT++}`; + if (data != null) { + data._id = message_id; + } - blob = JSON.stringify(data) - metrics.summary "redis.publish.applied-ops", blob.length + const blob = JSON.stringify(data); + metrics.summary("redis.publish.applied-ops", blob.length); - # publish on separate channels for individual projects and docs when - # configured (needs realtime to be configured for this too). - if Settings.publishOnIndividualChannels - pubsubClient.publish "applied-ops:#{data.doc_id}", blob - else - pubsubClient.publish "applied-ops", blob + // publish on separate channels for individual projects and docs when + // configured (needs realtime to be configured for this too). 
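+ // For example (illustrative, not part of this patch): with publishOnIndividualChannels enabled, an op for doc_id "abc123" is published on "applied-ops:abc123" instead of the shared "applied-ops" channel, so a subscriber can listen per doc rather than filter the single firehose channel.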
+ if (Settings.publishOnIndividualChannels) { + return pubsubClient.publish(`applied-ops:${data.doc_id}`, blob); + } else { + return pubsubClient.publish("applied-ops", blob); + } + } +}); diff --git a/services/document-updater/app/coffee/RedisManager.js b/services/document-updater/app/coffee/RedisManager.js index 3eeed78ffb..f434dfc9d4 100644 --- a/services/document-updater/app/coffee/RedisManager.js +++ b/services/document-updater/app/coffee/RedisManager.js @@ -1,376 +1,484 @@ -Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -logger = require('logger-sharelatex') -metrics = require('./Metrics') -Errors = require "./Errors" -crypto = require "crypto" -async = require "async" -ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS201: Simplify complex destructure assignments + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RedisManager; +const Settings = require('settings-sharelatex'); +const rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater); +const logger = require('logger-sharelatex'); +const metrics = require('./Metrics'); +const Errors = require("./Errors"); +const crypto = require("crypto"); +const async = require("async"); +const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager"); -# Sometimes Redis calls take an unexpectedly long time. We have to be -# quick with Redis calls because we're holding a lock that expires -# after 30 seconds. We can't let any errors in the rest of the stack -# hold us up, and need to bail out quickly if there is a problem. -MAX_REDIS_REQUEST_LENGTH = 5000 # 5 seconds +// Sometimes Redis calls take an unexpectedly long time. We have to be +// quick with Redis calls because we're holding a lock that expires +// after 30 seconds. We can't let any errors in the rest of the stack +// hold us up, and need to bail out quickly if there is a problem. +const MAX_REDIS_REQUEST_LENGTH = 5000; // 5 seconds -# Make times easy to read -minutes = 60 # seconds for Redis expire +// Make times easy to read +const minutes = 60; // seconds for Redis expire -logHashErrors = Settings.documentupdater?.logHashErrors -logHashReadErrors = logHashErrors?.read +const logHashErrors = Settings.documentupdater != null ? Settings.documentupdater.logHashErrors : undefined; +const logHashReadErrors = logHashErrors != null ? 
logHashErrors.read : undefined; -MEGABYTES = 1024 * 1024 -MAX_RANGES_SIZE = 3 * MEGABYTES +const MEGABYTES = 1024 * 1024; +const MAX_RANGES_SIZE = 3 * MEGABYTES; -keys = Settings.redis.documentupdater.key_schema -historyKeys = Settings.redis.history.key_schema # note: this is track changes, not project-history +const keys = Settings.redis.documentupdater.key_schema; +const historyKeys = Settings.redis.history.key_schema; // note: this is track changes, not project-history -module.exports = RedisManager = - rclient: rclient +module.exports = (RedisManager = { + rclient, - putDocInMemory : (project_id, doc_id, docLines, version, ranges, pathname, projectHistoryId, _callback)-> - timer = new metrics.Timer("redis.put-doc") - callback = (error) -> - timer.done() - _callback(error) - docLines = JSON.stringify(docLines) - if docLines.indexOf("\u0000") != -1 - error = new Error("null bytes found in doc lines") - # this check was added to catch memory corruption in JSON.stringify. - # It sometimes returned null bytes at the end of the string. - logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message - return callback(error) - docHash = RedisManager._computeHash(docLines) - # record bytes sent to redis - metrics.summary "redis.docLines", docLines.length, {status: "set"} - logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis" - RedisManager._serializeRanges ranges, (error, ranges) -> - if error? - logger.error {err: error, doc_id, project_id}, error.message - return callback(error) - multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), docLines - multi.set keys.projectKey({doc_id:doc_id}), project_id - multi.set keys.docVersion(doc_id:doc_id), version - multi.set keys.docHash(doc_id:doc_id), docHash - if ranges? - multi.set keys.ranges(doc_id:doc_id), ranges - else - multi.del keys.ranges(doc_id:doc_id) - multi.set keys.pathname(doc_id:doc_id), pathname - multi.set keys.projectHistoryId(doc_id:doc_id), projectHistoryId - multi.exec (error, result) -> - return callback(error) if error? - # update docsInProject set - rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback + putDocInMemory(project_id, doc_id, docLines, version, ranges, pathname, projectHistoryId, _callback){ + const timer = new metrics.Timer("redis.put-doc"); + const callback = function(error) { + timer.done(); + return _callback(error); + }; + docLines = JSON.stringify(docLines); + if (docLines.indexOf("\u0000") !== -1) { + const error = new Error("null bytes found in doc lines"); + // this check was added to catch memory corruption in JSON.stringify. + // It sometimes returned null bytes at the end of the string. 
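+ // (A raw null byte should be impossible here: JSON.stringify escapes any U+0000 inside string values as the six-character sequence \u0000, so a literal null byte in its output is treated as a sign of corruption.)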
+ logger.error({err: error, doc_id, docLines}, error.message); + return callback(error); + } + const docHash = RedisManager._computeHash(docLines); + // record bytes sent to redis + metrics.summary("redis.docLines", docLines.length, {status: "set"}); + logger.log({project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis"); + return RedisManager._serializeRanges(ranges, function(error, ranges) { + if (error != null) { + logger.error({err: error, doc_id, project_id}, error.message); + return callback(error); + } + const multi = rclient.multi(); + multi.set(keys.docLines({doc_id}), docLines); + multi.set(keys.projectKey({doc_id}), project_id); + multi.set(keys.docVersion({doc_id}), version); + multi.set(keys.docHash({doc_id}), docHash); + if (ranges != null) { + multi.set(keys.ranges({doc_id}), ranges); + } else { + multi.del(keys.ranges({doc_id})); + } + multi.set(keys.pathname({doc_id}), pathname); + multi.set(keys.projectHistoryId({doc_id}), projectHistoryId); + return multi.exec(function(error, result) { + if (error != null) { return callback(error); } + // update docsInProject set + return rclient.sadd(keys.docsInProject({project_id}), doc_id, callback); + }); + }); + }, - removeDocFromMemory : (project_id, doc_id, _callback)-> - logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis" - callback = (err) -> - if err? - logger.err project_id:project_id, doc_id:doc_id, err:err, "error removing doc from redis" - _callback(err) - else - logger.log project_id:project_id, doc_id:doc_id, "removed doc from redis" - _callback() + removeDocFromMemory(project_id, doc_id, _callback){ + logger.log({project_id, doc_id}, "removing doc from redis"); + const callback = function(err) { + if (err != null) { + logger.err({project_id, doc_id, err}, "error removing doc from redis"); + return _callback(err); + } else { + logger.log({project_id, doc_id}, "removed doc from redis"); + return _callback(); + } + }; - multi = rclient.multi() - multi.strlen keys.docLines(doc_id:doc_id) - multi.del keys.docLines(doc_id:doc_id) - multi.del keys.projectKey(doc_id:doc_id) - multi.del keys.docVersion(doc_id:doc_id) - multi.del keys.docHash(doc_id:doc_id) - multi.del keys.ranges(doc_id:doc_id) - multi.del keys.pathname(doc_id:doc_id) - multi.del keys.projectHistoryId(doc_id:doc_id) - multi.del keys.projectHistoryType(doc_id:doc_id) - multi.del keys.unflushedTime(doc_id:doc_id) - multi.del keys.lastUpdatedAt(doc_id: doc_id) - multi.del keys.lastUpdatedBy(doc_id: doc_id) - multi.exec (error, response) -> - return callback(error) if error? - length = response?[0] - if length > 0 - # record bytes freed in redis - metrics.summary "redis.docLines", length, {status: "del"} - multi = rclient.multi() - multi.srem keys.docsInProject(project_id:project_id), doc_id - multi.del keys.projectState(project_id:project_id) - multi.exec callback + let multi = rclient.multi(); + multi.strlen(keys.docLines({doc_id})); + multi.del(keys.docLines({doc_id})); + multi.del(keys.projectKey({doc_id})); + multi.del(keys.docVersion({doc_id})); + multi.del(keys.docHash({doc_id})); + multi.del(keys.ranges({doc_id})); + multi.del(keys.pathname({doc_id})); + multi.del(keys.projectHistoryId({doc_id})); + multi.del(keys.projectHistoryType({doc_id})); + multi.del(keys.unflushedTime({doc_id})); + multi.del(keys.lastUpdatedAt({doc_id})); + multi.del(keys.lastUpdatedBy({doc_id})); + return multi.exec(function(error, response) { + if (error != null) { return callback(error); } + const length = response != null ? 
response[0] : undefined; + if (length > 0) { + // record bytes freed in redis + metrics.summary("redis.docLines", length, {status: "del"}); + } + multi = rclient.multi(); + multi.srem(keys.docsInProject({project_id}), doc_id); + multi.del(keys.projectState({project_id})); + return multi.exec(callback); + }); + }, - checkOrSetProjectState: (project_id, newState, callback = (error, stateChanged) ->) -> - multi = rclient.multi() - multi.getset keys.projectState(project_id:project_id), newState - multi.expire keys.projectState(project_id:project_id), 30 * minutes - multi.exec (error, response) -> - return callback(error) if error? - logger.log project_id: project_id, newState:newState, oldState: response[0], "checking project state" - callback(null, response[0] isnt newState) + checkOrSetProjectState(project_id, newState, callback) { + if (callback == null) { callback = function(error, stateChanged) {}; } + const multi = rclient.multi(); + multi.getset(keys.projectState({project_id}), newState); + multi.expire(keys.projectState({project_id}), 30 * minutes); + return multi.exec(function(error, response) { + if (error != null) { return callback(error); } + logger.log({project_id, newState, oldState: response[0]}, "checking project state"); + return callback(null, response[0] !== newState); + }); + }, - clearProjectState: (project_id, callback = (error) ->) -> - rclient.del keys.projectState(project_id:project_id), callback + clearProjectState(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + return rclient.del(keys.projectState({project_id}), callback); + }, - getDoc : (project_id, doc_id, callback = (error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) ->)-> - timer = new metrics.Timer("redis.get-doc") - multi = rclient.multi() - multi.get keys.docLines(doc_id:doc_id) - multi.get keys.docVersion(doc_id:doc_id) - multi.get keys.docHash(doc_id:doc_id) - multi.get keys.projectKey(doc_id:doc_id) - multi.get keys.ranges(doc_id:doc_id) - multi.get keys.pathname(doc_id:doc_id) - multi.get keys.projectHistoryId(doc_id:doc_id) - multi.get keys.unflushedTime(doc_id:doc_id) - multi.get keys.lastUpdatedAt(doc_id: doc_id) - multi.get keys.lastUpdatedBy(doc_id: doc_id) - multi.exec (error, [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy])-> - timeSpan = timer.done() - return callback(error) if error? - # check if request took too long and bail out. only do this for - # get, because it is the first call in each update, so if this - # passes we'll assume others have a reasonable chance to succeed. - if timeSpan > MAX_REDIS_REQUEST_LENGTH - error = new Error("redis getDoc exceeded timeout") - return callback(error) - # record bytes loaded from redis - if docLines? - metrics.summary "redis.docLines", docLines.length, {status: "get"} - # check sha1 hash value if present - if docLines? and storedHash? 
- computedHash = RedisManager._computeHash(docLines) - if logHashReadErrors and computedHash isnt storedHash - logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, computedHash: computedHash, storedHash: storedHash, docLines:docLines, "hash mismatch on retrieved document" + getDoc(project_id, doc_id, callback){ + if (callback == null) { callback = function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) {}; } + const timer = new metrics.Timer("redis.get-doc"); + const multi = rclient.multi(); + multi.get(keys.docLines({doc_id})); + multi.get(keys.docVersion({doc_id})); + multi.get(keys.docHash({doc_id})); + multi.get(keys.projectKey({doc_id})); + multi.get(keys.ranges({doc_id})); + multi.get(keys.pathname({doc_id})); + multi.get(keys.projectHistoryId({doc_id})); + multi.get(keys.unflushedTime({doc_id})); + multi.get(keys.lastUpdatedAt({doc_id})); + multi.get(keys.lastUpdatedBy({doc_id})); + return multi.exec(function(error, ...rest){ + let [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy] = Array.from(rest[0]); + const timeSpan = timer.done(); + if (error != null) { return callback(error); } + // check if request took too long and bail out. only do this for + // get, because it is the first call in each update, so if this + // passes we'll assume others have a reasonable chance to succeed. + if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { + error = new Error("redis getDoc exceeded timeout"); + return callback(error); + } + // record bytes loaded from redis + if (docLines != null) { + metrics.summary("redis.docLines", docLines.length, {status: "get"}); + } + // check sha1 hash value if present + if ((docLines != null) && (storedHash != null)) { + const computedHash = RedisManager._computeHash(docLines); + if (logHashReadErrors && (computedHash !== storedHash)) { + logger.error({project_id, doc_id, doc_project_id, computedHash, storedHash, docLines}, "hash mismatch on retrieved document"); + } + } - try - docLines = JSON.parse docLines - ranges = RedisManager._deserializeRanges(ranges) - catch e - return callback(e) + try { + docLines = JSON.parse(docLines); + ranges = RedisManager._deserializeRanges(ranges); + } catch (e) { + return callback(e); + } - version = parseInt(version or 0, 10) - # check doc is in requested project - if doc_project_id? and doc_project_id isnt project_id - logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc not in project" - return callback(new Errors.NotFoundError("document not found")) + version = parseInt(version || 0, 10); + // check doc is in requested project + if ((doc_project_id != null) && (doc_project_id !== project_id)) { + logger.error({project_id, doc_id, doc_project_id}, "doc not in project"); + return callback(new Errors.NotFoundError("document not found")); + } - if projectHistoryId? - projectHistoryId = parseInt(projectHistoryId) + if (projectHistoryId != null) { + projectHistoryId = parseInt(projectHistoryId); + } - # doc is not in redis, bail out - if !docLines? 
- return callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy + // doc is not in redis, bail out + if ((docLines == null)) { + return callback(null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy); + } - # doc should be in project set, check if missing (workaround for missing docs from putDoc) - rclient.sadd keys.docsInProject(project_id:project_id), doc_id, (error, result) -> - return callback(error) if error? - if result isnt 0 # doc should already be in set - logger.error project_id: project_id, doc_id: doc_id, doc_project_id: doc_project_id, "doc missing from docsInProject set" - callback null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy + // doc should be in project set, check if missing (workaround for missing docs from putDoc) + return rclient.sadd(keys.docsInProject({project_id}), doc_id, function(error, result) { + if (error != null) { return callback(error); } + if (result !== 0) { // doc should already be in set + logger.error({project_id, doc_id, doc_project_id}, "doc missing from docsInProject set"); + } + return callback(null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy); + }); + }); + }, - getDocVersion: (doc_id, callback = (error, version, projectHistoryType) ->) -> - rclient.mget keys.docVersion(doc_id: doc_id), keys.projectHistoryType(doc_id:doc_id), (error, result) -> - return callback(error) if error? - [version, projectHistoryType] = result || [] - version = parseInt(version, 10) - callback null, version, projectHistoryType + getDocVersion(doc_id, callback) { + if (callback == null) { callback = function(error, version, projectHistoryType) {}; } + return rclient.mget(keys.docVersion({doc_id}), keys.projectHistoryType({doc_id}), function(error, result) { + if (error != null) { return callback(error); } + let [version, projectHistoryType] = Array.from(result || []); + version = parseInt(version, 10); + return callback(null, version, projectHistoryType); + }); + }, - getDocLines: (doc_id, callback = (error, version) ->) -> - rclient.get keys.docLines(doc_id: doc_id), (error, docLines) -> - return callback(error) if error? - callback null, docLines + getDocLines(doc_id, callback) { + if (callback == null) { callback = function(error, version) {}; } + return rclient.get(keys.docLines({doc_id}), function(error, docLines) { + if (error != null) { return callback(error); } + return callback(null, docLines); + }); + }, - getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) -> - timer = new metrics.Timer("redis.get-prev-docops") - rclient.llen keys.docOps(doc_id: doc_id), (error, length) -> - return callback(error) if error? - rclient.get keys.docVersion(doc_id: doc_id), (error, version) -> - return callback(error) if error? 
- version = parseInt(version, 10) - first_version_in_redis = version - length + getPreviousDocOps(doc_id, start, end, callback) { + if (callback == null) { callback = function(error, jsonOps) {}; } + const timer = new metrics.Timer("redis.get-prev-docops"); + return rclient.llen(keys.docOps({doc_id}), function(error, length) { + if (error != null) { return callback(error); } + return rclient.get(keys.docVersion({doc_id}), function(error, version) { + if (error != null) { return callback(error); } + version = parseInt(version, 10); + const first_version_in_redis = version - length; - if start < first_version_in_redis or end > version - error = new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis") - logger.warn {err: error, doc_id, length, version, start, end}, "doc ops range is not loaded in redis" - return callback(error) + if ((start < first_version_in_redis) || (end > version)) { + error = new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis"); + logger.warn({err: error, doc_id, length, version, start, end}, "doc ops range is not loaded in redis"); + return callback(error); + } - start = start - first_version_in_redis - if end > -1 - end = end - first_version_in_redis + start = start - first_version_in_redis; + if (end > -1) { + end = end - first_version_in_redis; + } - if isNaN(start) or isNaN(end) - error = new Error("inconsistent version or lengths") - logger.error {err: error, doc_id, length, version, start, end}, "inconsistent version or length" - return callback(error) + if (isNaN(start) || isNaN(end)) { + error = new Error("inconsistent version or lengths"); + logger.error({err: error, doc_id, length, version, start, end}, "inconsistent version or length"); + return callback(error); + } - rclient.lrange keys.docOps(doc_id: doc_id), start, end, (error, jsonOps) -> - return callback(error) if error? - try - ops = jsonOps.map (jsonOp) -> JSON.parse jsonOp - catch e - return callback(e) - timeSpan = timer.done() - if timeSpan > MAX_REDIS_REQUEST_LENGTH - error = new Error("redis getPreviousDocOps exceeded timeout") - return callback(error) - callback null, ops + return rclient.lrange(keys.docOps({doc_id}), start, end, function(error, jsonOps) { + let ops; + if (error != null) { return callback(error); } + try { + ops = jsonOps.map(jsonOp => JSON.parse(jsonOp)); + } catch (e) { + return callback(e); + } + const timeSpan = timer.done(); + if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { + error = new Error("redis getPreviousDocOps exceeded timeout"); + return callback(error); + } + return callback(null, ops); + }); + }); + }); + }, - getHistoryType: (doc_id, callback = (error, projectHistoryType) ->) -> - rclient.get keys.projectHistoryType(doc_id:doc_id), (error, projectHistoryType) -> - return callback(error) if error? 
- callback null, projectHistoryType + getHistoryType(doc_id, callback) { + if (callback == null) { callback = function(error, projectHistoryType) {}; } + return rclient.get(keys.projectHistoryType({doc_id}), function(error, projectHistoryType) { + if (error != null) { return callback(error); } + return callback(null, projectHistoryType); + }); + }, - setHistoryType: (doc_id, projectHistoryType, callback = (error) ->) -> - rclient.set keys.projectHistoryType(doc_id:doc_id), projectHistoryType, callback + setHistoryType(doc_id, projectHistoryType, callback) { + if (callback == null) { callback = function(error) {}; } + return rclient.set(keys.projectHistoryType({doc_id}), projectHistoryType, callback); + }, - DOC_OPS_TTL: 60 * minutes - DOC_OPS_MAX_LENGTH: 100 - updateDocument : (project_id, doc_id, docLines, newVersion, appliedOps = [], ranges, updateMeta, callback = (error) ->)-> - RedisManager.getDocVersion doc_id, (error, currentVersion, projectHistoryType) -> - return callback(error) if error? - if currentVersion + appliedOps.length != newVersion - error = new Error("Version mismatch. '#{doc_id}' is corrupted.") - logger.error {err: error, doc_id, currentVersion, newVersion, opsLength: appliedOps.length}, "version mismatch" - return callback(error) + DOC_OPS_TTL: 60 * minutes, + DOC_OPS_MAX_LENGTH: 100, + updateDocument(project_id, doc_id, docLines, newVersion, appliedOps, ranges, updateMeta, callback){ + if (appliedOps == null) { appliedOps = []; } + if (callback == null) { callback = function(error) {}; } + return RedisManager.getDocVersion(doc_id, function(error, currentVersion, projectHistoryType) { + if (error != null) { return callback(error); } + if ((currentVersion + appliedOps.length) !== newVersion) { + error = new Error(`Version mismatch. 
'${doc_id}' is corrupted.`); + logger.error({err: error, doc_id, currentVersion, newVersion, opsLength: appliedOps.length}, "version mismatch"); + return callback(error); + } - jsonOps = appliedOps.map (op) -> JSON.stringify op - for op in jsonOps - if op.indexOf("\u0000") != -1 - error = new Error("null bytes found in jsonOps") - # this check was added to catch memory corruption in JSON.stringify - logger.error {err: error, doc_id: doc_id, jsonOps: jsonOps}, error.message - return callback(error) + const jsonOps = appliedOps.map(op => JSON.stringify(op)); + for (let op of Array.from(jsonOps)) { + if (op.indexOf("\u0000") !== -1) { + error = new Error("null bytes found in jsonOps"); + // this check was added to catch memory corruption in JSON.stringify + logger.error({err: error, doc_id, jsonOps}, error.message); + return callback(error); + } + } - newDocLines = JSON.stringify(docLines) - if newDocLines.indexOf("\u0000") != -1 - error = new Error("null bytes found in doc lines") - # this check was added to catch memory corruption in JSON.stringify - logger.error {err: error, doc_id: doc_id, newDocLines: newDocLines}, error.message - return callback(error) - newHash = RedisManager._computeHash(newDocLines) + const newDocLines = JSON.stringify(docLines); + if (newDocLines.indexOf("\u0000") !== -1) { + error = new Error("null bytes found in doc lines"); + // this check was added to catch memory corruption in JSON.stringify + logger.error({err: error, doc_id, newDocLines}, error.message); + return callback(error); + } + const newHash = RedisManager._computeHash(newDocLines); - opVersions = appliedOps.map (op) -> op?.v - logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis" - # record bytes sent to redis in update - metrics.summary "redis.docLines", newDocLines.length, {status: "update"} - RedisManager._serializeRanges ranges, (error, ranges) -> - if error? - logger.error {err: error, doc_id}, error.message - return callback(error) - if ranges? and ranges.indexOf("\u0000") != -1 - error = new Error("null bytes found in ranges") - # this check was added to catch memory corruption in JSON.stringify - logger.error err: error, doc_id: doc_id, ranges: ranges, error.message - return callback(error) - multi = rclient.multi() - multi.set keys.docLines(doc_id:doc_id), newDocLines # index 0 - multi.set keys.docVersion(doc_id:doc_id), newVersion # index 1 - multi.set keys.docHash(doc_id:doc_id), newHash # index 2 - multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 # index 3 - if ranges? - multi.set keys.ranges(doc_id:doc_id), ranges # index 4 - else - multi.del keys.ranges(doc_id:doc_id) # also index 4 - # push the ops last so we can get the lengths at fixed index position 7 - if jsonOps.length > 0 - multi.rpush keys.docOps(doc_id: doc_id), jsonOps... # index 5 - # expire must come after rpush since before it will be a no-op if the list is empty - multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL # index 6 - if projectHistoryType is "project-history" - metrics.inc 'history-queue', 1, {status: 'skip-track-changes'} - logger.log {doc_id}, "skipping push of uncompressed ops for project using project-history" - else - # project is using old track-changes history service - metrics.inc 'history-queue', 1, {status: 'track-changes'} - multi.rpush historyKeys.uncompressedHistoryOps(doc_id: doc_id), jsonOps... 
# index 7 - # Set the unflushed timestamp to the current time if the doc - # hasn't been modified before (the content in mongo has been - # valid up to this point). Otherwise leave it alone ("NX" flag). - multi.set keys.unflushedTime(doc_id: doc_id), Date.now(), "NX" - multi.set keys.lastUpdatedAt(doc_id: doc_id), Date.now() # index 8 - if updateMeta?.user_id - multi.set keys.lastUpdatedBy(doc_id: doc_id), updateMeta.user_id # index 9 - else - multi.del keys.lastUpdatedBy(doc_id: doc_id) # index 9 - multi.exec (error, result) -> - return callback(error) if error? + const opVersions = appliedOps.map(op => op != null ? op.v : undefined); + logger.log({doc_id, version: newVersion, hash: newHash, op_versions: opVersions}, "updating doc in redis"); + // record bytes sent to redis in update + metrics.summary("redis.docLines", newDocLines.length, {status: "update"}); + return RedisManager._serializeRanges(ranges, function(error, ranges) { + if (error != null) { + logger.error({err: error, doc_id}, error.message); + return callback(error); + } + if ((ranges != null) && (ranges.indexOf("\u0000") !== -1)) { + error = new Error("null bytes found in ranges"); + // this check was added to catch memory corruption in JSON.stringify + logger.error({err: error, doc_id, ranges}, error.message); + return callback(error); + } + const multi = rclient.multi(); + multi.set(keys.docLines({doc_id}), newDocLines); // index 0 + multi.set(keys.docVersion({doc_id}), newVersion); // index 1 + multi.set(keys.docHash({doc_id}), newHash); // index 2 + multi.ltrim(keys.docOps({doc_id}), -RedisManager.DOC_OPS_MAX_LENGTH, -1); // index 3 + if (ranges != null) { + multi.set(keys.ranges({doc_id}), ranges); // index 4 + } else { + multi.del(keys.ranges({doc_id})); // also index 4 + } + // push the ops last so we can get the lengths at fixed index position 7 + if (jsonOps.length > 0) { + multi.rpush(keys.docOps({doc_id}), ...Array.from(jsonOps)); // index 5 + // expire must come after rpush: expiring a nonexistent (empty) list is a no-op + multi.expire(keys.docOps({doc_id}), RedisManager.DOC_OPS_TTL); // index 6 + if (projectHistoryType === "project-history") { + metrics.inc('history-queue', 1, {status: 'skip-track-changes'}); + logger.log({doc_id}, "skipping push of uncompressed ops for project using project-history"); + } else { + // project is using old track-changes history service + metrics.inc('history-queue', 1, {status: 'track-changes'}); + multi.rpush(historyKeys.uncompressedHistoryOps({doc_id}), ...Array.from(jsonOps)); // index 7 + } + // Set the unflushed timestamp to the current time if the doc + // hasn't been modified before (the content in mongo has been + // valid up to this point). Otherwise leave it alone ("NX" flag). + multi.set(keys.unflushedTime({doc_id}), Date.now(), "NX"); + multi.set(keys.lastUpdatedAt({doc_id}), Date.now()); // index 8 + if ((updateMeta != null ?
updateMeta.user_id : undefined)) { + multi.set(keys.lastUpdatedBy({doc_id}), updateMeta.user_id); // index 9 + } else { + multi.del(keys.lastUpdatedBy({doc_id})); // index 9 + } + } + return multi.exec(function(error, result) { + let docUpdateCount; + if (error != null) { return callback(error); } - if projectHistoryType is 'project-history' - docUpdateCount = undefined # only using project history, don't bother with track-changes - else - # project is using old track-changes history service - docUpdateCount = result[7] # length of uncompressedHistoryOps queue (index 7) + if (projectHistoryType === 'project-history') { + docUpdateCount = undefined; // only using project history, don't bother with track-changes + } else { + // project is using old track-changes history service + docUpdateCount = result[7]; // length of uncompressedHistoryOps queue (index 7) + } - if jsonOps.length > 0 && Settings.apis?.project_history?.enabled - metrics.inc 'history-queue', 1, {status: 'project-history'} - ProjectHistoryRedisManager.queueOps project_id, jsonOps..., (error, projectUpdateCount) -> - callback null, docUpdateCount, projectUpdateCount - else - callback null, docUpdateCount + if ((jsonOps.length > 0) && __guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) { + metrics.inc('history-queue', 1, {status: 'project-history'}); + return ProjectHistoryRedisManager.queueOps(project_id, ...Array.from(jsonOps), (error, projectUpdateCount) => callback(null, docUpdateCount, projectUpdateCount)); + } else { + return callback(null, docUpdateCount); + } + }); + }); + }); + }, - renameDoc: (project_id, doc_id, user_id, update, projectHistoryId, callback = (error) ->) -> - RedisManager.getDoc project_id, doc_id, (error, lines, version) -> - return callback(error) if error? + renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback) { + if (callback == null) { callback = function(error) {}; } + return RedisManager.getDoc(project_id, doc_id, function(error, lines, version) { + if (error != null) { return callback(error); } - if lines? and version? - rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) -> - return callback(error) if error? 
- ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback - else - ProjectHistoryRedisManager.queueRenameEntity project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback + if ((lines != null) && (version != null)) { + return rclient.set(keys.pathname({doc_id}), update.newPathname, function(error) { + if (error != null) { return callback(error); } + return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback); + }); + } else { + return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback); + } + }); + }, - clearUnflushedTime: (doc_id, callback = (error) ->) -> - rclient.del keys.unflushedTime(doc_id:doc_id), callback + clearUnflushedTime(doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + return rclient.del(keys.unflushedTime({doc_id}), callback); + }, - getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) -> - rclient.smembers keys.docsInProject(project_id: project_id), callback + getDocIdsInProject(project_id, callback) { + if (callback == null) { callback = function(error, doc_ids) {}; } + return rclient.smembers(keys.docsInProject({project_id}), callback); + }, - getDocTimestamps: (doc_ids, callback = (error, result) ->) -> - # get lastupdatedat timestamps for an array of doc_ids - async.mapSeries doc_ids, (doc_id, cb) -> - rclient.get keys.lastUpdatedAt(doc_id: doc_id), cb - , callback + getDocTimestamps(doc_ids, callback) { + // get lastupdatedat timestamps for an array of doc_ids + if (callback == null) { callback = function(error, result) {}; } + return async.mapSeries(doc_ids, (doc_id, cb) => rclient.get(keys.lastUpdatedAt({doc_id}), cb) + , callback); + }, - queueFlushAndDeleteProject: (project_id, callback) -> - # store the project id in a sorted set ordered by time with a random offset to smooth out spikes - SMOOTHING_OFFSET = if Settings.smoothingOffset > 0 then Math.round(Settings.smoothingOffset * Math.random()) else 0 - rclient.zadd keys.flushAndDeleteQueue(), Date.now() + SMOOTHING_OFFSET, project_id, callback + queueFlushAndDeleteProject(project_id, callback) { + // store the project id in a sorted set ordered by time with a random offset to smooth out spikes + const SMOOTHING_OFFSET = Settings.smoothingOffset > 0 ? Math.round(Settings.smoothingOffset * Math.random()) : 0; + return rclient.zadd(keys.flushAndDeleteQueue(), Date.now() + SMOOTHING_OFFSET, project_id, callback); + }, - getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp)->) -> - # find the oldest queued flush that is before the cutoff time - rclient.zrangebyscore keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, (err, reply) -> - return callback(err) if err? - return callback() if !reply?.length # return if no projects ready to be processed - # pop the oldest entry (get and remove in a multi) - multi = rclient.multi() - # Poor man's version of ZPOPMIN, which is only available in Redis 5. - multi.zrange keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES" - multi.zremrangebyrank keys.flushAndDeleteQueue(), 0, 0 - multi.zcard keys.flushAndDeleteQueue() # the total length of the queue (for metrics) - multi.exec (err, reply) -> - return callback(err) if err? 
- return callback() if !reply?.length - [key, timestamp] = reply[0] - queueLength = reply[2] - callback(null, key, timestamp, queueLength) + getNextProjectToFlushAndDelete(cutoffTime, callback) { + // find the oldest queued flush that is before the cutoff time + if (callback == null) { callback = function(error, key, timestamp){}; } + return rclient.zrangebyscore(keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, function(err, reply) { + if (err != null) { return callback(err); } + if (!(reply != null ? reply.length : undefined)) { return callback(); } // return if no projects ready to be processed + // pop the oldest entry (get and remove in a multi) + const multi = rclient.multi(); + // Poor man's version of ZPOPMIN, which is only available in Redis 5. + multi.zrange(keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES"); + multi.zremrangebyrank(keys.flushAndDeleteQueue(), 0, 0); + multi.zcard(keys.flushAndDeleteQueue()); // the total length of the queue (for metrics) + return multi.exec(function(err, reply) { + if (err != null) { return callback(err); } + if (!(reply != null ? reply.length : undefined)) { return callback(); } + const [key, timestamp] = Array.from(reply[0]); + const queueLength = reply[2]; + return callback(null, key, timestamp, queueLength); + }); + }); + }, - _serializeRanges: (ranges, callback = (error, serializedRanges) ->) -> - jsonRanges = JSON.stringify(ranges) - if jsonRanges? and jsonRanges.length > MAX_RANGES_SIZE - return callback new Error("ranges are too large") - if jsonRanges == '{}' - # Most doc will have empty ranges so don't fill redis with lots of '{}' keys - jsonRanges = null - return callback null, jsonRanges + _serializeRanges(ranges, callback) { + if (callback == null) { callback = function(error, serializedRanges) {}; } + let jsonRanges = JSON.stringify(ranges); + if ((jsonRanges != null) && (jsonRanges.length > MAX_RANGES_SIZE)) { + return callback(new Error("ranges are too large")); + } + if (jsonRanges === '{}') { + // Most docs will have empty ranges, so don't fill redis with lots of '{}' keys + jsonRanges = null; + } + return callback(null, jsonRanges); + }, - _deserializeRanges: (ranges) -> - if !ranges? or ranges == "" - return {} - else - return JSON.parse(ranges) + _deserializeRanges(ranges) { + if ((ranges == null) || (ranges === "")) { + return {}; + } else { + return JSON.parse(ranges); + } + }, - _computeHash: (docLines) -> - # use sha1 checksum of doclines to detect data corruption. - # - # note: must specify 'utf8' encoding explicitly, as the default is - # binary in node < v5 - return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex') + _computeHash(docLines) { + // use sha1 checksum of doclines to detect data corruption. + // + // note: must specify 'utf8' encoding explicitly, as the default is + // binary in node < v5 + return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex'); + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; } \ No newline at end of file
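A note on the __guard__ helper emitted above: decaffeinate generates it as a pre-ES2020 stand-in for optional chaining (its suggestion DS103 hints at rewriting it away). A minimal, runnable sketch of the equivalence, using a sample settings object rather than the service's real one:

function __guard__(value, transform) {
  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
const Settings = { apis: { project_history: { enabled: true } } }; // sample data only
// decaffeinate's output:
const viaGuard = __guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled);
// the equivalent ES2020 form (Node 14+):
const viaChaining = Settings.apis?.project_history?.enabled;
console.log(viaGuard, viaChaining); // true true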
diff --git a/services/document-updater/app/coffee/ShareJsDB.js b/services/document-updater/app/coffee/ShareJsDB.js index 3e5dfe303f..5b313cee96 100644 --- a/services/document-updater/app/coffee/ShareJsDB.js +++ b/services/document-updater/app/coffee/ShareJsDB.js @@ -1,44 +1,64 @@ -Keys = require('./UpdateKeys') -RedisManager = require "./RedisManager" -Errors = require "./Errors" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ShareJsDB; +const Keys = require('./UpdateKeys'); +const RedisManager = require("./RedisManager"); +const Errors = require("./Errors"); -module.exports = class ShareJsDB - constructor: (@project_id, @doc_id, @lines, @version) -> - @appliedOps = {} - # ShareJS calls this detacted from the instance, so we need - # bind it to keep our context that can access @appliedOps - @writeOp = @_writeOp.bind(@) +module.exports = (ShareJsDB = class ShareJsDB { + constructor(project_id, doc_id, lines, version) { + this.project_id = project_id; + this.doc_id = doc_id; + this.lines = lines; + this.version = version; + this.appliedOps = {}; + // ShareJS calls this detached from the instance, so we need + // to bind it to keep our context that can access @appliedOps + this.writeOp = this._writeOp.bind(this); + } - getOps: (doc_key, start, end, callback) -> - if start == end - return callback null, [] + getOps(doc_key, start, end, callback) { + if (start === end) { + return callback(null, []); + } - # In redis, lrange values are inclusive. - if end? - end-- - else - end = -1 + // In redis, lrange values are inclusive. + if (end != null) { + end--; + } else { + end = -1; + } - [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) - RedisManager.getPreviousDocOps doc_id, start, end, callback + const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key)); + return RedisManager.getPreviousDocOps(doc_id, start, end, callback); + } - _writeOp: (doc_key, opData, callback) -> - @appliedOps[doc_key] ?= [] - @appliedOps[doc_key].push opData - callback() + _writeOp(doc_key, opData, callback) { + if (this.appliedOps[doc_key] == null) { this.appliedOps[doc_key] = []; } + this.appliedOps[doc_key].push(opData); + return callback(); + } - getSnapshot: (doc_key, callback) -> - if doc_key != Keys.combineProjectIdAndDocId(@project_id, @doc_id) - return callback(new Errors.NotFoundError("unexpected doc_key #{doc_key}, expected #{Keys.combineProjectIdAndDocId(@project_id, @doc_id)}")) - else - return callback null, { - snapshot: @lines.join("\n") - v: parseInt(@version, 10) + getSnapshot(doc_key, callback) { + if (doc_key !== Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)) { + return callback(new Errors.NotFoundError(`unexpected doc_key ${doc_key}, expected ${Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)}`)); + } else { + return callback(null, { + snapshot: this.lines.join("\n"), + v: parseInt(this.version, 10), type: "text" - } + }); + } + } - # To be able to remove a doc from the ShareJS memory - # we need to called Model::delete, which calls this - # method on the database. However, we will handle removing - # it from Redis ourselves - delete: (docName, dbMeta, callback) -> callback() + // To be able to remove a doc from the ShareJS memory + // we need to call Model::delete, which calls this + // method on the database. However, we will handle removing + // it from Redis ourselves + delete(docName, dbMeta, callback) { return callback(); } +});
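ShareJsDB above is a thin adapter: ShareJS reads snapshots and past ops through it, while written ops are only buffered in appliedOps rather than persisted. A minimal sketch of the snapshot round-trip, with illustrative ids, lines and version (it assumes the module is loaded inside this service, since requiring it pulls in RedisManager and the settings):

const ShareJsDB = require("./ShareJsDB");
const db = new ShareJsDB("project-1", "doc-1", ["Hello", "world"], 5);
db.getSnapshot("project-1:doc-1", (error, data) => {
  // data => { snapshot: "Hello\nworld", v: 5, type: "text" }
  console.log(error, data);
});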
diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.js b/services/document-updater/app/coffee/ShareJsUpdateManager.js index 856a4d4a37..82eb6923b0 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.js +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.js @@ -1,80 +1,102 @@ -ShareJsModel = require "./sharejs/server/model" -ShareJsDB = require "./ShareJsDB" -logger = require "logger-sharelatex" -Settings = require('settings-sharelatex') -Keys = require "./UpdateKeys" -{EventEmitter} = require "events" -util = require "util" -RealTimeRedisManager = require "./RealTimeRedisManager" -crypto = require "crypto" -metrics = require('./Metrics') -Errors = require("./Errors") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ShareJsUpdateManager; +const ShareJsModel = require("./sharejs/server/model"); +const ShareJsDB = require("./ShareJsDB"); +const logger = require("logger-sharelatex"); +const Settings = require('settings-sharelatex'); +const Keys = require("./UpdateKeys"); +const {EventEmitter} = require("events"); +const util = require("util"); +const RealTimeRedisManager = require("./RealTimeRedisManager"); +const crypto = require("crypto"); +const metrics = require('./Metrics'); +const Errors = require("./Errors"); -ShareJsModel:: = {} -util.inherits ShareJsModel, EventEmitter +ShareJsModel.prototype = {}; +util.inherits(ShareJsModel, EventEmitter); -MAX_AGE_OF_OP = 80 +const MAX_AGE_OF_OP = 80; -module.exports = ShareJsUpdateManager = - getNewShareJsModel: (project_id, doc_id, lines, version) -> - db = new ShareJsDB(project_id, doc_id, lines, version) - model = new ShareJsModel(db, maxDocLength: Settings.max_doc_length, maximumAge: MAX_AGE_OF_OP) - model.db = db - return model +module.exports = (ShareJsUpdateManager = { + getNewShareJsModel(project_id, doc_id, lines, version) { + const db = new ShareJsDB(project_id, doc_id, lines, version); + const model = new ShareJsModel(db, {maxDocLength: Settings.max_doc_length, maximumAge: MAX_AGE_OF_OP}); + model.db = db; + return model; + }, - applyUpdate: (project_id, doc_id, update, lines, version, callback = (error, updatedDocLines) ->) -> - logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates" - jobs = [] - # record the update version before it is modified - incomingUpdateVersion = update.v - # We could use a global model for all docs, but we're hitting issues with the - # internal state of ShareJS not being accessible for clearing caches, and - # getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee) - # This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on - # my 2009 MBP). - model = @getNewShareJsModel(project_id, doc_id, lines, version) - @_listenForOps(model) - doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id) - model.applyOp doc_key, update, (error) -> - if error?
- if error == "Op already submitted" - metrics.inc "sharejs.already-submitted" - logger.warn {project_id, doc_id, update}, "op has already been submitted" - update.dup = true - ShareJsUpdateManager._sendOp(project_id, doc_id, update) - else if /^Delete component/.test(error) - metrics.inc "sharejs.delete-mismatch" - logger.warn {project_id, doc_id, update, shareJsErr: error}, "sharejs delete does not match" - error = new Errors.DeleteMismatchError("Delete component does not match") - return callback(error) - else - metrics.inc "sharejs.other-error" - return callback(error) - logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update" - model.getSnapshot doc_key, (error, data) => - return callback(error) if error? - # only check hash when present and no other updates have been applied - if update.hash? and incomingUpdateVersion == version - ourHash = ShareJsUpdateManager._computeHash(data.snapshot) - if ourHash != update.hash - metrics.inc "sharejs.hash-fail" - return callback(new Error("Invalid hash")) - else - metrics.inc "sharejs.hash-pass", 0.001 - docLines = data.snapshot.split(/\r\n|\n|\r/) - callback(null, docLines, data.v, model.db.appliedOps[doc_key] or []) + applyUpdate(project_id, doc_id, update, lines, version, callback) { + if (callback == null) { callback = function(error, updatedDocLines) {}; } + logger.log({project_id, doc_id, update}, "applying sharejs updates"); + const jobs = []; + // record the update version before it is modified + const incomingUpdateVersion = update.v; + // We could use a global model for all docs, but we're hitting issues with the + // internal state of ShareJS not being accessible for clearing caches, and + // getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee) + // This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on + // my 2009 MBP). 
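+ // (Put differently: a throwaway model per update costs a little CPU but guarantees no ShareJS state survives between updates, so there is nothing to invalidate.)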
+ const model = this.getNewShareJsModel(project_id, doc_id, lines, version); + this._listenForOps(model); + const doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id); + return model.applyOp(doc_key, update, function(error) { + if (error != null) { + if (error === "Op already submitted") { + metrics.inc("sharejs.already-submitted"); + logger.warn({project_id, doc_id, update}, "op has already been submitted"); + update.dup = true; + ShareJsUpdateManager._sendOp(project_id, doc_id, update); + } else if (/^Delete component/.test(error)) { + metrics.inc("sharejs.delete-mismatch"); + logger.warn({project_id, doc_id, update, shareJsErr: error}, "sharejs delete does not match"); + error = new Errors.DeleteMismatchError("Delete component does not match"); + return callback(error); + } else { + metrics.inc("sharejs.other-error"); + return callback(error); + } + } + logger.log({project_id, doc_id, error}, "applied update"); + return model.getSnapshot(doc_key, (error, data) => { + if (error != null) { return callback(error); } + // only check hash when present and no other updates have been applied + if ((update.hash != null) && (incomingUpdateVersion === version)) { + const ourHash = ShareJsUpdateManager._computeHash(data.snapshot); + if (ourHash !== update.hash) { + metrics.inc("sharejs.hash-fail"); + return callback(new Error("Invalid hash")); + } else { + metrics.inc("sharejs.hash-pass", 0.001); + } + } + const docLines = data.snapshot.split(/\r\n|\n|\r/); + return callback(null, docLines, data.v, model.db.appliedOps[doc_key] || []); + }); + }); + }, - _listenForOps: (model) -> - model.on "applyOp", (doc_key, opData) -> - [project_id, doc_id] = Keys.splitProjectIdAndDocId(doc_key) - ShareJsUpdateManager._sendOp(project_id, doc_id, opData) + _listenForOps(model) { + return model.on("applyOp", function(doc_key, opData) { + const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key)); + return ShareJsUpdateManager._sendOp(project_id, doc_id, opData); + }); + }, - _sendOp: (project_id, doc_id, op) -> - RealTimeRedisManager.sendData {project_id, doc_id, op} + _sendOp(project_id, doc_id, op) { + return RealTimeRedisManager.sendData({project_id, doc_id, op}); + }, - _computeHash: (content) -> + _computeHash(content) { return crypto.createHash('sha1') .update("blob " + content.length + "\x00") .update(content, 'utf8') - .digest('hex') + .digest('hex'); + } +}); diff --git a/services/document-updater/app/coffee/SnapshotManager.js b/services/document-updater/app/coffee/SnapshotManager.js index 86670b648d..5f998096af 100644 --- a/services/document-updater/app/coffee/SnapshotManager.js +++ b/services/document-updater/app/coffee/SnapshotManager.js @@ -1,42 +1,62 @@ -{db, ObjectId} = require "./mongojs" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let SnapshotManager; +const {db, ObjectId} = require("./mongojs"); -module.exports = SnapshotManager = - recordSnapshot: (project_id, doc_id, version, pathname, lines, ranges, callback) -> - try - project_id = ObjectId(project_id) - doc_id = ObjectId(doc_id) - catch error - return callback(error) - db.docSnapshots.insert { +module.exports = (SnapshotManager = { + recordSnapshot(project_id, doc_id, version, pathname, lines, ranges, callback) { + try { + project_id = ObjectId(project_id); + 
doc_id = ObjectId(doc_id); + } catch (error) { + return callback(error); + } + return db.docSnapshots.insert({ project_id, doc_id, version, lines, pathname, ranges: SnapshotManager.jsonRangesToMongo(ranges), ts: new Date() - }, callback - # Suggested indexes: - # db.docSnapshots.createIndex({project_id:1, doc_id:1}) - # db.docSnapshots.createIndex({ts:1},{expiresAfterSeconds: 30*24*3600)) # expires after 30 days + }, callback); + }, + // Suggested indexes: + // db.docSnapshots.createIndex({project_id:1, doc_id:1}) + // db.docSnapshots.createIndex({ts:1}, {expiresAfterSeconds: 30*24*3600}) // expires after 30 days - jsonRangesToMongo: (ranges) -> - return null if !ranges? + jsonRangesToMongo(ranges) { + if ((ranges == null)) { return null; } - updateMetadata = (metadata) -> - if metadata?.ts? - metadata.ts = new Date(metadata.ts) - if metadata?.user_id? - metadata.user_id = SnapshotManager._safeObjectId(metadata.user_id) + const updateMetadata = function(metadata) { + if ((metadata != null ? metadata.ts : undefined) != null) { + metadata.ts = new Date(metadata.ts); + } + if ((metadata != null ? metadata.user_id : undefined) != null) { + return metadata.user_id = SnapshotManager._safeObjectId(metadata.user_id); + } + }; - for change in ranges.changes or [] - change.id = SnapshotManager._safeObjectId(change.id) - updateMetadata(change.metadata) - for comment in ranges.comments or [] - comment.id = SnapshotManager._safeObjectId(comment.id) - if comment.op?.t? - comment.op.t = SnapshotManager._safeObjectId(comment.op.t) - updateMetadata(comment.metadata) - return ranges + for (let change of Array.from(ranges.changes || [])) { + change.id = SnapshotManager._safeObjectId(change.id); + updateMetadata(change.metadata); + } + for (let comment of Array.from(ranges.comments || [])) { + comment.id = SnapshotManager._safeObjectId(comment.id); + if ((comment.op != null ?
comment.op.t : undefined) != null) { + comment.op.t = SnapshotManager._safeObjectId(comment.op.t); + } + updateMetadata(comment.metadata); + } + return ranges; + }, - _safeObjectId: (data) -> - try - return ObjectId(data) - catch error - return data + _safeObjectId(data) { + try { + return ObjectId(data); + } catch (error) { + return data; + } + } +}); diff --git a/services/document-updater/app/coffee/UpdateKeys.js b/services/document-updater/app/coffee/UpdateKeys.js index 7d1f279495..470be0ce4a 100644 --- a/services/document-updater/app/coffee/UpdateKeys.js +++ b/services/document-updater/app/coffee/UpdateKeys.js @@ -1,3 +1,4 @@ -module.exports = - combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}" - splitProjectIdAndDocId: (project_and_doc_id) -> project_and_doc_id.split(":") +module.exports = { + combineProjectIdAndDocId(project_id, doc_id) { return `${project_id}:${doc_id}`; }, + splitProjectIdAndDocId(project_and_doc_id) { return project_and_doc_id.split(":"); } +}; diff --git a/services/document-updater/app/coffee/UpdateManager.js b/services/document-updater/app/coffee/UpdateManager.js index e5ede11173..5151dfb4e7 100644 --- a/services/document-updater/app/coffee/UpdateManager.js +++ b/services/document-updater/app/coffee/UpdateManager.js @@ -1,170 +1,232 @@ -LockManager = require "./LockManager" -RedisManager = require "./RedisManager" -RealTimeRedisManager = require "./RealTimeRedisManager" -ShareJsUpdateManager = require "./ShareJsUpdateManager" -HistoryManager = require "./HistoryManager" -Settings = require('settings-sharelatex') -_ = require("lodash") -async = require("async") -logger = require('logger-sharelatex') -Metrics = require "./Metrics" -Errors = require "./Errors" -DocumentManager = require "./DocumentManager" -RangesManager = require "./RangesManager" -SnapshotManager = require "./SnapshotManager" -Profiler = require "./Profiler" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS201: Simplify complex destructure assignments + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let UpdateManager; +const LockManager = require("./LockManager"); +const RedisManager = require("./RedisManager"); +const RealTimeRedisManager = require("./RealTimeRedisManager"); +const ShareJsUpdateManager = require("./ShareJsUpdateManager"); +const HistoryManager = require("./HistoryManager"); +const Settings = require('settings-sharelatex'); +const _ = require("lodash"); +const async = require("async"); +const logger = require('logger-sharelatex'); +const Metrics = require("./Metrics"); +const Errors = require("./Errors"); +const DocumentManager = require("./DocumentManager"); +const RangesManager = require("./RangesManager"); +const SnapshotManager = require("./SnapshotManager"); +const Profiler = require("./Profiler"); -module.exports = UpdateManager = - processOutstandingUpdates: (project_id, doc_id, callback = (error) ->) -> - timer = new Metrics.Timer("updateManager.processOutstandingUpdates") - UpdateManager.fetchAndApplyUpdates project_id, doc_id, (error) -> - timer.done() - return callback(error) if error? 
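
A quick round-trip through the `UpdateKeys` helpers above (the hex ids here are made up; `split(":")` is safe because Mongo ObjectIds are 24 hex characters and can never contain a colon):

```js
const UpdateKeys = require('./UpdateKeys');

const key = UpdateKeys.combineProjectIdAndDocId(
  '507f1f77bcf86cd799439011', '507f191e810c19729de860ea');
console.log(key); // "507f1f77bcf86cd799439011:507f191e810c19729de860ea"

const [project_id, doc_id] = UpdateKeys.splitProjectIdAndDocId(key);
console.log(project_id, doc_id); // the two ids, recovered
```
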
- callback() +module.exports = (UpdateManager = { + processOutstandingUpdates(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + const timer = new Metrics.Timer("updateManager.processOutstandingUpdates"); + return UpdateManager.fetchAndApplyUpdates(project_id, doc_id, function(error) { + timer.done(); + if (error != null) { return callback(error); } + return callback(); + }); + }, - processOutstandingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> - profile = new Profiler("processOutstandingUpdatesWithLock", {project_id, doc_id}) - LockManager.tryLock doc_id, (error, gotLock, lockValue) => - return callback(error) if error? - return callback() if !gotLock - profile.log("tryLock") - UpdateManager.processOutstandingUpdates project_id, doc_id, (error) -> - return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? - profile.log("processOutstandingUpdates") - LockManager.releaseLock doc_id, lockValue, (error) => - return callback(error) if error? - profile.log("releaseLock").end() - UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id, callback + processOutstandingUpdatesWithLock(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + const profile = new Profiler("processOutstandingUpdatesWithLock", {project_id, doc_id}); + return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => { + if (error != null) { return callback(error); } + if (!gotLock) { return callback(); } + profile.log("tryLock"); + return UpdateManager.processOutstandingUpdates(project_id, doc_id, function(error) { + if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); } + profile.log("processOutstandingUpdates"); + return LockManager.releaseLock(doc_id, lockValue, error => { + if (error != null) { return callback(error); } + profile.log("releaseLock").end(); + return UpdateManager.continueProcessingUpdatesWithLock(project_id, doc_id, callback); + }); + }); + }); + }, - continueProcessingUpdatesWithLock: (project_id, doc_id, callback = (error) ->) -> - RealTimeRedisManager.getUpdatesLength doc_id, (error, length) => - return callback(error) if error? - if length > 0 - UpdateManager.processOutstandingUpdatesWithLock project_id, doc_id, callback - else - callback() + continueProcessingUpdatesWithLock(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + return RealTimeRedisManager.getUpdatesLength(doc_id, (error, length) => { + if (error != null) { return callback(error); } + if (length > 0) { + return UpdateManager.processOutstandingUpdatesWithLock(project_id, doc_id, callback); + } else { + return callback(); + } + }); + }, - fetchAndApplyUpdates: (project_id, doc_id, callback = (error) ->) -> - profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id}) - RealTimeRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) => - return callback(error) if error? 
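
The `processOutstandingUpdatesWithLock` / `continueProcessingUpdatesWithLock` pair above implements a lock-and-drain loop. As a condensed async/await paraphrase (the promisified `tryLock`, `releaseLock`, `processOutstandingUpdates` and `getUpdatesLength` below are hypothetical stand-ins, not the real callback APIs):

```js
// Sketch only: the real code is callback-based and releases the lock through
// _handleErrorInsideLock on the failure path before reporting the error.
async function processUntilDrained(project_id, doc_id) {
  const lockValue = await tryLock(doc_id);
  if (!lockValue) { return; } // another worker holds the lock and will drain
  try {
    await processOutstandingUpdates(project_id, doc_id);
  } finally {
    await releaseLock(doc_id, lockValue);
  }
  // Updates may have queued while we held the lock, so check and repeat.
  if ((await getUpdatesLength(doc_id)) > 0) {
    return processUntilDrained(project_id, doc_id);
  }
}
```
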
- logger.log {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates" - if updates.length == 0 - return callback() - profile.log("getPendingUpdatesForDoc") - doUpdate = (update, cb)-> - UpdateManager.applyUpdate project_id, doc_id, update, (err) -> - profile.log("applyUpdate") - cb(err) - finalCallback = (err) -> - profile.log("async done").end() - callback(err) - async.eachSeries updates, doUpdate, finalCallback + fetchAndApplyUpdates(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + const profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id}); + return RealTimeRedisManager.getPendingUpdatesForDoc(doc_id, (error, updates) => { + if (error != null) { return callback(error); } + logger.log({project_id, doc_id, count: updates.length}, "processing updates"); + if (updates.length === 0) { + return callback(); + } + profile.log("getPendingUpdatesForDoc"); + const doUpdate = (update, cb) => UpdateManager.applyUpdate(project_id, doc_id, update, function(err) { + profile.log("applyUpdate"); + return cb(err); + }); + const finalCallback = function(err) { + profile.log("async done").end(); + return callback(err); + }; + return async.eachSeries(updates, doUpdate, finalCallback); + }); + }, - applyUpdate: (project_id, doc_id, update, _callback = (error) ->) -> - callback = (error) -> - if error? - RealTimeRedisManager.sendData {project_id, doc_id, error: error.message || error} - profile.log("sendData") - profile.end() - _callback(error) + applyUpdate(project_id, doc_id, update, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const callback = function(error) { + if (error != null) { + RealTimeRedisManager.sendData({project_id, doc_id, error: error.message || error}); + profile.log("sendData"); + } + profile.end(); + return _callback(error); + }; - profile = new Profiler("applyUpdate", {project_id, doc_id}) - UpdateManager._sanitizeUpdate update - profile.log("sanitizeUpdate") - DocumentManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) -> - profile.log("getDoc") - return callback(error) if error? - if !lines? or !version? - return callback(new Errors.NotFoundError("document not found: #{doc_id}")) - previousVersion = version - ShareJsUpdateManager.applyUpdate project_id, doc_id, update, lines, version, (error, updatedDocLines, version, appliedOps) -> - profile.log("sharejs.applyUpdate") - return callback(error) if error? - RangesManager.applyUpdate project_id, doc_id, ranges, appliedOps, updatedDocLines, (error, new_ranges, ranges_were_collapsed) -> - UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines) - profile.log("RangesManager.applyUpdate") - return callback(error) if error? - RedisManager.updateDocument project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, update.meta, (error, doc_ops_length, project_ops_length) -> - profile.log("RedisManager.updateDocument") - return callback(error) if error? - HistoryManager.recordAndFlushHistoryOps project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, (error) -> - profile.log("recordAndFlushHistoryOps") - return callback(error) if error? 
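
`fetchAndApplyUpdates` above leans on `async.eachSeries` so that updates are applied strictly in arrival order. A tiny self-contained demonstration of that ordering guarantee:

```js
const async = require('async');

// eachSeries does not start item N+1 until item N's callback has fired,
// which is exactly what keeps OT updates applied sequentially.
const updates = [{ v: 41 }, { v: 42 }, { v: 43 }];
async.eachSeries(updates, (update, cb) => {
  console.log('applying version', update.v);
  setImmediate(cb); // stand-in for UpdateManager.applyUpdate
}, (err) => {
  if (err) { throw err; }
  console.log('all updates applied, in order');
});
```
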
- if ranges_were_collapsed - logger.log {project_id, doc_id, previousVersion, lines, ranges, update}, "update collapsed some ranges, snapshotting previous content" - # Do this last, since it's a mongo call, and so potentially longest running - # If it overruns the lock, it's ok, since all of our redis work is done - SnapshotManager.recordSnapshot project_id, doc_id, previousVersion, pathname, lines, ranges, (error) -> - if error? - logger.error {err: error, project_id, doc_id, version, lines, ranges}, "error recording snapshot" - return callback(error) - else - callback() - else - callback() + var profile = new Profiler("applyUpdate", {project_id, doc_id}); + UpdateManager._sanitizeUpdate(update); + profile.log("sanitizeUpdate"); + return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) { + profile.log("getDoc"); + if (error != null) { return callback(error); } + if ((lines == null) || (version == null)) { + return callback(new Errors.NotFoundError(`document not found: ${doc_id}`)); + } + const previousVersion = version; + return ShareJsUpdateManager.applyUpdate(project_id, doc_id, update, lines, version, function(error, updatedDocLines, version, appliedOps) { + profile.log("sharejs.applyUpdate"); + if (error != null) { return callback(error); } + return RangesManager.applyUpdate(project_id, doc_id, ranges, appliedOps, updatedDocLines, function(error, new_ranges, ranges_were_collapsed) { + UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines); + profile.log("RangesManager.applyUpdate"); + if (error != null) { return callback(error); } + return RedisManager.updateDocument(project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, update.meta, function(error, doc_ops_length, project_ops_length) { + profile.log("RedisManager.updateDocument"); + if (error != null) { return callback(error); } + return HistoryManager.recordAndFlushHistoryOps(project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, function(error) { + profile.log("recordAndFlushHistoryOps"); + if (error != null) { return callback(error); } + if (ranges_were_collapsed) { + logger.log({project_id, doc_id, previousVersion, lines, ranges, update}, "update collapsed some ranges, snapshotting previous content"); + // Do this last, since it's a mongo call, and so potentially longest running + // If it overruns the lock, it's ok, since all of our redis work is done + return SnapshotManager.recordSnapshot(project_id, doc_id, previousVersion, pathname, lines, ranges, function(error) { + if (error != null) { + logger.error({err: error, project_id, doc_id, version, lines, ranges}, "error recording snapshot"); + return callback(error); + } else { + return callback(); + } + }); + } else { + return callback(); + } + }); + }); + }); + }); + }); + }, - lockUpdatesAndDo: (method, project_id, doc_id, args..., callback) -> - profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id}) - LockManager.getLock doc_id, (error, lockValue) -> - profile.log("getLock") - return callback(error) if error? - UpdateManager.processOutstandingUpdates project_id, doc_id, (error) -> - return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? - profile.log("processOutstandingUpdates") - method project_id, doc_id, args..., (error, response_args...) -> - return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback) if error? 
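
For readability, the deeply nested `applyUpdate` chain above reduces to the following sequence (an async/await paraphrase; the promisified manager functions are hypothetical stand-ins for the callback APIs used above):

```js
// Illustration only; names are stand-ins for the managers called above.
async function applyUpdateSketch(project_id, doc_id, update) {
  const { lines, version, ranges, pathname, projectHistoryId } =
    await getDoc(project_id, doc_id);
  const { updatedDocLines, newVersion, appliedOps } =
    await shareJsApplyUpdate(project_id, doc_id, update, lines, version);
  const { new_ranges, ranges_were_collapsed } =
    await rangesApplyUpdate(ranges, appliedOps, updatedDocLines);
  addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines);
  await redisUpdateDocument(project_id, doc_id, updatedDocLines, newVersion,
    appliedOps, new_ranges, update.meta);
  await recordAndFlushHistoryOps(project_id, doc_id, appliedOps);
  if (ranges_were_collapsed) {
    // Deliberately last: this is a Mongo call, and overrunning the lock is
    // acceptable once all of the Redis writes have already landed.
    await recordSnapshot(project_id, doc_id, version, pathname, lines, ranges);
  }
}
```
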
- profile.log("method") - LockManager.releaseLock doc_id, lockValue, (error) -> - return callback(error) if error? - profile.log("releaseLock").end() - callback null, response_args... - # We held the lock for a while so updates might have queued up - UpdateManager.continueProcessingUpdatesWithLock project_id, doc_id + lockUpdatesAndDo(method, project_id, doc_id, ...rest) { + const adjustedLength = Math.max(rest.length, 1), args = rest.slice(0, adjustedLength - 1), callback = rest[adjustedLength - 1]; + const profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id}); + return LockManager.getLock(doc_id, function(error, lockValue) { + profile.log("getLock"); + if (error != null) { return callback(error); } + return UpdateManager.processOutstandingUpdates(project_id, doc_id, function(error) { + if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); } + profile.log("processOutstandingUpdates"); + return method(project_id, doc_id, ...Array.from(args), function(error, ...response_args) { + if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); } + profile.log("method"); + return LockManager.releaseLock(doc_id, lockValue, function(error) { + if (error != null) { return callback(error); } + profile.log("releaseLock").end(); + callback(null, ...Array.from(response_args)); + // We held the lock for a while so updates might have queued up + return UpdateManager.continueProcessingUpdatesWithLock(project_id, doc_id); + }); + }); + }); + }); + }, - _handleErrorInsideLock: (doc_id, lockValue, original_error, callback = (error) ->) -> - LockManager.releaseLock doc_id, lockValue, (lock_error) -> - callback(original_error) + _handleErrorInsideLock(doc_id, lockValue, original_error, callback) { + if (callback == null) { callback = function(error) {}; } + return LockManager.releaseLock(doc_id, lockValue, lock_error => callback(original_error)); + }, - _sanitizeUpdate: (update) -> - # In Javascript, characters are 16-bits wide. It does not understand surrogates as characters. - # - # From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane): - # "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved - # for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate - # and one Low Surrogate. A single surrogate code point will never be assigned a character."" - # - # The main offender seems to be \uD835 as a stand alone character, which would be the first - # 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). - # Something must be going on client side that is screwing up the encoding and splitting the - # two 16-bit characters so that \uD835 is standalone. - for op in update.op or [] - if op.i? - # Replace high and low surrogate characters with 'replacement character' (\uFFFD) - op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD") - return update + _sanitizeUpdate(update) { + // In Javascript, characters are 16-bits wide. It does not understand surrogates as characters. + // + // From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane): + // "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved + // for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate + // and one Low Surrogate. 
A single surrogate code point will never be assigned a character."" + // + // The main offender seems to be \uD835 as a stand alone character, which would be the first + // 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). + // Something must be going on client side that is screwing up the encoding and splitting the + // two 16-bit characters so that \uD835 is standalone. + for (let op of Array.from(update.op || [])) { + if (op.i != null) { + // Replace high and low surrogate characters with 'replacement character' (\uFFFD) + op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD"); + } + } + return update; + }, - _addProjectHistoryMetadataToOps: (updates, pathname, projectHistoryId, lines) -> - doc_length = _.reduce lines, - (chars, line) -> chars + line.length, - 0 - doc_length += lines.length - 1 # count newline characters - updates.forEach (update) -> - update.projectHistoryId = projectHistoryId - update.meta ||= {} - update.meta.pathname = pathname - update.meta.doc_length = doc_length - # Each update may contain multiple ops, i.e. - # [{ - # ops: [{i: "foo", p: 4}, {d: "bar", p:8}] - # }, { - # ops: [{d: "baz", p: 40}, {i: "qux", p:8}] - # }] - # We want to include the doc_length at the start of each update, - # before it's ops are applied. However, we need to track any - # changes to it for the next update. - for op in update.op - if op.i? - doc_length += op.i.length - if op.d? - doc_length -= op.d.length + _addProjectHistoryMetadataToOps(updates, pathname, projectHistoryId, lines) { + let doc_length = _.reduce(lines, + (chars, line) => chars + line.length, + 0); + doc_length += lines.length - 1; // count newline characters + return updates.forEach(function(update) { + update.projectHistoryId = projectHistoryId; + if (!update.meta) { update.meta = {}; } + update.meta.pathname = pathname; + update.meta.doc_length = doc_length; + // Each update may contain multiple ops, i.e. + // [{ + // ops: [{i: "foo", p: 4}, {d: "bar", p:8}] + // }, { + // ops: [{d: "baz", p: 40}, {i: "qux", p:8}] + // }] + // We want to include the doc_length at the start of each update, + // before it's ops are applied. However, we need to track any + // changes to it for the next update. + return (() => { + const result = []; + for (let op of Array.from(update.op)) { + if (op.i != null) { + doc_length += op.i.length; + } + if (op.d != null) { + result.push(doc_length -= op.d.length); + } else { + result.push(undefined); + } + } + return result; + })(); + }); + } +}); diff --git a/services/document-updater/app/coffee/mongojs.js b/services/document-updater/app/coffee/mongojs.js index dfeebb788f..daf6fbed6d 100644 --- a/services/document-updater/app/coffee/mongojs.js +++ b/services/document-updater/app/coffee/mongojs.js @@ -1,12 +1,21 @@ -Settings = require "settings-sharelatex" -mongojs = require "mongojs" -db = mongojs(Settings.mongo.url, ["docSnapshots"]) +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Settings = require("settings-sharelatex"); +const mongojs = require("mongojs"); +const db = mongojs(Settings.mongo.url, ["docSnapshots"]); -module.exports = - db: db - ObjectId: mongojs.ObjectId - healthCheck: (callback) -> - db.runCommand {ping: 1}, (err, res) -> - return callback(err) if err? 
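
The surrogate filter in `_sanitizeUpdate` above is deliberately blunt: the character class matches each half of a surrogate pair individually, so a lone high surrogate and a well-formed astral pair are both scrubbed. A standalone check:

```js
const sanitize = s => s.replace(/[\uD800-\uDFFF]/g, '\uFFFD');

console.log(sanitize('lone: \uD835'));       // "lone: �"
// U+1D400 (a valid pair, mathematical bold A) also loses both halves:
console.log(sanitize('pair: \uD835\uDC00')); // "pair: ��"
```
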
- return callback(new Error("failed mongo ping")) if !res.ok - callback() +module.exports = { + db, + ObjectId: mongojs.ObjectId, + healthCheck(callback) { + return db.runCommand({ping: 1}, function(err, res) { + if (err != null) { return callback(err); } + if (!res.ok) { return callback(new Error("failed mongo ping")); } + return callback(); + }); + } +}; diff --git a/services/document-updater/app/coffee/sharejs/count.js b/services/document-updater/app/coffee/sharejs/count.js index da28355efb..ffc3337ac7 100644 --- a/services/document-updater/app/coffee/sharejs/count.js +++ b/services/document-updater/app/coffee/sharejs/count.js @@ -1,22 +1,30 @@ -# This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment] +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment] -exports.name = 'count' -exports.create = -> 1 +exports.name = 'count'; +exports.create = () => 1; -exports.apply = (snapshot, op) -> - [v, inc] = op - throw new Error "Op #{v} != snapshot #{snapshot}" unless snapshot == v - snapshot + inc +exports.apply = function(snapshot, op) { + const [v, inc] = Array.from(op); + if (snapshot !== v) { throw new Error(`Op ${v} != snapshot ${snapshot}`); } + return snapshot + inc; +}; -# transform op1 by op2. Return transformed version of op1. -exports.transform = (op1, op2) -> - throw new Error "Op1 #{op1[0]} != op2 #{op2[0]}" unless op1[0] == op2[0] - [op1[0] + op2[1], op1[1]] +// transform op1 by op2. Return transformed version of op1. +exports.transform = function(op1, op2) { + if (op1[0] !== op2[0]) { throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`); } + return [op1[0] + op2[1], op1[1]]; +}; -exports.compose = (op1, op2) -> - throw new Error "Op1 #{op1} + 1 != op2 #{op2}" unless op1[0] + op1[1] == op2[0] - [op1[0], op1[1] + op2[1]] +exports.compose = function(op1, op2) { + if ((op1[0] + op1[1]) !== op2[0]) { throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`); } + return [op1[0], op1[1] + op2[1]]; +}; -exports.generateRandomOp = (doc) -> - [[doc, 1], doc + 1] +exports.generateRandomOp = doc => [[doc, 1], doc + 1]; diff --git a/services/document-updater/app/coffee/sharejs/helpers.js b/services/document-updater/app/coffee/sharejs/helpers.js index 093b32e1bb..81a561de03 100644 --- a/services/document-updater/app/coffee/sharejs/helpers.js +++ b/services/document-updater/app/coffee/sharejs/helpers.js @@ -1,65 +1,87 @@ -# These methods let you build a transform function from a transformComponent function -# for OT types like text and JSON in which operations are lists of components -# and transforming them requires N^2 work. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// These methods let you build a transform function from a transformComponent function +// for OT types like text and JSON in which operations are lists of components +// and transforming them requires N^2 work. -# Add transform and transformX functions for an OT type which has transformComponent defined. 
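
Since `count` above is the reference type for the OT machinery, a worked run-through (ops are `[expectedSnapshot, increment]`; the require path assumes this file's location in the tree):

```js
const count = require('./count');

let snapshot = count.create();            // 1
snapshot = count.apply(snapshot, [1, 2]); // 3
snapshot = count.apply(snapshot, [3, 4]); // 7

// Two concurrent ops against snapshot 7 converge after transformation:
const a = [7, 1];
const b = [7, 2];
const a2 = count.transform(a, b);         // [9, 1]
const b2 = count.transform(b, a);         // [8, 2]
console.log(count.apply(count.apply(7, a), b2)); // 10
console.log(count.apply(count.apply(7, b), a2)); // 10
```
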
-# transformComponent(destination array, component, other component, side) -exports['_bt'] = bootstrapTransform = (type, transformComponent, checkValidOp, append) -> - transformComponentX = (left, right, destLeft, destRight) -> - transformComponent destLeft, left, right, 'left' - transformComponent destRight, right, left, 'right' +// Add transform and transformX functions for an OT type which has transformComponent defined. +// transformComponent(destination array, component, other component, side) +let bootstrapTransform; +exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) { + let transformX; + const transformComponentX = function(left, right, destLeft, destRight) { + transformComponent(destLeft, left, right, 'left'); + return transformComponent(destRight, right, left, 'right'); + }; - # Transforms rightOp by leftOp. Returns ['rightOp', clientOp'] - type.transformX = type['transformX'] = transformX = (leftOp, rightOp) -> - checkValidOp leftOp - checkValidOp rightOp + // Transforms rightOp by leftOp. Returns ['rightOp', clientOp'] + type.transformX = (type['transformX'] = (transformX = function(leftOp, rightOp) { + checkValidOp(leftOp); + checkValidOp(rightOp); - newRightOp = [] + const newRightOp = []; - for rightComponent in rightOp - # Generate newLeftOp by composing leftOp by rightComponent - newLeftOp = [] + for (let rightComponent of Array.from(rightOp)) { + // Generate newLeftOp by composing leftOp by rightComponent + const newLeftOp = []; - k = 0 - while k < leftOp.length - nextC = [] - transformComponentX leftOp[k], rightComponent, newLeftOp, nextC - k++ + let k = 0; + while (k < leftOp.length) { + var l; + const nextC = []; + transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC); + k++; - if nextC.length == 1 - rightComponent = nextC[0] - else if nextC.length == 0 - append newLeftOp, l for l in leftOp[k..] - rightComponent = null - break - else - # Recurse. - [l_, r_] = transformX leftOp[k..], nextC - append newLeftOp, l for l in l_ - append newRightOp, r for r in r_ - rightComponent = null - break + if (nextC.length === 1) { + rightComponent = nextC[0]; + } else if (nextC.length === 0) { + for (l of Array.from(leftOp.slice(k))) { append(newLeftOp, l); } + rightComponent = null; + break; + } else { + // Recurse. + const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)); + for (l of Array.from(l_)) { append(newLeftOp, l); } + for (let r of Array.from(r_)) { append(newRightOp, r); } + rightComponent = null; + break; + } + } - append newRightOp, rightComponent if rightComponent? - leftOp = newLeftOp + if (rightComponent != null) { append(newRightOp, rightComponent); } + leftOp = newLeftOp; + } - [leftOp, newRightOp] + return [leftOp, newRightOp]; + })); - # Transforms op with specified type ('left' or 'right') by otherOp. - type.transform = type['transform'] = (op, otherOp, type) -> - throw new Error "type must be 'left' or 'right'" unless type == 'left' or type == 'right' + // Transforms op with specified type ('left' or 'right') by otherOp. + return type.transform = (type['transform'] = function(op, otherOp, type) { + let _; + if ((type !== 'left') && (type !== 'right')) { throw new Error("type must be 'left' or 'right'"); } - return op if otherOp.length == 0 + if (otherOp.length === 0) { return op; } - # TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? 
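
`transformX` above is the heart of the TP1 convergence property: if `[a2, b2] = transformX(a, b)`, then applying `a` followed by `b2` must produce the same document as `b` followed by `a2`. With the sibling `text` type (assuming its usual Node export, with `transformX` attached by `bootstrapTransform`), two concurrent inserts at position 0 of `"x"`:

```js
const text = require('./text');

const a = [{ i: 'a', p: 0 }]; // the 'left' client's op
const b = [{ i: 'b', p: 0 }]; // the 'right' client's op
const [a2, b2] = text.transformX(a, b);

const one = text.apply(text.apply('x', a), b2);
const two = text.apply(text.apply('x', b), a2);
console.log(one, one === two); // "abx" true; the left insert wins the tie
```
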
- return transformComponent [], op[0], otherOp[0], type if op.length == 1 and otherOp.length == 1 + // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? + if ((op.length === 1) && (otherOp.length === 1)) { return transformComponent([], op[0], otherOp[0], type); } - if type == 'left' - [left, _] = transformX op, otherOp - left - else - [_, right] = transformX otherOp, op - right + if (type === 'left') { + let left; + [left, _] = Array.from(transformX(op, otherOp)); + return left; + } else { + let right; + [_, right] = Array.from(transformX(otherOp, op)); + return right; + } + }); +}); -if typeof WEB is 'undefined' - exports.bootstrapTransform = bootstrapTransform +if (typeof WEB === 'undefined') { + exports.bootstrapTransform = bootstrapTransform; +} diff --git a/services/document-updater/app/coffee/sharejs/index.js b/services/document-updater/app/coffee/sharejs/index.js index 6f3bb8ec20..bf681de7cd 100644 --- a/services/document-updater/app/coffee/sharejs/index.js +++ b/services/document-updater/app/coffee/sharejs/index.js @@ -1,15 +1,21 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ -register = (file) -> - type = require file - exports[type.name] = type - try require "#{file}-api" +const register = function(file) { + const type = require(file); + exports[type.name] = type; + try { return require(`${file}-api`); } catch (error) {} +}; -# Import all the built-in types. -register './simple' -register './count' +// Import all the built-in types. +register('./simple'); +register('./count'); -register './text' -register './text-composable' -register './text-tp2' +register('./text'); +register('./text-composable'); +register('./text-tp2'); -register './json' +register('./json'); diff --git a/services/document-updater/app/coffee/sharejs/json-api.js b/services/document-updater/app/coffee/sharejs/json-api.js index 8819dee798..1c7c2633ba 100644 --- a/services/document-updater/app/coffee/sharejs/json-api.js +++ b/services/document-updater/app/coffee/sharejs/json-api.js @@ -1,180 +1,273 @@ -# API for JSON OT +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// API for JSON OT -json = require './json' if typeof WEB is 'undefined' +let json; +if (typeof WEB === 'undefined') { json = require('./json'); } -if WEB? - extendDoc = exports.extendDoc - exports.extendDoc = (name, fn) -> - SubDoc::[name] = fn - extendDoc name, fn +if (typeof WEB !== 'undefined' && WEB !== null) { + const { + extendDoc + } = exports; + exports.extendDoc = function(name, fn) { + SubDoc.prototype[name] = fn; + return extendDoc(name, fn); + }; +} -depath = (path) -> - if path.length == 1 and path[0].constructor == Array - path[0] - else path +const depath = function(path) { + if ((path.length === 1) && (path[0].constructor === Array)) { + return path[0]; + } else { return path; } +}; -class SubDoc - constructor: (@doc, @path) -> - at: (path...) -> @doc.at @path.concat depath path - get: -> @doc.getAt @path - # for objects and lists - set: (value, cb) -> @doc.setAt @path, value, cb - # for strings and lists. 
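
The `traverse` helper converted just below is the linchpin of this API: it returns the *parent* container plus the final key, so callers can both read and assign through `elem[key]`. Reimplemented standalone for a quick check:

```js
const traverse = function(snapshot, path) {
  const container = { data: snapshot };
  let key = 'data';
  let elem = container;
  for (const p of path) {
    elem = elem[key];
    key = p;
    if (typeof elem === 'undefined') { throw new Error('bad path'); }
  }
  return { elem, key };
};

const { elem, key } = traverse({ users: [{ name: 'ada' }] }, ['users', 0, 'name']);
console.log(elem[key]); // "ada"
elem[key] = 'lovelace'; // writes through into the snapshot
```
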
- insert: (pos, value, cb) -> @doc.insertAt @path, pos, value, cb - # for strings - del: (pos, length, cb) -> @doc.deleteTextAt @path, length, pos, cb - # for objects and lists - remove: (cb) -> @doc.removeAt @path, cb - push: (value, cb) -> @insert @get().length, value, cb - move: (from, to, cb) -> @doc.moveAt @path, from, to, cb - add: (amount, cb) -> @doc.addAt @path, amount, cb - on: (event, cb) -> @doc.addListener @path, event, cb - removeListener: (l) -> @doc.removeListener l +class SubDoc { + constructor(doc, path) { + this.doc = doc; + this.path = path; + } + at(...path) { return this.doc.at(this.path.concat(depath(path))); } + get() { return this.doc.getAt(this.path); } + // for objects and lists + set(value, cb) { return this.doc.setAt(this.path, value, cb); } + // for strings and lists. + insert(pos, value, cb) { return this.doc.insertAt(this.path, pos, value, cb); } + // for strings + del(pos, length, cb) { return this.doc.deleteTextAt(this.path, length, pos, cb); } + // for objects and lists + remove(cb) { return this.doc.removeAt(this.path, cb); } + push(value, cb) { return this.insert(this.get().length, value, cb); } + move(from, to, cb) { return this.doc.moveAt(this.path, from, to, cb); } + add(amount, cb) { return this.doc.addAt(this.path, amount, cb); } + on(event, cb) { return this.doc.addListener(this.path, event, cb); } + removeListener(l) { return this.doc.removeListener(l); } - # text API compatibility - getLength: -> @get().length - getText: -> @get() + // text API compatibility + getLength() { return this.get().length; } + getText() { return this.get(); } +} -traverse = (snapshot, path) -> - container = data:snapshot - key = 'data' - elem = container - for p in path - elem = elem[key] - key = p - throw new Error 'bad path' if typeof elem == 'undefined' - {elem, key} +const traverse = function(snapshot, path) { + const container = {data:snapshot}; + let key = 'data'; + let elem = container; + for (let p of Array.from(path)) { + elem = elem[key]; + key = p; + if (typeof elem === 'undefined') { throw new Error('bad path'); } + } + return {elem, key}; +}; -pathEquals = (p1, p2) -> - return false if p1.length != p2.length - for e,i in p1 - return false if e != p2[i] - true +const pathEquals = function(p1, p2) { + if (p1.length !== p2.length) { return false; } + for (let i = 0; i < p1.length; i++) { + const e = p1[i]; + if (e !== p2[i]) { return false; } + } + return true; +}; -json.api = - provides: {json:true} +json.api = { + provides: {json:true}, - at: (path...) 
-> new SubDoc this, depath path + at(...path) { return new SubDoc(this, depath(path)); }, - get: -> @snapshot - set: (value, cb) -> @setAt [], value, cb + get() { return this.snapshot; }, + set(value, cb) { return this.setAt([], value, cb); }, - getAt: (path) -> - {elem, key} = traverse @snapshot, path - return elem[key] + getAt(path) { + const {elem, key} = traverse(this.snapshot, path); + return elem[key]; + }, - setAt: (path, value, cb) -> - {elem, key} = traverse @snapshot, path - op = {p:path} - if elem.constructor == Array - op.li = value - op.ld = elem[key] if typeof elem[key] != 'undefined' - else if typeof elem == 'object' - op.oi = value - op.od = elem[key] if typeof elem[key] != 'undefined' - else throw new Error 'bad path' - @submitOp [op], cb + setAt(path, value, cb) { + const {elem, key} = traverse(this.snapshot, path); + const op = {p:path}; + if (elem.constructor === Array) { + op.li = value; + if (typeof elem[key] !== 'undefined') { op.ld = elem[key]; } + } else if (typeof elem === 'object') { + op.oi = value; + if (typeof elem[key] !== 'undefined') { op.od = elem[key]; } + } else { throw new Error('bad path'); } + return this.submitOp([op], cb); + }, - removeAt: (path, cb) -> - {elem, key} = traverse @snapshot, path - throw new Error 'no element at that path' unless typeof elem[key] != 'undefined' - op = {p:path} - if elem.constructor == Array - op.ld = elem[key] - else if typeof elem == 'object' - op.od = elem[key] - else throw new Error 'bad path' - @submitOp [op], cb + removeAt(path, cb) { + const {elem, key} = traverse(this.snapshot, path); + if (typeof elem[key] === 'undefined') { throw new Error('no element at that path'); } + const op = {p:path}; + if (elem.constructor === Array) { + op.ld = elem[key]; + } else if (typeof elem === 'object') { + op.od = elem[key]; + } else { throw new Error('bad path'); } + return this.submitOp([op], cb); + }, - insertAt: (path, pos, value, cb) -> - {elem, key} = traverse @snapshot, path - op = {p:path.concat pos} - if elem[key].constructor == Array - op.li = value - else if typeof elem[key] == 'string' - op.si = value - @submitOp [op], cb + insertAt(path, pos, value, cb) { + const {elem, key} = traverse(this.snapshot, path); + const op = {p:path.concat(pos)}; + if (elem[key].constructor === Array) { + op.li = value; + } else if (typeof elem[key] === 'string') { + op.si = value; + } + return this.submitOp([op], cb); + }, - moveAt: (path, from, to, cb) -> - op = [{p:path.concat(from), lm:to}] - @submitOp op, cb + moveAt(path, from, to, cb) { + const op = [{p:path.concat(from), lm:to}]; + return this.submitOp(op, cb); + }, - addAt: (path, amount, cb) -> - op = [{p:path, na:amount}] - @submitOp op, cb + addAt(path, amount, cb) { + const op = [{p:path, na:amount}]; + return this.submitOp(op, cb); + }, - deleteTextAt: (path, length, pos, cb) -> - {elem, key} = traverse @snapshot, path - op = [{p:path.concat(pos), sd:elem[key][pos...(pos + length)]}] - @submitOp op, cb + deleteTextAt(path, length, pos, cb) { + const {elem, key} = traverse(this.snapshot, path); + const op = [{p:path.concat(pos), sd:elem[key].slice(pos, (pos + length))}]; + return this.submitOp(op, cb); + }, - addListener: (path, event, cb) -> - l = {path, event, cb} - @_listeners.push l - l - removeListener: (l) -> - i = @_listeners.indexOf l - return false if i < 0 - @_listeners.splice i, 1 - return true - _register: -> - @_listeners = [] - @on 'change', (op) -> - for c in op - if c.na != undefined or c.si != undefined or c.sd != undefined - # no change to structure - 
continue - to_remove = [] - for l, i in @_listeners - # Transform a dummy op by the incoming op to work out what - # should happen to the listener. - dummy = {p:l.path, na:0} - xformed = @type.transformComponent [], dummy, c, 'left' - if xformed.length == 0 - # The op was transformed to noop, so we should delete the listener. - to_remove.push i - else if xformed.length == 1 - # The op remained, so grab its new path into the listener. - l.path = xformed[0].p - else - throw new Error "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components." - to_remove.sort (a, b) -> b - a - for i in to_remove - @_listeners.splice i, 1 - @on 'remoteop', (op) -> - for c in op - match_path = if c.na == undefined then c.p[...c.p.length-1] else c.p - for {path, event, cb} in @_listeners - if pathEquals path, match_path - switch event - when 'insert' - if c.li != undefined and c.ld == undefined - cb(c.p[c.p.length-1], c.li) - else if c.oi != undefined and c.od == undefined - cb(c.p[c.p.length-1], c.oi) - else if c.si != undefined - cb(c.p[c.p.length-1], c.si) - when 'delete' - if c.li == undefined and c.ld != undefined - cb(c.p[c.p.length-1], c.ld) - else if c.oi == undefined and c.od != undefined - cb(c.p[c.p.length-1], c.od) - else if c.sd != undefined - cb(c.p[c.p.length-1], c.sd) - when 'replace' - if c.li != undefined and c.ld != undefined - cb(c.p[c.p.length-1], c.ld, c.li) - else if c.oi != undefined and c.od != undefined - cb(c.p[c.p.length-1], c.od, c.oi) - when 'move' - if c.lm != undefined - cb(c.p[c.p.length-1], c.lm) - when 'add' - if c.na != undefined - cb(c.na) - else if (common = @type.commonPath match_path, path)? - if event == 'child op' - if match_path.length == path.length == common - throw new Error "paths match length and have commonality, but aren't equal?" - child_path = c.p[common+1..] - cb(child_path, c) + addListener(path, event, cb) { + const l = {path, event, cb}; + this._listeners.push(l); + return l; + }, + removeListener(l) { + const i = this._listeners.indexOf(l); + if (i < 0) { return false; } + this._listeners.splice(i, 1); + return true; + }, + _register() { + this._listeners = []; + this.on('change', function(op) { + return (() => { + const result = []; + for (let c of Array.from(op)) { + var i; + if ((c.na !== undefined) || (c.si !== undefined) || (c.sd !== undefined)) { + // no change to structure + continue; + } + var to_remove = []; + for (i = 0; i < this._listeners.length; i++) { + // Transform a dummy op by the incoming op to work out what + // should happen to the listener. + const l = this._listeners[i]; + const dummy = {p:l.path, na:0}; + const xformed = this.type.transformComponent([], dummy, c, 'left'); + if (xformed.length === 0) { + // The op was transformed to noop, so we should delete the listener. + to_remove.push(i); + } else if (xformed.length === 1) { + // The op remained, so grab its new path into the listener. + l.path = xformed[0].p; + } else { + throw new Error("Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components."); + } + } + to_remove.sort((a, b) => b - a); + result.push((() => { + const result1 = []; + for (i of Array.from(to_remove)) { + result1.push(this._listeners.splice(i, 1)); + } + return result1; + })()); + } + return result; + })(); + }); + return this.on('remoteop', function(op) { + return (() => { + const result = []; + for (var c of Array.from(op)) { + var match_path = c.na === undefined ? 
c.p.slice(0, c.p.length-1) : c.p; + result.push((() => { + const result1 = []; + for (let {path, event, cb} of Array.from(this._listeners)) { + var common; + if (pathEquals(path, match_path)) { + switch (event) { + case 'insert': + if ((c.li !== undefined) && (c.ld === undefined)) { + result1.push(cb(c.p[c.p.length-1], c.li)); + } else if ((c.oi !== undefined) && (c.od === undefined)) { + result1.push(cb(c.p[c.p.length-1], c.oi)); + } else if (c.si !== undefined) { + result1.push(cb(c.p[c.p.length-1], c.si)); + } else { + result1.push(undefined); + } + break; + case 'delete': + if ((c.li === undefined) && (c.ld !== undefined)) { + result1.push(cb(c.p[c.p.length-1], c.ld)); + } else if ((c.oi === undefined) && (c.od !== undefined)) { + result1.push(cb(c.p[c.p.length-1], c.od)); + } else if (c.sd !== undefined) { + result1.push(cb(c.p[c.p.length-1], c.sd)); + } else { + result1.push(undefined); + } + break; + case 'replace': + if ((c.li !== undefined) && (c.ld !== undefined)) { + result1.push(cb(c.p[c.p.length-1], c.ld, c.li)); + } else if ((c.oi !== undefined) && (c.od !== undefined)) { + result1.push(cb(c.p[c.p.length-1], c.od, c.oi)); + } else { + result1.push(undefined); + } + break; + case 'move': + if (c.lm !== undefined) { + result1.push(cb(c.p[c.p.length-1], c.lm)); + } else { + result1.push(undefined); + } + break; + case 'add': + if (c.na !== undefined) { + result1.push(cb(c.na)); + } else { + result1.push(undefined); + } + break; + default: + result1.push(undefined); + } + } else if ((common = this.type.commonPath(match_path, path)) != null) { + if (event === 'child op') { + if (match_path.length === path.length && path.length === common) { + throw new Error("paths match length and have commonality, but aren't equal?"); + } + const child_path = c.p.slice(common+1); + result1.push(cb(child_path, c)); + } else { + result1.push(undefined); + } + } else { + result1.push(undefined); + } + } + return result1; + })()); + } + return result; + })(); + }); + } +}; diff --git a/services/document-updater/app/coffee/sharejs/json.js b/services/document-updater/app/coffee/sharejs/json.js index b03b0947ef..3e3bee79d9 100644 --- a/services/document-updater/app/coffee/sharejs/json.js +++ b/services/document-updater/app/coffee/sharejs/json.js @@ -1,441 +1,534 @@ -# This is the implementation of the JSON OT type. -# -# Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This is the implementation of the JSON OT type. +// +// Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations -if WEB? 
- text = exports.types.text -else - text = require './text' +let text; +if (typeof WEB !== 'undefined' && WEB !== null) { + ({ + text + } = exports.types); +} else { + text = require('./text'); +} -json = {} +const json = {}; -json.name = 'json' +json.name = 'json'; -json.create = -> null +json.create = () => null; -json.invertComponent = (c) -> - c_ = {p: c.p} - c_.sd = c.si if c.si != undefined - c_.si = c.sd if c.sd != undefined - c_.od = c.oi if c.oi != undefined - c_.oi = c.od if c.od != undefined - c_.ld = c.li if c.li != undefined - c_.li = c.ld if c.ld != undefined - c_.na = -c.na if c.na != undefined - if c.lm != undefined - c_.lm = c.p[c.p.length-1] - c_.p = c.p[0...c.p.length - 1].concat([c.lm]) - c_ +json.invertComponent = function(c) { + const c_ = {p: c.p}; + if (c.si !== undefined) { c_.sd = c.si; } + if (c.sd !== undefined) { c_.si = c.sd; } + if (c.oi !== undefined) { c_.od = c.oi; } + if (c.od !== undefined) { c_.oi = c.od; } + if (c.li !== undefined) { c_.ld = c.li; } + if (c.ld !== undefined) { c_.li = c.ld; } + if (c.na !== undefined) { c_.na = -c.na; } + if (c.lm !== undefined) { + c_.lm = c.p[c.p.length-1]; + c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]); + } + return c_; +}; -json.invert = (op) -> json.invertComponent c for c in op.slice().reverse() +json.invert = op => Array.from(op.slice().reverse()).map((c) => json.invertComponent(c)); -json.checkValidOp = (op) -> +json.checkValidOp = function(op) {}; -isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' -json.checkList = (elem) -> - throw new Error 'Referenced element not a list' unless isArray(elem) +const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; +json.checkList = function(elem) { + if (!isArray(elem)) { throw new Error('Referenced element not a list'); } +}; -json.checkObj = (elem) -> - throw new Error "Referenced element not an object (it was #{JSON.stringify elem})" unless elem.constructor is Object +json.checkObj = function(elem) { + if (elem.constructor !== Object) { throw new Error(`Referenced element not an object (it was ${JSON.stringify(elem)})`); } +}; -json.apply = (snapshot, op) -> - json.checkValidOp op - op = clone op +json.apply = function(snapshot, op) { + json.checkValidOp(op); + op = clone(op); - container = {data: clone snapshot} + const container = {data: clone(snapshot)}; - try - for c, i in op - parent = null - parentkey = null - elem = container - key = 'data' + try { + for (let i = 0; i < op.length; i++) { + const c = op[i]; + let parent = null; + let parentkey = null; + let elem = container; + let key = 'data'; - for p in c.p - parent = elem - parentkey = key - elem = elem[key] - key = p + for (let p of Array.from(c.p)) { + parent = elem; + parentkey = key; + elem = elem[key]; + key = p; - throw new Error 'Path invalid' unless parent? + if (parent == null) { throw new Error('Path invalid'); } + } - if c.na != undefined - # Number add - throw new Error 'Referenced element not a number' unless typeof elem[key] is 'number' - elem[key] += c.na + if (c.na !== undefined) { + // Number add + if (typeof elem[key] !== 'number') { throw new Error('Referenced element not a number'); } + elem[key] += c.na; - else if c.si != undefined - # String insert - throw new Error "Referenced element not a string (it was #{JSON.stringify elem})" unless typeof elem is 'string' - parent[parentkey] = elem[...key] + c.si + elem[key..] 
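
`invertComponent` above builds the undo op for each component; copied here so a couple of spot checks run standalone (note how a list move inverts by swapping the final path element with `lm`):

```js
const invertComponent = function(c) {
  const c_ = { p: c.p };
  if (c.si !== undefined) { c_.sd = c.si; }
  if (c.sd !== undefined) { c_.si = c.sd; }
  if (c.oi !== undefined) { c_.od = c.oi; }
  if (c.od !== undefined) { c_.oi = c.od; }
  if (c.li !== undefined) { c_.ld = c.li; }
  if (c.ld !== undefined) { c_.li = c.ld; }
  if (c.na !== undefined) { c_.na = -c.na; }
  if (c.lm !== undefined) {
    c_.lm = c.p[c.p.length - 1];
    c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]);
  }
  return c_;
};

console.log(invertComponent({ p: ['title', 3], si: 'ab' })); // { p: ['title', 3], sd: 'ab' }
console.log(invertComponent({ p: ['order', 4], lm: 1 }));    // { p: ['order', 1], lm: 4 }
```
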
- else if c.sd != undefined - # String delete - throw new Error 'Referenced element not a string' unless typeof elem is 'string' - throw new Error 'Deleted string does not match' unless elem[key...key + c.sd.length] == c.sd - parent[parentkey] = elem[...key] + elem[key + c.sd.length..] + } else if (c.si !== undefined) { + // String insert + if (typeof elem !== 'string') { throw new Error(`Referenced element not a string (it was ${JSON.stringify(elem)})`); } + parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key); + } else if (c.sd !== undefined) { + // String delete + if (typeof elem !== 'string') { throw new Error('Referenced element not a string'); } + if (elem.slice(key, key + c.sd.length) !== c.sd) { throw new Error('Deleted string does not match'); } + parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length); - else if c.li != undefined && c.ld != undefined - # List replace - json.checkList elem + } else if ((c.li !== undefined) && (c.ld !== undefined)) { + // List replace + json.checkList(elem); - # Should check the list element matches c.ld - elem[key] = c.li - else if c.li != undefined - # List insert - json.checkList elem + // Should check the list element matches c.ld + elem[key] = c.li; + } else if (c.li !== undefined) { + // List insert + json.checkList(elem); - elem.splice key, 0, c.li - else if c.ld != undefined - # List delete - json.checkList elem + elem.splice(key, 0, c.li); + } else if (c.ld !== undefined) { + // List delete + json.checkList(elem); - # Should check the list element matches c.ld here too. - elem.splice key, 1 - else if c.lm != undefined - # List move - json.checkList elem - if c.lm != key - e = elem[key] - # Remove it... - elem.splice key, 1 - # And insert it back. - elem.splice c.lm, 0, e + // Should check the list element matches c.ld here too. + elem.splice(key, 1); + } else if (c.lm !== undefined) { + // List move + json.checkList(elem); + if (c.lm !== key) { + const e = elem[key]; + // Remove it... + elem.splice(key, 1); + // And insert it back. + elem.splice(c.lm, 0, e); + } - else if c.oi != undefined - # Object insert / replace - json.checkObj elem + } else if (c.oi !== undefined) { + // Object insert / replace + json.checkObj(elem); - # Should check that elem[key] == c.od - elem[key] = c.oi - else if c.od != undefined - # Object delete - json.checkObj elem + // Should check that elem[key] == c.od + elem[key] = c.oi; + } else if (c.od !== undefined) { + // Object delete + json.checkObj(elem); - # Should check that elem[key] == c.od - delete elem[key] - else - throw new Error 'invalid / missing instruction in op' - catch error - # TODO: Roll back all already applied changes. Write tests before implementing this code. - throw error + // Should check that elem[key] == c.od + delete elem[key]; + } else { + throw new Error('invalid / missing instruction in op'); + } + } + } catch (error) { + // TODO: Roll back all already applied changes. Write tests before implementing this code. + throw error; + } - container.data + return container.data; +}; -# Checks if two paths, p1 and p2 match. -json.pathMatches = (p1, p2, ignoreLast) -> - return false unless p1.length == p2.length +// Checks if two paths, p1 and p2 match. 
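
A few `json.apply` examples against the implementation above (assuming the module's usual Node export):

```js
const json = require('./json');

console.log(json.apply({ tags: ['a', 'b'] }, [{ p: ['tags', 1], li: 'x' }]));
// { tags: [ 'a', 'x', 'b' ] }
console.log(json.apply({ n: 1 }, [{ p: ['n'], na: 4 }]));
// { n: 5 }
console.log(json.apply({ s: 'hello' }, [{ p: ['s', 5], si: ' world' }]));
// { s: 'hello world' }
```
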
+json.pathMatches = function(p1, p2, ignoreLast) { + if (p1.length !== p2.length) { return false; } - for p, i in p1 - return false if p != p2[i] and (!ignoreLast or i != p1.length - 1) + for (let i = 0; i < p1.length; i++) { + const p = p1[i]; + if ((p !== p2[i]) && (!ignoreLast || (i !== (p1.length - 1)))) { return false; } + } - true + return true; +}; -json.append = (dest, c) -> - c = clone c - if dest.length != 0 and json.pathMatches c.p, (last = dest[dest.length - 1]).p - if last.na != undefined and c.na != undefined - dest[dest.length - 1] = { p: last.p, na: last.na + c.na } - else if last.li != undefined and c.li == undefined and c.ld == last.li - # insert immediately followed by delete becomes a noop. - if last.ld != undefined - # leave the delete part of the replace - delete last.li - else - dest.pop() - else if last.od != undefined and last.oi == undefined and - c.oi != undefined and c.od == undefined - last.oi = c.oi - else if c.lm != undefined and c.p[c.p.length-1] == c.lm - null # don't do anything - else - dest.push c - else - dest.push c +json.append = function(dest, c) { + let last; + c = clone(c); + if ((dest.length !== 0) && json.pathMatches(c.p, (last = dest[dest.length - 1]).p)) { + if ((last.na !== undefined) && (c.na !== undefined)) { + return dest[dest.length - 1] = { p: last.p, na: last.na + c.na }; + } else if ((last.li !== undefined) && (c.li === undefined) && (c.ld === last.li)) { + // insert immediately followed by delete becomes a noop. + if (last.ld !== undefined) { + // leave the delete part of the replace + return delete last.li; + } else { + return dest.pop(); + } + } else if ((last.od !== undefined) && (last.oi === undefined) && + (c.oi !== undefined) && (c.od === undefined)) { + return last.oi = c.oi; + } else if ((c.lm !== undefined) && (c.p[c.p.length-1] === c.lm)) { + return null; // don't do anything + } else { + return dest.push(c); + } + } else { + return dest.push(c); + } +}; -json.compose = (op1, op2) -> - json.checkValidOp op1 - json.checkValidOp op2 +json.compose = function(op1, op2) { + json.checkValidOp(op1); + json.checkValidOp(op2); - newOp = clone op1 - json.append newOp, c for c in op2 + const newOp = clone(op1); + for (let c of Array.from(op2)) { json.append(newOp, c); } - newOp + return newOp; +}; -json.normalize = (op) -> - newOp = [] +json.normalize = function(op) { + const newOp = []; - op = [op] unless isArray op + if (!isArray(op)) { op = [op]; } - for c in op - c.p ?= [] - json.append newOp, c + for (let c of Array.from(op)) { + if (c.p == null) { c.p = []; } + json.append(newOp, c); + } - newOp + return newOp; +}; -# hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming -# we have browser support for JSON. -# http://jsperf.com/cloning-an-object/12 -clone = (o) -> JSON.parse(JSON.stringify o) +// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming +// we have browser support for JSON. 
+// http://jsperf.com/cloning-an-object/12 +var clone = o => JSON.parse(JSON.stringify(o)); -json.commonPath = (p1, p2) -> - p1 = p1.slice() - p2 = p2.slice() - p1.unshift('data') - p2.unshift('data') - p1 = p1[...p1.length-1] - p2 = p2[...p2.length-1] - return -1 if p2.length == 0 - i = 0 - while p1[i] == p2[i] && i < p1.length - i++ - if i == p2.length - return i-1 - return +json.commonPath = function(p1, p2) { + p1 = p1.slice(); + p2 = p2.slice(); + p1.unshift('data'); + p2.unshift('data'); + p1 = p1.slice(0, p1.length-1); + p2 = p2.slice(0, p2.length-1); + if (p2.length === 0) { return -1; } + let i = 0; + while ((p1[i] === p2[i]) && (i < p1.length)) { + i++; + if (i === p2.length) { + return i-1; + } + } +}; -# transform c so it applies to a document with otherC applied. -json.transformComponent = (dest, c, otherC, type) -> - c = clone c - c.p.push(0) if c.na != undefined - otherC.p.push(0) if otherC.na != undefined +// transform c so it applies to a document with otherC applied. +json.transformComponent = function(dest, c, otherC, type) { + let oc; + c = clone(c); + if (c.na !== undefined) { c.p.push(0); } + if (otherC.na !== undefined) { otherC.p.push(0); } - common = json.commonPath c.p, otherC.p - common2 = json.commonPath otherC.p, c.p + const common = json.commonPath(c.p, otherC.p); + const common2 = json.commonPath(otherC.p, c.p); - cplength = c.p.length - otherCplength = otherC.p.length + const cplength = c.p.length; + const otherCplength = otherC.p.length; - c.p.pop() if c.na != undefined # hax - otherC.p.pop() if otherC.na != undefined + if (c.na !== undefined) { c.p.pop(); } // hax + if (otherC.na !== undefined) { otherC.p.pop(); } - if otherC.na - if common2? && otherCplength >= cplength && otherC.p[common2] == c.p[common2] - if c.ld != undefined - oc = clone otherC - oc.p = oc.p[cplength..] - c.ld = json.apply clone(c.ld), [oc] - else if c.od != undefined - oc = clone otherC - oc.p = oc.p[cplength..] - c.od = json.apply clone(c.od), [oc] - json.append dest, c - return dest + if (otherC.na) { + if ((common2 != null) && (otherCplength >= cplength) && (otherC.p[common2] === c.p[common2])) { + if (c.ld !== undefined) { + oc = clone(otherC); + oc.p = oc.p.slice(cplength); + c.ld = json.apply(clone(c.ld), [oc]); + } else if (c.od !== undefined) { + oc = clone(otherC); + oc.p = oc.p.slice(cplength); + c.od = json.apply(clone(c.od), [oc]); + } + } + json.append(dest, c); + return dest; + } - if common2? && otherCplength > cplength && c.p[common2] == otherC.p[common2] - # transform based on c - if c.ld != undefined - oc = clone otherC - oc.p = oc.p[cplength..] - c.ld = json.apply clone(c.ld), [oc] - else if c.od != undefined - oc = clone otherC - oc.p = oc.p[cplength..] - c.od = json.apply clone(c.od), [oc] + if ((common2 != null) && (otherCplength > cplength) && (c.p[common2] === otherC.p[common2])) { + // transform based on c + if (c.ld !== undefined) { + oc = clone(otherC); + oc.p = oc.p.slice(cplength); + c.ld = json.apply(clone(c.ld), [oc]); + } else if (c.od !== undefined) { + oc = clone(otherC); + oc.p = oc.p.slice(cplength); + c.od = json.apply(clone(c.od), [oc]); + } + } - if common? 
- commonOperand = cplength == otherCplength - # transform based on otherC - if otherC.na != undefined - # this case is handled above due to icky path hax - else if otherC.si != undefined || otherC.sd != undefined - # String op vs string op - pass through to text type - if c.si != undefined || c.sd != undefined - throw new Error("must be a string?") unless commonOperand + if (common != null) { + let from, p, to; + const commonOperand = cplength === otherCplength; + // transform based on otherC + if (otherC.na !== undefined) { + // this case is handled above due to icky path hax + } else if ((otherC.si !== undefined) || (otherC.sd !== undefined)) { + // String op vs string op - pass through to text type + if ((c.si !== undefined) || (c.sd !== undefined)) { + if (!commonOperand) { throw new Error("must be a string?"); } - # Convert an op component to a text op component - convert = (component) -> - newC = p:component.p[component.p.length - 1] - if component.si - newC.i = component.si - else - newC.d = component.sd - newC + // Convert an op component to a text op component + const convert = function(component) { + const newC = {p:component.p[component.p.length - 1]}; + if (component.si) { + newC.i = component.si; + } else { + newC.d = component.sd; + } + return newC; + }; - tc1 = convert c - tc2 = convert otherC + const tc1 = convert(c); + const tc2 = convert(otherC); - res = [] - text._tc res, tc1, tc2, type - for tc in res - jc = { p: c.p[...common] } - jc.p.push(tc.p) - jc.si = tc.i if tc.i? - jc.sd = tc.d if tc.d? - json.append dest, jc - return dest - else if otherC.li != undefined && otherC.ld != undefined - if otherC.p[common] == c.p[common] - # noop - if !commonOperand - # we're below the deleted element, so -> noop - return dest - else if c.ld != undefined - # we're trying to delete the same element, -> noop - if c.li != undefined and type == 'left' - # we're both replacing one element with another. only one can - # survive! - c.ld = clone otherC.li - else - return dest - else if otherC.li != undefined - if c.li != undefined and c.ld == undefined and commonOperand and c.p[common] == otherC.p[common] - # in li vs. li, left wins. - if type == 'right' - c.p[common]++ - else if otherC.p[common] <= c.p[common] - c.p[common]++ + const res = []; + text._tc(res, tc1, tc2, type); + for (let tc of Array.from(res)) { + const jc = { p: c.p.slice(0, common) }; + jc.p.push(tc.p); + if (tc.i != null) { jc.si = tc.i; } + if (tc.d != null) { jc.sd = tc.d; } + json.append(dest, jc); + } + return dest; + } + } else if ((otherC.li !== undefined) && (otherC.ld !== undefined)) { + if (otherC.p[common] === c.p[common]) { + // noop + if (!commonOperand) { + // we're below the deleted element, so -> noop + return dest; + } else if (c.ld !== undefined) { + // we're trying to delete the same element, -> noop + if ((c.li !== undefined) && (type === 'left')) { + // we're both replacing one element with another. only one can + // survive! + c.ld = clone(otherC.li); + } else { + return dest; + } + } + } + } else if (otherC.li !== undefined) { + if ((c.li !== undefined) && (c.ld === undefined) && commonOperand && (c.p[common] === otherC.p[common])) { + // in li vs. li, left wins. + if (type === 'right') { + c.p[common]++; + } + } else if (otherC.p[common] <= c.p[common]) { + c.p[common]++; + } - if c.lm != undefined - if commonOperand - # otherC edits the same list we edit - if otherC.p[common] <= c.lm - c.lm++ - # changing c.from is handled above. 
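
One concrete case from the `li` branch above: concurrent list inserts at the same index. The `'left'` op keeps its position and the `'right'` op is shifted past it (this assumes `json.transform` has been attached by `bootstrapTransform`, as the helpers file does for types with `transformComponent`):

```js
const json = require('./json');

const a = [{ p: ['list', 0], li: 'A' }];
const b = [{ p: ['list', 0], li: 'B' }];

console.log(json.transform(a, b, 'left'));  // [ { p: ['list', 0], li: 'A' } ]
console.log(json.transform(b, a, 'right')); // [ { p: ['list', 1], li: 'B' } ]
```
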
- else if otherC.ld != undefined - if c.lm != undefined - if commonOperand - if otherC.p[common] == c.p[common] - # they deleted the thing we're trying to move - return dest - # otherC edits the same list we edit - p = otherC.p[common] - from = c.p[common] - to = c.lm - if p < to || (p == to && from < to) - c.lm-- + if (c.lm !== undefined) { + if (commonOperand) { + // otherC edits the same list we edit + if (otherC.p[common] <= c.lm) { + c.lm++; + } + } + } + // changing c.from is handled above. + } else if (otherC.ld !== undefined) { + if (c.lm !== undefined) { + if (commonOperand) { + if (otherC.p[common] === c.p[common]) { + // they deleted the thing we're trying to move + return dest; + } + // otherC edits the same list we edit + p = otherC.p[common]; + from = c.p[common]; + to = c.lm; + if ((p < to) || ((p === to) && (from < to))) { + c.lm--; + } + } + } - if otherC.p[common] < c.p[common] - c.p[common]-- - else if otherC.p[common] == c.p[common] - if otherCplength < cplength - # we're below the deleted element, so -> noop - return dest - else if c.ld != undefined - if c.li != undefined - # we're replacing, they're deleting. we become an insert. - delete c.ld - else - # we're trying to delete the same element, -> noop - return dest - else if otherC.lm != undefined - if c.lm != undefined and cplength == otherCplength - # lm vs lm, here we go! - from = c.p[common] - to = c.lm - otherFrom = otherC.p[common] - otherTo = otherC.lm - if otherFrom != otherTo - # if otherFrom == otherTo, we don't need to change our op. + if (otherC.p[common] < c.p[common]) { + c.p[common]--; + } else if (otherC.p[common] === c.p[common]) { + if (otherCplength < cplength) { + // we're below the deleted element, so -> noop + return dest; + } else if (c.ld !== undefined) { + if (c.li !== undefined) { + // we're replacing, they're deleting. we become an insert. + delete c.ld; + } else { + // we're trying to delete the same element, -> noop + return dest; + } + } + } + } else if (otherC.lm !== undefined) { + if ((c.lm !== undefined) && (cplength === otherCplength)) { + // lm vs lm, here we go! + from = c.p[common]; + to = c.lm; + const otherFrom = otherC.p[common]; + const otherTo = otherC.lm; + if (otherFrom !== otherTo) { + // if otherFrom == otherTo, we don't need to change our op. - # where did my thing go? - if from == otherFrom - # they moved it! tie break. - if type == 'left' - c.p[common] = otherTo - if from == to # ugh - c.lm = otherTo - else - return dest - else - # they moved around it - if from > otherFrom - c.p[common]-- - if from > otherTo - c.p[common]++ - else if from == otherTo - if otherFrom > otherTo - c.p[common]++ - if from == to # ugh, again - c.lm++ + // where did my thing go? + if (from === otherFrom) { + // they moved it! tie break. + if (type === 'left') { + c.p[common] = otherTo; + if (from === to) { // ugh + c.lm = otherTo; + } + } else { + return dest; + } + } else { + // they moved around it + if (from > otherFrom) { + c.p[common]--; + } + if (from > otherTo) { + c.p[common]++; + } else if (from === otherTo) { + if (otherFrom > otherTo) { + c.p[common]++; + if (from === to) { // ugh, again + c.lm++; + } + } + } - # step 2: where am i going to put it? 
- if to > otherFrom - c.lm-- - else if to == otherFrom - if to > from - c.lm-- - if to > otherTo - c.lm++ - else if to == otherTo - # if we're both moving in the same direction, tie break - if (otherTo > otherFrom and to > from) or - (otherTo < otherFrom and to < from) - if type == 'right' - c.lm++ - else - if to > from - c.lm++ - else if to == otherFrom - c.lm-- - else if c.li != undefined and c.ld == undefined and commonOperand - # li - from = otherC.p[common] - to = otherC.lm - p = c.p[common] - if p > from - c.p[common]-- - if p > to - c.p[common]++ - else - # ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath - # the lm - # - # i.e. things care about where their item is after the move. - from = otherC.p[common] - to = otherC.lm - p = c.p[common] - if p == from - c.p[common] = to - else - if p > from - c.p[common]-- - if p > to - c.p[common]++ - else if p == to - if from > to - c.p[common]++ - else if otherC.oi != undefined && otherC.od != undefined - if c.p[common] == otherC.p[common] - if c.oi != undefined and commonOperand - # we inserted where someone else replaced - if type == 'right' - # left wins - return dest - else - # we win, make our op replace what they inserted - c.od = otherC.oi - else - # -> noop if the other component is deleting the same object (or any - # parent) - return dest - else if otherC.oi != undefined - if c.oi != undefined and c.p[common] == otherC.p[common] - # left wins if we try to insert at the same place - if type == 'left' - json.append dest, {p:c.p, od:otherC.oi} - else - return dest - else if otherC.od != undefined - if c.p[common] == otherC.p[common] - return dest if !commonOperand - if c.oi != undefined - delete c.od - else - return dest + // step 2: where am i going to put it? + if (to > otherFrom) { + c.lm--; + } else if (to === otherFrom) { + if (to > from) { + c.lm--; + } + } + if (to > otherTo) { + c.lm++; + } else if (to === otherTo) { + // if we're both moving in the same direction, tie break + if (((otherTo > otherFrom) && (to > from)) || + ((otherTo < otherFrom) && (to < from))) { + if (type === 'right') { + c.lm++; + } + } else { + if (to > from) { + c.lm++; + } else if (to === otherFrom) { + c.lm--; + } + } + } + } + } + } else if ((c.li !== undefined) && (c.ld === undefined) && commonOperand) { + // li + from = otherC.p[common]; + to = otherC.lm; + p = c.p[common]; + if (p > from) { + c.p[common]--; + } + if (p > to) { + c.p[common]++; + } + } else { + // ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath + // the lm + // + // i.e. things care about where their item is after the move. 
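// Worked example of the lm-vs-lm case above, on a hypothetical list
// ['a', 'b', 'c']: c moves index 0 -> 2 while otherC concurrently moves
// index 2 -> 0, so from = 0, to = 2, otherFrom = 2, otherTo = 0. Tracing the
// branches: from === otherTo and otherFrom > otherTo, so c.p[common]++ (our
// 'a' slid right to index 1); then to === otherFrom and to > from, so c.lm--,
// and to > otherTo, so c.lm++ again. The transformed op {p: [1], lm: 2} still
// moves 'a' to the tail of the list otherC produced.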
+ from = otherC.p[common]; + to = otherC.lm; + p = c.p[common]; + if (p === from) { + c.p[common] = to; + } else { + if (p > from) { + c.p[common]--; + } + if (p > to) { + c.p[common]++; + } else if (p === to) { + if (from > to) { + c.p[common]++; + } + } + } + } + } else if ((otherC.oi !== undefined) && (otherC.od !== undefined)) { + if (c.p[common] === otherC.p[common]) { + if ((c.oi !== undefined) && commonOperand) { + // we inserted where someone else replaced + if (type === 'right') { + // left wins + return dest; + } else { + // we win, make our op replace what they inserted + c.od = otherC.oi; + } + } else { + // -> noop if the other component is deleting the same object (or any + // parent) + return dest; + } + } + } else if (otherC.oi !== undefined) { + if ((c.oi !== undefined) && (c.p[common] === otherC.p[common])) { + // left wins if we try to insert at the same place + if (type === 'left') { + json.append(dest, {p:c.p, od:otherC.oi}); + } else { + return dest; + } + } + } else if (otherC.od !== undefined) { + if (c.p[common] === otherC.p[common]) { + if (!commonOperand) { return dest; } + if (c.oi !== undefined) { + delete c.od; + } else { + return dest; + } + } + } + } - json.append dest, c - return dest + json.append(dest, c); + return dest; +}; -if WEB? - exports.types ||= {} +if (typeof WEB !== 'undefined' && WEB !== null) { + if (!exports.types) { exports.types = {}; } - # This is kind of awful - come up with a better way to hook this helper code up. - exports._bt(json, json.transformComponent, json.checkValidOp, json.append) + // This is kind of awful - come up with a better way to hook this helper code up. + exports._bt(json, json.transformComponent, json.checkValidOp, json.append); - # [] is used to prevent closure from renaming types.text - exports.types.json = json -else - module.exports = json + // [] is used to prevent closure from renaming types.text + exports.types.json = json; +} else { + module.exports = json; - require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append) + require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append); +} diff --git a/services/document-updater/app/coffee/sharejs/model.js b/services/document-updater/app/coffee/sharejs/model.js index 284d6fd770..9b6e65effd 100644 --- a/services/document-updater/app/coffee/sharejs/model.js +++ b/services/document-updater/app/coffee/sharejs/model.js @@ -1,603 +1,699 @@ -# The model of all the ops. Responsible for applying & transforming remote deltas -# and managing the storage layer. -# -# Actual storage is handled by the database wrappers in db/*, wrapped by DocCache +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS104: Avoid inline assignments + * DS204: Change includes calls to have a more natural evaluation order + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// The model of all the ops. Responsible for applying & transforming remote deltas +// and managing the storage layer. 
+//
+// Actual storage is handled by the database wrappers in db/*, wrapped by DocCache
-{EventEmitter} = require 'events'
+let Model;
+const {EventEmitter} = require('events');
-queue = require './syncqueue'
-types = require '../types'
+const queue = require('./syncqueue');
+const types = require('../types');
-isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]'
+const isArray = o => Object.prototype.toString.call(o) === '[object Array]';
-# This constructor creates a new Model object. There will be one model object
-# per server context.
-#
-# The model object is responsible for a lot of things:
-#
-# - It manages the interactions with the database
-# - It maintains (in memory) a set of all active documents
-# - It calls out to the OT functions when necessary
-#
-# The model is an event emitter. It emits the following events:
-#
-# create(docName, data): A document has been created with the specified name & data
-module.exports = Model = (db, options) ->
-  # db can be null if the user doesn't want persistance.
+// This constructor creates a new Model object. There will be one model object
+// per server context.
+//
+// The model object is responsible for a lot of things:
+//
+// - It manages the interactions with the database
+// - It maintains (in memory) a set of all active documents
+// - It calls out to the OT functions when necessary
+//
+// The model is an event emitter. It emits the following events:
+//
+// create(docName, data): A document has been created with the specified name & data
+module.exports = (Model = function(db, options) {
+  // db can be null if the user doesn't want persistence.
-  return new Model(db, options) if !(this instanceof Model)
+  let getOps;
+  if (!(this instanceof Model)) { return new Model(db, options); }
-  model = this
+  const model = this;
-  options ?= {}
+  if (options == null) { options = {}; }
-  # This is a cache of 'live' documents.
-  #
-  # The cache is a map from docName -> {
-  #   ops:[{op, meta}]
-  #   snapshot
-  #   type
-  #   v
-  #   meta
-  #   eventEmitter
-  #   reapTimer
-  #   committedVersion: v
-  #   snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant
-  #   dbMeta: database specific data
-  #   opQueue: syncQueue for processing ops
-  # }
-  #
-  # The ops list contains the document's last options.numCachedOps ops. (Or all
-  # of them if we're using a memory store).
-  #
-  # Documents are stored in this set so long as the document has been accessed in
-  # the last few seconds (options.reapTime) OR at least one client has the document
-  # open. I don't know if I should keep open (but not being edited) documents live -
-  # maybe if a client has a document open but the document isn't being edited, I should
-  # flush it from the cache.
-  #
-  # In any case, the API to model is designed such that if we want to change that later
-  # it should be pretty easy to do so without any external-to-the-model code changes.
-  docs = {}
+  // This is a cache of 'live' documents.
+  //
+  // The cache is a map from docName -> {
+  //   ops:[{op, meta}]
+  //   snapshot
+  //   type
+  //   v
+  //   meta
+  //   eventEmitter
+  //   reapTimer
+  //   committedVersion: v
+  //   snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant
+  //   dbMeta: database specific data
+  //   opQueue: syncQueue for processing ops
+  // }
+  //
+  // The ops list contains the document's last options.numCachedOps ops. (Or all
+  // of them if we're using a memory store).
+  //
+  // Documents are stored in this set so long as the document has been accessed in
+  // the last few seconds (options.reapTime) OR at least one client has the document
+  // open. I don't know if I should keep open (but not being edited) documents live -
+  // maybe if a client has a document open but the document isn't being edited, I should
+  // flush it from the cache.
+  //
+  // In any case, the API to model is designed such that if we want to change that later
+  // it should be pretty easy to do so without any external-to-the-model code changes.
+  const docs = {};
-  # This is a map from docName -> [callback]. It is used when a document hasn't been
-  # cached and multiple getSnapshot() / getVersion() requests come in. All requests
-  # are added to the callback list and called when db.getSnapshot() returns.
-  #
-  # callback(error, snapshot data)
-  awaitingGetSnapshot = {}
+  // This is a map from docName -> [callback]. It is used when a document hasn't been
+  // cached and multiple getSnapshot() / getVersion() requests come in. All requests
+  // are added to the callback list and called when db.getSnapshot() returns.
+  //
+  // callback(error, snapshot data)
+  const awaitingGetSnapshot = {};
-  # The time that documents which no clients have open will stay in the cache.
-  # Should be > 0.
-  options.reapTime ?= 3000
+  // The time that documents which no clients have open will stay in the cache.
+  // Should be > 0.
+  if (options.reapTime == null) { options.reapTime = 3000; }
-  # The number of operations the cache holds before reusing the space
-  options.numCachedOps ?= 10
+  // The number of operations the cache holds before reusing the space
+  if (options.numCachedOps == null) { options.numCachedOps = 10; }
-  # This option forces documents to be reaped, even when there's no database backend.
-  # This is useful when you don't care about persistance and don't want to gradually
-  # fill memory.
-  #
-  # You might want to set reapTime to a day or something.
-  options.forceReaping ?= false
+  // This option forces documents to be reaped, even when there's no database backend.
+  // This is useful when you don't care about persistence and don't want to gradually
+  // fill memory.
+  //
+  // You might want to set reapTime to a day or something.
+  if (options.forceReaping == null) { options.forceReaping = false; }
-  # Until I come up with a better strategy, we'll save a copy of the document snapshot
-  # to the database every ~20 submitted ops.
-  options.opsBeforeCommit ?= 20
+  // Until I come up with a better strategy, we'll save a copy of the document snapshot
+  // to the database every ~20 submitted ops.
+  if (options.opsBeforeCommit == null) { options.opsBeforeCommit = 20; }
-  # It takes some processing time to transform client ops. The server will punt ops back to the
-  # client to transform if they're too old.
-  options.maximumAge ?= 40
+  // It takes some processing time to transform client ops. The server will punt ops back to the
+  // client to transform if they're too old.
+  if (options.maximumAge == null) { options.maximumAge = 40; }
-  # **** Cache API methods
+  // **** Cache API methods
-  # Its important that all ops are applied in order. This helper method creates the op submission queue
-  # for a single document. This contains the logic for transforming & applying ops.
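// Usage sketch for the options handled above (a memory-only model; db may be
// null when persistence isn't wanted):
//
//   const model = new Model(null, {
//     reapTime: 60 * 1000,   // keep idle docs cached for a minute
//     numCachedOps: 50,      // remember the last 50 ops per doc
//     forceReaping: true,    // reap even without a db backend
//     opsBeforeCommit: 20,   // snapshot every ~20 submitted ops
//     maximumAge: 40         // punt ops older than 40 versions back to clients
//   });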
- makeOpQueue = (docName, doc) -> queue (opData, callback) -> - return callback 'Version missing' unless opData.v >= 0 - return callback 'Op at future version' if opData.v > doc.v + // Its important that all ops are applied in order. This helper method creates the op submission queue + // for a single document. This contains the logic for transforming & applying ops. + const makeOpQueue = (docName, doc) => queue(function(opData, callback) { + if (!(opData.v >= 0)) { return callback('Version missing'); } + if (opData.v > doc.v) { return callback('Op at future version'); } - # Punt the transforming work back to the client if the op is too old. - return callback 'Op too old' if opData.v + options.maximumAge < doc.v + // Punt the transforming work back to the client if the op is too old. + if ((opData.v + options.maximumAge) < doc.v) { return callback('Op too old'); } - opData.meta ||= {} - opData.meta.ts = Date.now() + if (!opData.meta) { opData.meta = {}; } + opData.meta.ts = Date.now(); - # We'll need to transform the op to the current version of the document. This - # calls the callback immediately if opVersion == doc.v. - getOps docName, opData.v, doc.v, (error, ops) -> - return callback error if error + // We'll need to transform the op to the current version of the document. This + // calls the callback immediately if opVersion == doc.v. + return getOps(docName, opData.v, doc.v, function(error, ops) { + let snapshot; + if (error) { return callback(error); } - unless doc.v - opData.v == ops.length - # This should never happen. It indicates that we didn't get all the ops we - # asked for. Its important that the submitted op is correctly transformed. - console.error "Could not get old ops in model for document #{docName}" - console.error "Expected ops #{opData.v} to #{doc.v} and got #{ops.length} ops" - return callback 'Internal error' + if ((doc.v - opData.v) !== ops.length) { + // This should never happen. It indicates that we didn't get all the ops we + // asked for. Its important that the submitted op is correctly transformed. + console.error(`Could not get old ops in model for document ${docName}`); + console.error(`Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`); + return callback('Internal error'); + } - if ops.length > 0 - try - # If there's enough ops, it might be worth spinning this out into a webworker thread. - for oldOp in ops - # Dup detection works by sending the id(s) the op has been submitted with previously. - # If the id matches, we reject it. The client can also detect the op has been submitted - # already if it sees its own previous id in the ops it sees when it does catchup. - if oldOp.meta.source and opData.dupIfSource and oldOp.meta.source in opData.dupIfSource - return callback 'Op already submitted' + if (ops.length > 0) { + try { + // If there's enough ops, it might be worth spinning this out into a webworker thread. + for (let oldOp of Array.from(ops)) { + // Dup detection works by sending the id(s) the op has been submitted with previously. + // If the id matches, we reject it. The client can also detect the op has been submitted + // already if it sees its own previous id in the ops it sees when it does catchup. 
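// The dup detection described above in a sketch: a client that resubmits an op
// after a lost ACK includes the source ids it previously used, and the replay
// is rejected instead of being applied twice (hypothetical id):
//
//   opData.dupIfSource = ['client-42'];
//   // any cached oldOp with meta.source === 'client-42' => callback('Op already submitted')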
+ if (oldOp.meta.source && opData.dupIfSource && Array.from(opData.dupIfSource).includes(oldOp.meta.source)) { + return callback('Op already submitted'); + } - opData.op = doc.type.transform opData.op, oldOp.op, 'left' - opData.v++ - catch error - console.error error.stack - return callback error.message + opData.op = doc.type.transform(opData.op, oldOp.op, 'left'); + opData.v++; + } + } catch (error1) { + error = error1; + console.error(error.stack); + return callback(error.message); + } + } - try - snapshot = doc.type.apply doc.snapshot, opData.op - catch error - console.error error.stack - return callback error.message + try { + snapshot = doc.type.apply(doc.snapshot, opData.op); + } catch (error2) { + error = error2; + console.error(error.stack); + return callback(error.message); + } - # The op data should be at the current version, and the new document data should be at - # the next version. - # - # This should never happen in practice, but its a nice little check to make sure everything - # is hunky-dory. - unless opData.v == doc.v - # This should never happen. - console.error "Version mismatch detected in model. File a ticket - this is a bug." - console.error "Expecting #{opData.v} == #{doc.v}" - return callback 'Internal error' + // The op data should be at the current version, and the new document data should be at + // the next version. + // + // This should never happen in practice, but its a nice little check to make sure everything + // is hunky-dory. + if (opData.v !== doc.v) { + // This should never happen. + console.error("Version mismatch detected in model. File a ticket - this is a bug."); + console.error(`Expecting ${opData.v} == ${doc.v}`); + return callback('Internal error'); + } - #newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} - writeOp = db?.writeOp or (docName, newOpData, callback) -> callback() + //newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); - writeOp docName, opData, (error) -> - if error - # The user should probably know about this. - console.warn "Error writing ops to database: #{error}" - return callback error + return writeOp(docName, opData, function(error) { + if (error) { + // The user should probably know about this. + console.warn(`Error writing ops to database: ${error}`); + return callback(error); + } - options.stats?.writeOp?() + __guardMethod__(options.stats, 'writeOp', o => o.writeOp()); - # This is needed when we emit the 'change' event, below. - oldSnapshot = doc.snapshot + // This is needed when we emit the 'change' event, below. + const oldSnapshot = doc.snapshot; - # All the heavy lifting is now done. Finally, we'll update the cache with the new data - # and (maybe!) save a new document snapshot to the database. + // All the heavy lifting is now done. Finally, we'll update the cache with the new data + // and (maybe!) save a new document snapshot to the database. 
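// Concretely, the transform loop above rebases a stale submission: if a client
// sends an op against version 8 while the server doc is at version 10, getOps
// returns the two intervening ops and the submitted op is transformed 'left'
// over each one, incrementing opData.v as it goes (hypothetical versions):
//
//   opData = {v: 8, op: [...]}   // client's view
//   // after the loop: opData.v === 10 and opData.op applies to the v10 snapshot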
- doc.v = opData.v + 1 - doc.snapshot = snapshot + doc.v = opData.v + 1; + doc.snapshot = snapshot; - doc.ops.push opData - doc.ops.shift() if db and doc.ops.length > options.numCachedOps + doc.ops.push(opData); + if (db && (doc.ops.length > options.numCachedOps)) { doc.ops.shift(); } - model.emit 'applyOp', docName, opData, snapshot, oldSnapshot - doc.eventEmitter.emit 'op', opData, snapshot, oldSnapshot + model.emit('applyOp', docName, opData, snapshot, oldSnapshot); + doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot); - # The callback is called with the version of the document at which the op was applied. - # This is the op.v after transformation, and its doc.v - 1. - callback null, opData.v + // The callback is called with the version of the document at which the op was applied. + // This is the op.v after transformation, and its doc.v - 1. + callback(null, opData.v); - # I need a decent strategy here for deciding whether or not to save the snapshot. - # - # The 'right' strategy looks something like "Store the snapshot whenever the snapshot - # is smaller than the accumulated op data". For now, I'll just store it every 20 - # ops or something. (Configurable with doc.committedVersion) - if !doc.snapshotWriteLock and doc.committedVersion + options.opsBeforeCommit <= doc.v - tryWriteSnapshot docName, (error) -> - console.warn "Error writing snapshot #{error}. This is nonfatal" if error + // I need a decent strategy here for deciding whether or not to save the snapshot. + // + // The 'right' strategy looks something like "Store the snapshot whenever the snapshot + // is smaller than the accumulated op data". For now, I'll just store it every 20 + // ops or something. (Configurable with doc.committedVersion) + if (!doc.snapshotWriteLock && ((doc.committedVersion + options.opsBeforeCommit) <= doc.v)) { + return tryWriteSnapshot(docName, function(error) { + if (error) { return console.warn(`Error writing snapshot ${error}. This is nonfatal`); } + }); + } + }); + }); + }); - # Add the data for the given docName to the cache. The named document shouldn't already - # exist in the doc set. - # - # Returns the new doc. - add = (docName, error, data, committedVersion, ops, dbMeta) -> - callbacks = awaitingGetSnapshot[docName] - delete awaitingGetSnapshot[docName] + // Add the data for the given docName to the cache. The named document shouldn't already + // exist in the doc set. + // + // Returns the new doc. + const add = function(docName, error, data, committedVersion, ops, dbMeta) { + let callback, doc; + const callbacks = awaitingGetSnapshot[docName]; + delete awaitingGetSnapshot[docName]; - if error - callback error for callback in callbacks if callbacks - else - doc = docs[docName] = - snapshot: data.snapshot - v: data.v - type: data.type - meta: data.meta + if (error) { + if (callbacks) { for (callback of Array.from(callbacks)) { callback(error); } } + } else { + doc = (docs[docName] = { + snapshot: data.snapshot, + v: data.v, + type: data.type, + meta: data.meta, - # Cache of ops - ops: ops or [] + // Cache of ops + ops: ops || [], - eventEmitter: new EventEmitter + eventEmitter: new EventEmitter, - # Timer before the document will be invalidated from the cache (if the document has no - # listeners) - reapTimer: null + // Timer before the document will be invalidated from the cache (if the document has no + // listeners) + reapTimer: null, - # Version of the snapshot thats in the database - committedVersion: committedVersion ? 
data.v - snapshotWriteLock: false - dbMeta: dbMeta + // Version of the snapshot thats in the database + committedVersion: committedVersion != null ? committedVersion : data.v, + snapshotWriteLock: false, + dbMeta + }); - doc.opQueue = makeOpQueue docName, doc + doc.opQueue = makeOpQueue(docName, doc); - refreshReapingTimeout docName - model.emit 'add', docName, data - callback null, doc for callback in callbacks if callbacks + refreshReapingTimeout(docName); + model.emit('add', docName, data); + if (callbacks) { for (callback of Array.from(callbacks)) { callback(null, doc); } } + } - doc + return doc; + }; - # This is a little helper wrapper around db.getOps. It does two things: - # - # - If there's no database set, it returns an error to the callback - # - It adds version numbers to each op returned from the database - # (These can be inferred from context so the DB doesn't store them, but its useful to have them). - getOpsInternal = (docName, start, end, callback) -> - return callback? 'Document does not exist' unless db + // This is a little helper wrapper around db.getOps. It does two things: + // + // - If there's no database set, it returns an error to the callback + // - It adds version numbers to each op returned from the database + // (These can be inferred from context so the DB doesn't store them, but its useful to have them). + const getOpsInternal = function(docName, start, end, callback) { + if (!db) { return (typeof callback === 'function' ? callback('Document does not exist') : undefined); } - db.getOps docName, start, end, (error, ops) -> - return callback? error if error + return db.getOps(docName, start, end, function(error, ops) { + if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } - v = start - op.v = v++ for op in ops + let v = start; + for (let op of Array.from(ops)) { op.v = v++; } - callback? null, ops + return (typeof callback === 'function' ? callback(null, ops) : undefined); + }); + }; - # Load the named document into the cache. This function is re-entrant. - # - # The callback is called with (error, doc) - load = (docName, callback) -> - if docs[docName] - # The document is already loaded. Return immediately. - options.stats?.cacheHit? 'getSnapshot' - return callback null, docs[docName] + // Load the named document into the cache. This function is re-entrant. + // + // The callback is called with (error, doc) + const load = function(docName, callback) { + if (docs[docName]) { + // The document is already loaded. Return immediately. + __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')); + return callback(null, docs[docName]); + } - # We're a memory store. If we don't have it, nobody does. - return callback 'Document does not exist' unless db + // We're a memory store. If we don't have it, nobody does. + if (!db) { return callback('Document does not exist'); } - callbacks = awaitingGetSnapshot[docName] + const callbacks = awaitingGetSnapshot[docName]; - # The document is being loaded already. Add ourselves as a callback. - return callbacks.push callback if callbacks + // The document is being loaded already. Add ourselves as a callback. + if (callbacks) { return callbacks.push(callback); } - options.stats?.cacheMiss? 'getSnapshot' + __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot')); - # The document isn't loaded and isn't being loaded. Load it. 
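// getOpsInternal in a sketch: the database stores ops without version numbers,
// so they are re-attached from the requested range (hypothetical values):
//
//   getOpsInternal('doc', 5, 8, (error, ops) => {
//     // ops[0].v === 5, ops[1].v === 6, ops[2].v === 7
//   });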
- awaitingGetSnapshot[docName] = [callback] - db.getSnapshot docName, (error, data, dbMeta) -> - return add docName, error if error + // The document isn't loaded and isn't being loaded. Load it. + awaitingGetSnapshot[docName] = [callback]; + return db.getSnapshot(docName, function(error, data, dbMeta) { + if (error) { return add(docName, error); } - type = types[data.type] - unless type - console.warn "Type '#{data.type}' missing" - return callback "Type not found" - data.type = type + const type = types[data.type]; + if (!type) { + console.warn(`Type '${data.type}' missing`); + return callback("Type not found"); + } + data.type = type; - committedVersion = data.v + const committedVersion = data.v; - # The server can close without saving the most recent document snapshot. - # In this case, there are extra ops which need to be applied before - # returning the snapshot. - getOpsInternal docName, data.v, null, (error, ops) -> - return callback error if error + // The server can close without saving the most recent document snapshot. + // In this case, there are extra ops which need to be applied before + // returning the snapshot. + return getOpsInternal(docName, data.v, null, function(error, ops) { + if (error) { return callback(error); } - if ops.length > 0 - console.log "Catchup #{docName} #{data.v} -> #{data.v + ops.length}" + if (ops.length > 0) { + console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); - try - for op in ops - data.snapshot = type.apply data.snapshot, op.op - data.v++ - catch e - # This should never happen - it indicates that whats in the - # database is invalid. - console.error "Op data invalid for #{docName}: #{e.stack}" - return callback 'Op data invalid' + try { + for (let op of Array.from(ops)) { + data.snapshot = type.apply(data.snapshot, op.op); + data.v++; + } + } catch (e) { + // This should never happen - it indicates that whats in the + // database is invalid. + console.error(`Op data invalid for ${docName}: ${e.stack}`); + return callback('Op data invalid'); + } + } - model.emit 'load', docName, data - add docName, error, data, committedVersion, ops, dbMeta + model.emit('load', docName, data); + return add(docName, error, data, committedVersion, ops, dbMeta); + }); + }); + }; - # This makes sure the cache contains a document. If the doc cache doesn't contain - # a document, it is loaded from the database and stored. - # - # Documents are stored so long as either: - # - They have been accessed within the past #{PERIOD} - # - At least one client has the document open - refreshReapingTimeout = (docName) -> - doc = docs[docName] - return unless doc + // This makes sure the cache contains a document. If the doc cache doesn't contain + // a document, it is loaded from the database and stored. + // + // Documents are stored so long as either: + // - They have been accessed within the past #{PERIOD} + // - At least one client has the document open + var refreshReapingTimeout = function(docName) { + const doc = docs[docName]; + if (!doc) { return; } - # I want to let the clients list be updated before this is called. - process.nextTick -> - # This is an awkward way to find out the number of clients on a document. If this - # causes performance issues, add a numClients field to the document. - # - # The first check is because its possible that between refreshReapingTimeout being called and this - # event being fired, someone called delete() on the document and hence the doc is something else now. 
- if doc == docs[docName] and - doc.eventEmitter.listeners('op').length == 0 and - (db or options.forceReaping) and - doc.opQueue.busy is false + // I want to let the clients list be updated before this is called. + return process.nextTick(function() { + // This is an awkward way to find out the number of clients on a document. If this + // causes performance issues, add a numClients field to the document. + // + // The first check is because its possible that between refreshReapingTimeout being called and this + // event being fired, someone called delete() on the document and hence the doc is something else now. + if ((doc === docs[docName]) && + (doc.eventEmitter.listeners('op').length === 0) && + (db || options.forceReaping) && + (doc.opQueue.busy === false)) { - clearTimeout doc.reapTimer - doc.reapTimer = reapTimer = setTimeout -> - tryWriteSnapshot docName, -> - # If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - # in the middle of applying an operation, don't reap. - delete docs[docName] if docs[docName].reapTimer is reapTimer and doc.opQueue.busy is false - , options.reapTime + let reapTimer; + clearTimeout(doc.reapTimer); + return doc.reapTimer = (reapTimer = setTimeout(() => tryWriteSnapshot(docName, function() { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ((docs[docName].reapTimer === reapTimer) && (doc.opQueue.busy === false)) { return delete docs[docName]; } + }) + , options.reapTime)); + } + }); + }; - tryWriteSnapshot = (docName, callback) -> - return callback?() unless db + var tryWriteSnapshot = function(docName, callback) { + if (!db) { return (typeof callback === 'function' ? callback() : undefined); } - doc = docs[docName] + const doc = docs[docName]; - # The doc is closed - return callback?() unless doc + // The doc is closed + if (!doc) { return (typeof callback === 'function' ? callback() : undefined); } - # The document is already saved. - return callback?() if doc.committedVersion is doc.v + // The document is already saved. + if (doc.committedVersion === doc.v) { return (typeof callback === 'function' ? callback() : undefined); } - return callback? 'Another snapshot write is in progress' if doc.snapshotWriteLock + if (doc.snapshotWriteLock) { return (typeof callback === 'function' ? callback('Another snapshot write is in progress') : undefined); } - doc.snapshotWriteLock = true + doc.snapshotWriteLock = true; - options.stats?.writeSnapshot?() + __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()); - writeSnapshot = db?.writeSnapshot or (docName, docData, dbMeta, callback) -> callback() + const writeSnapshot = (db != null ? db.writeSnapshot : undefined) || ((docName, docData, dbMeta, callback) => callback()); - data = - v: doc.v - meta: doc.meta - snapshot: doc.snapshot - # The database doesn't know about object types. + const data = { + v: doc.v, + meta: doc.meta, + snapshot: doc.snapshot, + // The database doesn't know about object types. type: doc.type.name + }; - # Commit snapshot. - writeSnapshot docName, data, doc.dbMeta, (error, dbMeta) -> - doc.snapshotWriteLock = false + // Commit snapshot. + return writeSnapshot(docName, data, doc.dbMeta, function(error, dbMeta) { + doc.snapshotWriteLock = false; - # We have to use data.v here because the version in the doc could - # have been updated between the call to writeSnapshot() and now. 
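// Timeline sketch of the reaping logic above: a doc with no 'op' listeners is
// given a reapTimer; when it fires, the snapshot is flushed and the entry is
// evicted only if the timer wasn't refreshed in the meantime and no op is
// mid-flight:
//
//   last access --(reapTime ms)--> tryWriteSnapshot --> delete docs[docName]
//                                  (skipped if reapTimer changed or opQueue.busy)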
-      doc.committedVersion = data.v
-      doc.dbMeta = dbMeta
+      // We have to use data.v here because the version in the doc could
+      // have been updated between the call to writeSnapshot() and now.
+      doc.committedVersion = data.v;
+      doc.dbMeta = dbMeta;
-      callback? error
+      return (typeof callback === 'function' ? callback(error) : undefined);
+    });
+  };
-  # *** Model interface methods
+  // *** Model interface methods
-  # Create a new document.
-  #
-  # data should be {snapshot, type, [meta]}. The version of a new document is 0.
-  @create = (docName, type, meta, callback) ->
-    [meta, callback] = [{}, meta] if typeof meta is 'function'
+  // Create a new document.
+  //
+  // data should be {snapshot, type, [meta]}. The version of a new document is 0.
+  this.create = function(docName, type, meta, callback) {
+    if (typeof meta === 'function') { [meta, callback] = Array.from([{}, meta]); }
-    return callback? 'Invalid document name' if docName.match /\//
-    return callback? 'Document already exists' if docs[docName]
+    if (docName.match(/\//)) { return (typeof callback === 'function' ? callback('Invalid document name') : undefined); }
+    if (docs[docName]) { return (typeof callback === 'function' ? callback('Document already exists') : undefined); }
-    type = types[type] if typeof type == 'string'
-    return callback? 'Type not found' unless type
+    if (typeof type === 'string') { type = types[type]; }
+    if (!type) { return (typeof callback === 'function' ? callback('Type not found') : undefined); }
-    data =
-      snapshot:type.create()
-      type:type.name
-      meta:meta or {}
+    const data = {
+      snapshot:type.create(),
+      type:type.name,
+      meta:meta || {},
       v:0
+    };
-    done = (error, dbMeta) ->
-      # dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
-      return callback? error if error
+    const done = function(error, dbMeta) {
+      // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
+      if (error) { return (typeof callback === 'function' ? callback(error) : undefined); }
-      # From here on we'll store the object version of the type name.
-      data.type = type
-      add docName, null, data, 0, [], dbMeta
-      model.emit 'create', docName, data
-      callback?()
+      // From here on we'll store the object version of the type name.
+      data.type = type;
+      add(docName, null, data, 0, [], dbMeta);
+      model.emit('create', docName, data);
+      return (typeof callback === 'function' ? callback() : undefined);
+    };
-    if db
-      db.create docName, data, done
-    else
-      done()
+    if (db) {
+      return db.create(docName, data, done);
+    } else {
+      return done();
+    }
+  };
-  # Perminantly deletes the specified document.
-  # If listeners are attached, they are removed.
-  #
-  # The callback is called with (error) if there was an error. If error is null / undefined, the
-  # document was deleted.
-  #
-  # WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
-  # deletion. Subsequent op submissions will fail).
-  @delete = (docName, callback) ->
-    doc = docs[docName]
+  // Permanently deletes the specified document.
+  // If listeners are attached, they are removed.
+  //
+  // The callback is called with (error) if there was an error. If error is null / undefined, the
+  // document was deleted.
+  //
+  // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
+  // deletion. Subsequent op submissions will fail).
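// Usage sketch for create/delete (hypothetical doc name): new documents start
// at v:0 with type.create() as their snapshot.
//
//   model.create('scratch', 'text', {}, (error) => {
//     model.delete('scratch', (error) => { /* gone from cache and db */ });
//   });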
+  this.delete = function(docName, callback) {
+    const doc = docs[docName];
-    if doc
-      clearTimeout doc.reapTimer
-      delete docs[docName]
+    if (doc) {
+      clearTimeout(doc.reapTimer);
+      delete docs[docName];
+    }
-    done = (error) ->
-      model.emit 'delete', docName unless error
-      callback? error
+    const done = function(error) {
+      if (!error) { model.emit('delete', docName); }
+      return (typeof callback === 'function' ? callback(error) : undefined);
+    };
-    if db
-      db.delete docName, doc?.dbMeta, done
-    else
-      done (if !doc then 'Document does not exist')
+    if (db) {
+      return db.delete(docName, doc != null ? doc.dbMeta : undefined, done);
+    } else {
+      return done((!doc ? 'Document does not exist' : undefined));
+    }
+  };
-  # This gets all operations from [start...end]. (That is, its not inclusive.)
-  #
-  # end can be null. This means 'get me all ops from start'.
-  #
-  # Each op returned is in the form {op:o, meta:m, v:version}.
-  #
-  # Callback is called with (error, [ops])
-  #
-  # If the document does not exist, getOps doesn't necessarily return an error. This is because
-  # its awkward to figure out whether or not the document exists for things
-  # like the redis database backend. I guess its a bit gross having this inconsistant
-  # with the other DB calls, but its certainly convenient.
-  #
-  # Use getVersion() to determine if a document actually exists, if thats what you're
-  # after.
-  @getOps = getOps = (docName, start, end, callback) ->
-    # getOps will only use the op cache if its there. It won't fill the op cache in.
-    throw new Error 'start must be 0+' unless start >= 0
+  // This gets all operations from [start...end]. (That is, its not inclusive.)
+  //
+  // end can be null. This means 'get me all ops from start'.
+  //
+  // Each op returned is in the form {op:o, meta:m, v:version}.
+  //
+  // Callback is called with (error, [ops])
+  //
+  // If the document does not exist, getOps doesn't necessarily return an error. This is because
+  // its awkward to figure out whether or not the document exists for things
+  // like the redis database backend. I guess its a bit gross having this inconsistent
+  // with the other DB calls, but its certainly convenient.
+  //
+  // Use getVersion() to determine if a document actually exists, if thats what you're
+  // after.
+  this.getOps = (getOps = function(docName, start, end, callback) {
+    // getOps will only use the op cache if its there. It won't fill the op cache in.
+    if (!(start >= 0)) { throw new Error('start must be 0+'); }
-    [end, callback] = [null, end] if typeof end is 'function'
+    if (typeof end === 'function') { [end, callback] = Array.from([null, end]); }
-    ops = docs[docName]?.ops
+    const ops = docs[docName] != null ? docs[docName].ops : undefined;
-    if ops
-      version = docs[docName].v
+    if (ops) {
+      const version = docs[docName].v;
-      # Ops contains an array of ops. The last op in the list is the last op applied
-      end ?= version
-      start = Math.min start, end
+      // Ops contains an array of ops. The last op in the list is the last op applied
+      if (end == null) { end = version; }
+      start = Math.min(start, end);
-      return callback null, [] if start == end
+      if (start === end) { return callback(null, []); }
-      # Base is the version number of the oldest op we have cached
-      base = version - ops.length
+      // Base is the version number of the oldest op we have cached
+      const base = version - ops.length;
-      # If the database is null, we'll trim to the ops we do have and hope thats enough.
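// getOps range semantics in a sketch: [start...end) is half-open, end may be
// omitted to mean "everything from start", and a missing doc is not guaranteed
// to produce an error:
//
//   model.getOps('doc', 5, 8, (error, ops) => { /* ops for versions 5, 6, 7 */ });
//   model.getOps('doc', 5, (error, ops) => { /* 5 up to the current version */ });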
- if start >= base or db is null - refreshReapingTimeout docName - options.stats?.cacheHit 'getOps' + // If the database is null, we'll trim to the ops we do have and hope thats enough. + if ((start >= base) || (db === null)) { + refreshReapingTimeout(docName); + if (options.stats != null) { + options.stats.cacheHit('getOps'); + } - return callback null, ops[(start - base)...(end - base)] + return callback(null, ops.slice((start - base), (end - base))); + } + } - options.stats?.cacheMiss 'getOps' + if (options.stats != null) { + options.stats.cacheMiss('getOps'); + } - getOpsInternal docName, start, end, callback + return getOpsInternal(docName, start, end, callback); + }); - # Gets the snapshot data for the specified document. - # getSnapshot(docName, callback) - # Callback is called with (error, {v: , type: , snapshot: , meta: }) - @getSnapshot = (docName, callback) -> - load docName, (error, doc) -> - callback error, if doc then {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} + // Gets the snapshot data for the specified document. + // getSnapshot(docName, callback) + // Callback is called with (error, {v: , type: , snapshot: , meta: }) + this.getSnapshot = (docName, callback) => load(docName, (error, doc) => callback(error, doc ? {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} : undefined)); - # Gets the latest version # of the document. - # getVersion(docName, callback) - # callback is called with (error, version). - @getVersion = (docName, callback) -> - load docName, (error, doc) -> callback error, doc?.v + // Gets the latest version # of the document. + // getVersion(docName, callback) + // callback is called with (error, version). + this.getVersion = (docName, callback) => load(docName, (error, doc) => callback(error, doc != null ? doc.v : undefined)); - # Apply an op to the specified document. - # The callback is passed (error, applied version #) - # opData = {op:op, v:v, meta:metadata} - # - # Ops are queued before being applied so that the following code applies op C before op B: - # model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB - # model.applyOp 'doc', OPC - @applyOp = (docName, opData, callback) -> - # All the logic for this is in makeOpQueue, above. - load docName, (error, doc) -> - return callback error if error + // Apply an op to the specified document. + // The callback is passed (error, applied version #) + // opData = {op:op, v:v, meta:metadata} + // + // Ops are queued before being applied so that the following code applies op C before op B: + // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB + // model.applyOp 'doc', OPC + this.applyOp = (docName, opData, callback) => // All the logic for this is in makeOpQueue, above. + load(docName, function(error, doc) { + if (error) { return callback(error); } - process.nextTick -> doc.opQueue opData, (error, newVersion) -> - refreshReapingTimeout docName - callback? error, newVersion + return process.nextTick(() => doc.opQueue(opData, function(error, newVersion) { + refreshReapingTimeout(docName); + return (typeof callback === 'function' ? 
callback(error, newVersion) : undefined);
+    }));
+  });
-  # TODO: store (some) metadata in DB
-  # TODO: op and meta should be combineable in the op that gets sent
-  @applyMetaOp = (docName, metaOpData, callback) ->
-    {path, value} = metaOpData.meta
+  // TODO: store (some) metadata in DB
+  // TODO: op and meta should be combinable in the op that gets sent
+  this.applyMetaOp = function(docName, metaOpData, callback) {
+    const {path, value} = metaOpData.meta;
-    return callback? "path should be an array" unless isArray path
+    if (!isArray(path)) { return (typeof callback === 'function' ? callback("path should be an array") : undefined); }
-    load docName, (error, doc) ->
-      if error?
-        callback? error
-      else
-        applied = false
-        switch path[0]
-          when 'shout'
-            doc.eventEmitter.emit 'op', metaOpData
-            applied = true
+    return load(docName, function(error, doc) {
+      if (error != null) {
+        return (typeof callback === 'function' ? callback(error) : undefined);
+      } else {
+        let applied = false;
+        switch (path[0]) {
+          case 'shout':
+            doc.eventEmitter.emit('op', metaOpData);
+            applied = true;
+            break;
+        }
-        model.emit 'applyMetaOp', docName, path, value if applied
-        callback? null, doc.v
+        if (applied) { model.emit('applyMetaOp', docName, path, value); }
+        return (typeof callback === 'function' ? callback(null, doc.v) : undefined);
+      }
+    });
+  };
-  # Listen to all ops from the specified version. If version is in the past, all
-  # ops since that version are sent immediately to the listener.
-  #
-  # The callback is called once the listener is attached, but before any ops have been passed
-  # to the listener.
-  #
-  # This will _not_ edit the document metadata.
-  #
-  # If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour
-  # might change in a future version.
-  #
-  # version is the document version at which the document is opened. It can be left out if you want to open
-  # the document at the most recent version.
-  #
-  # listener is called with (opData) each time an op is applied.
-  #
-  # callback(error, openedVersion)
-  @listen = (docName, version, listener, callback) ->
-    [version, listener, callback] = [null, version, listener] if typeof version is 'function'
+  // Listen to all ops from the specified version. If version is in the past, all
+  // ops since that version are sent immediately to the listener.
+  //
+  // The callback is called once the listener is attached, but before any ops have been passed
+  // to the listener.
+  //
+  // This will _not_ edit the document metadata.
+  //
+  // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour
+  // might change in a future version.
+  //
+  // version is the document version at which the document is opened. It can be left out if you want to open
+  // the document at the most recent version.
+  //
+  // listener is called with (opData) each time an op is applied.
+  //
+  // callback(error, openedVersion)
+  this.listen = function(docName, version, listener, callback) {
+    if (typeof version === 'function') { [version, listener, callback] = Array.from([null, version, listener]); }
-    load docName, (error, doc) ->
-      return callback? error if error
+    return load(docName, function(error, doc) {
+      if (error) { return (typeof callback === 'function' ? callback(error) : undefined); }
-      clearTimeout doc.reapTimer
+      clearTimeout(doc.reapTimer);
-      if version?
-        getOps docName, version, null, (error, data) ->
-          return callback?
error if error + if (version != null) { + return getOps(docName, version, null, function(error, data) { + if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } - doc.eventEmitter.on 'op', listener - callback? null, version - for op in data - listener op + doc.eventEmitter.on('op', listener); + if (typeof callback === 'function') { + callback(null, version); + } + return (() => { + const result = []; + for (let op of Array.from(data)) { + var needle; + listener(op); - # The listener may well remove itself during the catchup phase. If this happens, break early. - # This is done in a quite inefficient way. (O(n) where n = #listeners on doc) - break unless listener in doc.eventEmitter.listeners 'op' + // The listener may well remove itself during the catchup phase. If this happens, break early. + // This is done in a quite inefficient way. (O(n) where n = #listeners on doc) + if ((needle = listener, !Array.from(doc.eventEmitter.listeners('op')).includes(needle))) { break; } else { + result.push(undefined); + } + } + return result; + })(); + }); - else # Version is null / undefined. Just add the listener. - doc.eventEmitter.on 'op', listener - callback? null, doc.v + } else { // Version is null / undefined. Just add the listener. + doc.eventEmitter.on('op', listener); + return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + } + }); + }; - # Remove a listener for a particular document. - # - # removeListener(docName, listener) - # - # This is synchronous. - @removeListener = (docName, listener) -> - # The document should already be loaded. - doc = docs[docName] - throw new Error 'removeListener called but document not loaded' unless doc + // Remove a listener for a particular document. + // + // removeListener(docName, listener) + // + // This is synchronous. + this.removeListener = function(docName, listener) { + // The document should already be loaded. + const doc = docs[docName]; + if (!doc) { throw new Error('removeListener called but document not loaded'); } - doc.eventEmitter.removeListener 'op', listener - refreshReapingTimeout docName + doc.eventEmitter.removeListener('op', listener); + return refreshReapingTimeout(docName); + }; - # Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - - # sharejs will happily replay uncommitted ops when documents are re-opened anyway. - @flush = (callback) -> - return callback?() unless db + // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - + // sharejs will happily replay uncommitted ops when documents are re-opened anyway. + this.flush = function(callback) { + if (!db) { return (typeof callback === 'function' ? callback() : undefined); } - pendingWrites = 0 + let pendingWrites = 0; - for docName, doc of docs - if doc.committedVersion < doc.v - pendingWrites++ - # I'm hoping writeSnapshot will always happen in another thread. - tryWriteSnapshot docName, -> - process.nextTick -> - pendingWrites-- - callback?() if pendingWrites is 0 + for (let docName in docs) { + const doc = docs[docName]; + if (doc.committedVersion < doc.v) { + pendingWrites++; + // I'm hoping writeSnapshot will always happen in another thread. + tryWriteSnapshot(docName, () => process.nextTick(function() { + pendingWrites--; + if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } + })); + } + } - # If nothing was queued, terminate immediately. 
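// listen() usage sketch: opening at an old version replays the missed ops to
// the listener before any live ones (hypothetical version number):
//
//   model.listen('doc', 3, opData => { /* v3..current, then live ops */ },
//     (error, openedVersion) => { /* openedVersion === 3 */ });
//   // model.removeListener('doc', listener) detaches synchronously, and
//   // model.flush(done) persists any snapshots still ahead of the database.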
- callback?() if pendingWrites is 0 + // If nothing was queued, terminate immediately. + if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } + }; - # Close the database connection. This is needed so nodejs can shut down cleanly. - @closeDb = -> - db?.close?() - db = null + // Close the database connection. This is needed so nodejs can shut down cleanly. + this.closeDb = function() { + __guardMethod__(db, 'close', o => o.close()); + return db = null; + }; - return +}); -# Model inherits from EventEmitter. -Model:: = new EventEmitter +// Model inherits from EventEmitter. +Model.prototype = new EventEmitter; + +function __guardMethod__(obj, methodName, transform) { + if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') { + return transform(obj, methodName); + } else { + return undefined; + } +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/sharejs/server/model.js b/services/document-updater/app/coffee/sharejs/server/model.js index 0e699cce92..42dd7acc64 100644 --- a/services/document-updater/app/coffee/sharejs/server/model.js +++ b/services/document-updater/app/coffee/sharejs/server/model.js @@ -1,606 +1,703 @@ -# The model of all the ops. Responsible for applying & transforming remote deltas -# and managing the storage layer. -# -# Actual storage is handled by the database wrappers in db/*, wrapped by DocCache +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS104: Avoid inline assignments + * DS204: Change includes calls to have a more natural evaluation order + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// The model of all the ops. Responsible for applying & transforming remote deltas +// and managing the storage layer. +// +// Actual storage is handled by the database wrappers in db/*, wrapped by DocCache -{EventEmitter} = require 'events' +let Model; +const {EventEmitter} = require('events'); -queue = require './syncqueue' -types = require '../types' +const queue = require('./syncqueue'); +const types = require('../types'); -isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' +const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; -# This constructor creates a new Model object. There will be one model object -# per server context. -# -# The model object is responsible for a lot of things: -# -# - It manages the interactions with the database -# - It maintains (in memory) a set of all active documents -# - It calls out to the OT functions when necessary -# -# The model is an event emitter. It emits the following events: -# -# create(docName, data): A document has been created with the specified name & data -module.exports = Model = (db, options) -> - # db can be null if the user doesn't want persistance. +// This constructor creates a new Model object. There will be one model object +// per server context. +// +// The model object is responsible for a lot of things: +// +// - It manages the interactions with the database +// - It maintains (in memory) a set of all active documents +// - It calls out to the OT functions when necessary +// +// The model is an event emitter. 
It emits the following events:
+//
+// create(docName, data): A document has been created with the specified name & data
+module.exports = (Model = function(db, options) {
+  // db can be null if the user doesn't want persistence.
-  return new Model(db, options) if !(this instanceof Model)
+  let getOps;
+  if (!(this instanceof Model)) { return new Model(db, options); }
-  model = this
+  const model = this;
-  options ?= {}
+  if (options == null) { options = {}; }
-  # This is a cache of 'live' documents.
-  #
-  # The cache is a map from docName -> {
-  #   ops:[{op, meta}]
-  #   snapshot
-  #   type
-  #   v
-  #   meta
-  #   eventEmitter
-  #   reapTimer
-  #   committedVersion: v
-  #   snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant
-  #   dbMeta: database specific data
-  #   opQueue: syncQueue for processing ops
-  # }
-  #
-  # The ops list contains the document's last options.numCachedOps ops. (Or all
-  # of them if we're using a memory store).
-  #
-  # Documents are stored in this set so long as the document has been accessed in
-  # the last few seconds (options.reapTime) OR at least one client has the document
-  # open. I don't know if I should keep open (but not being edited) documents live -
-  # maybe if a client has a document open but the document isn't being edited, I should
-  # flush it from the cache.
-  #
-  # In any case, the API to model is designed such that if we want to change that later
-  # it should be pretty easy to do so without any external-to-the-model code changes.
-  docs = {}
+  // This is a cache of 'live' documents.
+  //
+  // The cache is a map from docName -> {
+  //   ops:[{op, meta}]
+  //   snapshot
+  //   type
+  //   v
+  //   meta
+  //   eventEmitter
+  //   reapTimer
+  //   committedVersion: v
+  //   snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant
+  //   dbMeta: database specific data
+  //   opQueue: syncQueue for processing ops
+  // }
+  //
+  // The ops list contains the document's last options.numCachedOps ops. (Or all
+  // of them if we're using a memory store).
+  //
+  // Documents are stored in this set so long as the document has been accessed in
+  // the last few seconds (options.reapTime) OR at least one client has the document
+  // open. I don't know if I should keep open (but not being edited) documents live -
+  // maybe if a client has a document open but the document isn't being edited, I should
+  // flush it from the cache.
+  //
+  // In any case, the API to model is designed such that if we want to change that later
+  // it should be pretty easy to do so without any external-to-the-model code changes.
+  const docs = {};
-  # This is a map from docName -> [callback]. It is used when a document hasn't been
-  # cached and multiple getSnapshot() / getVersion() requests come in. All requests
-  # are added to the callback list and called when db.getSnapshot() returns.
-  #
-  # callback(error, snapshot data)
-  awaitingGetSnapshot = {}
+  // This is a map from docName -> [callback]. It is used when a document hasn't been
+  // cached and multiple getSnapshot() / getVersion() requests come in. All requests
+  // are added to the callback list and called when db.getSnapshot() returns.
+  //
+  // callback(error, snapshot data)
+  const awaitingGetSnapshot = {};
-  # The time that documents which no clients have open will stay in the cache.
-  # Should be > 0.
-  options.reapTime ?= 3000
+  // The time that documents which no clients have open will stay in the cache.
+  // Should be > 0.
+ if (options.reapTime == null) { options.reapTime = 3000; } - # The number of operations the cache holds before reusing the space - options.numCachedOps ?= 10 + // The number of operations the cache holds before reusing the space + if (options.numCachedOps == null) { options.numCachedOps = 10; } - # This option forces documents to be reaped, even when there's no database backend. - # This is useful when you don't care about persistance and don't want to gradually - # fill memory. - # - # You might want to set reapTime to a day or something. - options.forceReaping ?= false + // This option forces documents to be reaped, even when there's no database backend. + // This is useful when you don't care about persistance and don't want to gradually + // fill memory. + // + // You might want to set reapTime to a day or something. + if (options.forceReaping == null) { options.forceReaping = false; } - # Until I come up with a better strategy, we'll save a copy of the document snapshot - # to the database every ~20 submitted ops. - options.opsBeforeCommit ?= 20 + // Until I come up with a better strategy, we'll save a copy of the document snapshot + // to the database every ~20 submitted ops. + if (options.opsBeforeCommit == null) { options.opsBeforeCommit = 20; } - # It takes some processing time to transform client ops. The server will punt ops back to the - # client to transform if they're too old. - options.maximumAge ?= 40 + // It takes some processing time to transform client ops. The server will punt ops back to the + // client to transform if they're too old. + if (options.maximumAge == null) { options.maximumAge = 40; } - # **** Cache API methods + // **** Cache API methods - # Its important that all ops are applied in order. This helper method creates the op submission queue - # for a single document. This contains the logic for transforming & applying ops. - makeOpQueue = (docName, doc) -> queue (opData, callback) -> - return callback 'Version missing' unless opData.v >= 0 - return callback 'Op at future version' if opData.v > doc.v + // Its important that all ops are applied in order. This helper method creates the op submission queue + // for a single document. This contains the logic for transforming & applying ops. + const makeOpQueue = (docName, doc) => queue(function(opData, callback) { + if (!(opData.v >= 0)) { return callback('Version missing'); } + if (opData.v > doc.v) { return callback('Op at future version'); } - # Punt the transforming work back to the client if the op is too old. - return callback 'Op too old' if opData.v + options.maximumAge < doc.v + // Punt the transforming work back to the client if the op is too old. + if ((opData.v + options.maximumAge) < doc.v) { return callback('Op too old'); } - opData.meta ||= {} - opData.meta.ts = Date.now() + if (!opData.meta) { opData.meta = {}; } + opData.meta.ts = Date.now(); - # We'll need to transform the op to the current version of the document. This - # calls the callback immediately if opVersion == doc.v. - getOps docName, opData.v, doc.v, (error, ops) -> - return callback error if error + // We'll need to transform the op to the current version of the document. This + // calls the callback immediately if opVersion == doc.v. + return getOps(docName, opData.v, doc.v, function(error, ops) { + let snapshot; + if (error) { return callback(error); } - unless doc.v - opData.v == ops.length - # This should never happen. It indicates that we didn't get all the ops we - # asked for. 
Its important that the submitted op is correctly transformed. - console.error "Could not get old ops in model for document #{docName}" - console.error "Expected ops #{opData.v} to #{doc.v} and got #{ops.length} ops" - return callback 'Internal error' + if ((doc.v - opData.v) !== ops.length) { + // This should never happen. It indicates that we didn't get all the ops we + // asked for. Its important that the submitted op is correctly transformed. + console.error(`Could not get old ops in model for document ${docName}`); + console.error(`Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`); + return callback('Internal error'); + } - if ops.length > 0 - try - # If there's enough ops, it might be worth spinning this out into a webworker thread. - for oldOp in ops - # Dup detection works by sending the id(s) the op has been submitted with previously. - # If the id matches, we reject it. The client can also detect the op has been submitted - # already if it sees its own previous id in the ops it sees when it does catchup. - if oldOp.meta.source and opData.dupIfSource and oldOp.meta.source in opData.dupIfSource - return callback 'Op already submitted' + if (ops.length > 0) { + try { + // If there's enough ops, it might be worth spinning this out into a webworker thread. + for (let oldOp of Array.from(ops)) { + // Dup detection works by sending the id(s) the op has been submitted with previously. + // If the id matches, we reject it. The client can also detect the op has been submitted + // already if it sees its own previous id in the ops it sees when it does catchup. + if (oldOp.meta.source && opData.dupIfSource && Array.from(opData.dupIfSource).includes(oldOp.meta.source)) { + return callback('Op already submitted'); + } - opData.op = doc.type.transform opData.op, oldOp.op, 'left' - opData.v++ - catch error - console.error error.stack - return callback error.message + opData.op = doc.type.transform(opData.op, oldOp.op, 'left'); + opData.v++; + } + } catch (error1) { + error = error1; + console.error(error.stack); + return callback(error.message); + } + } - try - snapshot = doc.type.apply doc.snapshot, opData.op - catch error - console.error error.stack - return callback error.message + try { + snapshot = doc.type.apply(doc.snapshot, opData.op); + } catch (error2) { + error = error2; + console.error(error.stack); + return callback(error.message); + } - if options.maxDocLength? and doc.snapshot.length > options.maxDocLength - return callback "Update takes doc over max doc size" + if ((options.maxDocLength != null) && (doc.snapshot.length > options.maxDocLength)) { + return callback("Update takes doc over max doc size"); + } - # The op data should be at the current version, and the new document data should be at - # the next version. - # - # This should never happen in practice, but its a nice little check to make sure everything - # is hunky-dory. - unless opData.v == doc.v - # This should never happen. - console.error "Version mismatch detected in model. File a ticket - this is a bug." - console.error "Expecting #{opData.v} == #{doc.v}" - return callback 'Internal error' + // The op data should be at the current version, and the new document data should be at + // the next version. + // + // This should never happen in practice, but its a nice little check to make sure everything + // is hunky-dory. + if (opData.v !== doc.v) { + // This should never happen. + console.error("Version mismatch detected in model. 
File a ticket - this is a bug."); + console.error(`Expecting ${opData.v} == ${doc.v}`); + return callback('Internal error'); + } - #newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} - writeOp = db?.writeOp or (docName, newOpData, callback) -> callback() + //newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); - writeOp docName, opData, (error) -> - if error - # The user should probably know about this. - console.warn "Error writing ops to database: #{error}" - return callback error + return writeOp(docName, opData, function(error) { + if (error) { + // The user should probably know about this. + console.warn(`Error writing ops to database: ${error}`); + return callback(error); + } - options.stats?.writeOp?() + __guardMethod__(options.stats, 'writeOp', o => o.writeOp()); - # This is needed when we emit the 'change' event, below. - oldSnapshot = doc.snapshot + // This is needed when we emit the 'change' event, below. + const oldSnapshot = doc.snapshot; - # All the heavy lifting is now done. Finally, we'll update the cache with the new data - # and (maybe!) save a new document snapshot to the database. + // All the heavy lifting is now done. Finally, we'll update the cache with the new data + // and (maybe!) save a new document snapshot to the database. - doc.v = opData.v + 1 - doc.snapshot = snapshot + doc.v = opData.v + 1; + doc.snapshot = snapshot; - doc.ops.push opData - doc.ops.shift() if db and doc.ops.length > options.numCachedOps + doc.ops.push(opData); + if (db && (doc.ops.length > options.numCachedOps)) { doc.ops.shift(); } - model.emit 'applyOp', docName, opData, snapshot, oldSnapshot - doc.eventEmitter.emit 'op', opData, snapshot, oldSnapshot + model.emit('applyOp', docName, opData, snapshot, oldSnapshot); + doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot); - # The callback is called with the version of the document at which the op was applied. - # This is the op.v after transformation, and its doc.v - 1. - callback null, opData.v + // The callback is called with the version of the document at which the op was applied. + // This is the op.v after transformation, and its doc.v - 1. + callback(null, opData.v); - # I need a decent strategy here for deciding whether or not to save the snapshot. - # - # The 'right' strategy looks something like "Store the snapshot whenever the snapshot - # is smaller than the accumulated op data". For now, I'll just store it every 20 - # ops or something. (Configurable with doc.committedVersion) - if !doc.snapshotWriteLock and doc.committedVersion + options.opsBeforeCommit <= doc.v - tryWriteSnapshot docName, (error) -> - console.warn "Error writing snapshot #{error}. This is nonfatal" if error + // I need a decent strategy here for deciding whether or not to save the snapshot. + // + // The 'right' strategy looks something like "Store the snapshot whenever the snapshot + // is smaller than the accumulated op data". For now, I'll just store it every 20 + // ops or something. (Configurable with doc.committedVersion) + if (!doc.snapshotWriteLock && ((doc.committedVersion + options.opsBeforeCommit) <= doc.v)) { + return tryWriteSnapshot(docName, function(error) { + if (error) { return console.warn(`Error writing snapshot ${error}. This is nonfatal`); } + }); + } + }); + }); + }); - # Add the data for the given docName to the cache. 
The named document shouldn't already - # exist in the doc set. - # - # Returns the new doc. - add = (docName, error, data, committedVersion, ops, dbMeta) -> - callbacks = awaitingGetSnapshot[docName] - delete awaitingGetSnapshot[docName] + // Add the data for the given docName to the cache. The named document shouldn't already + // exist in the doc set. + // + // Returns the new doc. + const add = function(docName, error, data, committedVersion, ops, dbMeta) { + let callback, doc; + const callbacks = awaitingGetSnapshot[docName]; + delete awaitingGetSnapshot[docName]; - if error - callback error for callback in callbacks if callbacks - else - doc = docs[docName] = - snapshot: data.snapshot - v: data.v - type: data.type - meta: data.meta + if (error) { + if (callbacks) { for (callback of Array.from(callbacks)) { callback(error); } } + } else { + doc = (docs[docName] = { + snapshot: data.snapshot, + v: data.v, + type: data.type, + meta: data.meta, - # Cache of ops - ops: ops or [] + // Cache of ops + ops: ops || [], - eventEmitter: new EventEmitter + eventEmitter: new EventEmitter, - # Timer before the document will be invalidated from the cache (if the document has no - # listeners) - reapTimer: null + // Timer before the document will be invalidated from the cache (if the document has no + // listeners) + reapTimer: null, - # Version of the snapshot thats in the database - committedVersion: committedVersion ? data.v - snapshotWriteLock: false - dbMeta: dbMeta + // Version of the snapshot thats in the database + committedVersion: committedVersion != null ? committedVersion : data.v, + snapshotWriteLock: false, + dbMeta + }); - doc.opQueue = makeOpQueue docName, doc + doc.opQueue = makeOpQueue(docName, doc); - refreshReapingTimeout docName - model.emit 'add', docName, data - callback null, doc for callback in callbacks if callbacks + refreshReapingTimeout(docName); + model.emit('add', docName, data); + if (callbacks) { for (callback of Array.from(callbacks)) { callback(null, doc); } } + } - doc + return doc; + }; - # This is a little helper wrapper around db.getOps. It does two things: - # - # - If there's no database set, it returns an error to the callback - # - It adds version numbers to each op returned from the database - # (These can be inferred from context so the DB doesn't store them, but its useful to have them). - getOpsInternal = (docName, start, end, callback) -> - return callback? 'Document does not exist' unless db + // This is a little helper wrapper around db.getOps. It does two things: + // + // - If there's no database set, it returns an error to the callback + // - It adds version numbers to each op returned from the database + // (These can be inferred from context so the DB doesn't store them, but its useful to have them). + const getOpsInternal = function(docName, start, end, callback) { + if (!db) { return (typeof callback === 'function' ? callback('Document does not exist') : undefined); } - db.getOps docName, start, end, (error, ops) -> - return callback? error if error + return db.getOps(docName, start, end, function(error, ops) { + if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } - v = start - op.v = v++ for op in ops + let v = start; + for (let op of Array.from(ops)) { op.v = v++; } - callback? null, ops + return (typeof callback === 'function' ? callback(null, ops) : undefined); + }); + }; - # Load the named document into the cache. This function is re-entrant. 
- # - # The callback is called with (error, doc) - load = (docName, callback) -> - if docs[docName] - # The document is already loaded. Return immediately. - options.stats?.cacheHit? 'getSnapshot' - return callback null, docs[docName] + // Load the named document into the cache. This function is re-entrant. + // + // The callback is called with (error, doc) + const load = function(docName, callback) { + if (docs[docName]) { + // The document is already loaded. Return immediately. + __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')); + return callback(null, docs[docName]); + } - # We're a memory store. If we don't have it, nobody does. - return callback 'Document does not exist' unless db + // We're a memory store. If we don't have it, nobody does. + if (!db) { return callback('Document does not exist'); } - callbacks = awaitingGetSnapshot[docName] + const callbacks = awaitingGetSnapshot[docName]; - # The document is being loaded already. Add ourselves as a callback. - return callbacks.push callback if callbacks + // The document is being loaded already. Add ourselves as a callback. + if (callbacks) { return callbacks.push(callback); } - options.stats?.cacheMiss? 'getSnapshot' + __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot')); - # The document isn't loaded and isn't being loaded. Load it. - awaitingGetSnapshot[docName] = [callback] - db.getSnapshot docName, (error, data, dbMeta) -> - return add docName, error if error + // The document isn't loaded and isn't being loaded. Load it. + awaitingGetSnapshot[docName] = [callback]; + return db.getSnapshot(docName, function(error, data, dbMeta) { + if (error) { return add(docName, error); } - type = types[data.type] - unless type - console.warn "Type '#{data.type}' missing" - return callback "Type not found" - data.type = type + const type = types[data.type]; + if (!type) { + console.warn(`Type '${data.type}' missing`); + return callback("Type not found"); + } + data.type = type; - committedVersion = data.v + const committedVersion = data.v; - # The server can close without saving the most recent document snapshot. - # In this case, there are extra ops which need to be applied before - # returning the snapshot. - getOpsInternal docName, data.v, null, (error, ops) -> - return callback error if error + // The server can close without saving the most recent document snapshot. + // In this case, there are extra ops which need to be applied before + // returning the snapshot. + return getOpsInternal(docName, data.v, null, function(error, ops) { + if (error) { return callback(error); } - if ops.length > 0 - console.log "Catchup #{docName} #{data.v} -> #{data.v + ops.length}" + if (ops.length > 0) { + console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); - try - for op in ops - data.snapshot = type.apply data.snapshot, op.op - data.v++ - catch e - # This should never happen - it indicates that whats in the - # database is invalid. - console.error "Op data invalid for #{docName}: #{e.stack}" - return callback 'Op data invalid' + try { + for (let op of Array.from(ops)) { + data.snapshot = type.apply(data.snapshot, op.op); + data.v++; + } + } catch (e) { + // This should never happen - it indicates that whats in the + // database is invalid. 
+ console.error(`Op data invalid for ${docName}: ${e.stack}`); + return callback('Op data invalid'); + } + } - model.emit 'load', docName, data - add docName, error, data, committedVersion, ops, dbMeta + model.emit('load', docName, data); + return add(docName, error, data, committedVersion, ops, dbMeta); + }); + }); + }; - # This makes sure the cache contains a document. If the doc cache doesn't contain - # a document, it is loaded from the database and stored. - # - # Documents are stored so long as either: - # - They have been accessed within the past #{PERIOD} - # - At least one client has the document open - refreshReapingTimeout = (docName) -> - doc = docs[docName] - return unless doc + // This makes sure the cache contains a document. If the doc cache doesn't contain + // a document, it is loaded from the database and stored. + // + // Documents are stored so long as either: + // - They have been accessed within the past #{PERIOD} + // - At least one client has the document open + var refreshReapingTimeout = function(docName) { + const doc = docs[docName]; + if (!doc) { return; } - # I want to let the clients list be updated before this is called. - process.nextTick -> - # This is an awkward way to find out the number of clients on a document. If this - # causes performance issues, add a numClients field to the document. - # - # The first check is because its possible that between refreshReapingTimeout being called and this - # event being fired, someone called delete() on the document and hence the doc is something else now. - if doc == docs[docName] and - doc.eventEmitter.listeners('op').length == 0 and - (db or options.forceReaping) and - doc.opQueue.busy is false + // I want to let the clients list be updated before this is called. + return process.nextTick(function() { + // This is an awkward way to find out the number of clients on a document. If this + // causes performance issues, add a numClients field to the document. + // + // The first check is because its possible that between refreshReapingTimeout being called and this + // event being fired, someone called delete() on the document and hence the doc is something else now. + if ((doc === docs[docName]) && + (doc.eventEmitter.listeners('op').length === 0) && + (db || options.forceReaping) && + (doc.opQueue.busy === false)) { - clearTimeout doc.reapTimer - doc.reapTimer = reapTimer = setTimeout -> - tryWriteSnapshot docName, -> - # If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - # in the middle of applying an operation, don't reap. - delete docs[docName] if docs[docName].reapTimer is reapTimer and doc.opQueue.busy is false - , options.reapTime + let reapTimer; + clearTimeout(doc.reapTimer); + return doc.reapTimer = (reapTimer = setTimeout(() => tryWriteSnapshot(docName, function() { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ((docs[docName].reapTimer === reapTimer) && (doc.opQueue.busy === false)) { return delete docs[docName]; } + }) + , options.reapTime)); + } + }); + }; - tryWriteSnapshot = (docName, callback) -> - return callback?() unless db + var tryWriteSnapshot = function(docName, callback) { + if (!db) { return (typeof callback === 'function' ? callback() : undefined); } - doc = docs[docName] + const doc = docs[docName]; - # The doc is closed - return callback?() unless doc + // The doc is closed + if (!doc) { return (typeof callback === 'function' ? 
callback() : undefined); } - # The document is already saved. - return callback?() if doc.committedVersion is doc.v + // The document is already saved. + if (doc.committedVersion === doc.v) { return (typeof callback === 'function' ? callback() : undefined); } - return callback? 'Another snapshot write is in progress' if doc.snapshotWriteLock + if (doc.snapshotWriteLock) { return (typeof callback === 'function' ? callback('Another snapshot write is in progress') : undefined); } - doc.snapshotWriteLock = true + doc.snapshotWriteLock = true; - options.stats?.writeSnapshot?() + __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()); - writeSnapshot = db?.writeSnapshot or (docName, docData, dbMeta, callback) -> callback() + const writeSnapshot = (db != null ? db.writeSnapshot : undefined) || ((docName, docData, dbMeta, callback) => callback()); - data = - v: doc.v - meta: doc.meta - snapshot: doc.snapshot - # The database doesn't know about object types. + const data = { + v: doc.v, + meta: doc.meta, + snapshot: doc.snapshot, + // The database doesn't know about object types. type: doc.type.name + }; - # Commit snapshot. - writeSnapshot docName, data, doc.dbMeta, (error, dbMeta) -> - doc.snapshotWriteLock = false + // Commit snapshot. + return writeSnapshot(docName, data, doc.dbMeta, function(error, dbMeta) { + doc.snapshotWriteLock = false; - # We have to use data.v here because the version in the doc could - # have been updated between the call to writeSnapshot() and now. - doc.committedVersion = data.v - doc.dbMeta = dbMeta + // We have to use data.v here because the version in the doc could + // have been updated between the call to writeSnapshot() and now. + doc.committedVersion = data.v; + doc.dbMeta = dbMeta; - callback? error + return (typeof callback === 'function' ? callback(error) : undefined); + }); + }; - # *** Model interface methods + // *** Model interface methods - # Create a new document. - # - # data should be {snapshot, type, [meta]}. The version of a new document is 0. - @create = (docName, type, meta, callback) -> - [meta, callback] = [{}, meta] if typeof meta is 'function' + // Create a new document. + // + // data should be {snapshot, type, [meta]}. The version of a new document is 0. + this.create = function(docName, type, meta, callback) { + if (typeof meta === 'function') { [meta, callback] = Array.from([{}, meta]); } - return callback? 'Invalid document name' if docName.match /\// - return callback? 'Document already exists' if docs[docName] + if (docName.match(/\//)) { return (typeof callback === 'function' ? callback('Invalid document name') : undefined); } + if (docs[docName]) { return (typeof callback === 'function' ? callback('Document already exists') : undefined); } - type = types[type] if typeof type == 'string' - return callback? 'Type not found' unless type + if (typeof type === 'string') { type = types[type]; } + if (!type) { return (typeof callback === 'function' ? callback('Type not found') : undefined); } - data = - snapshot:type.create() - type:type.name - meta:meta or {} + const data = { + snapshot:type.create(), + type:type.name, + meta:meta || {}, v:0 + }; - done = (error, dbMeta) -> - # dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something. - return callback? error if error + const done = function(error, dbMeta) { + // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something. 
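For illustration, a typical call to the method being defined in this hunk might look like the sketch below; the 'simple' type from later in this patch is assumed to be registered in ../types, and meta may be omitted entirely because the function shifts its arguments when meta is a callback:

model.create('doc1', 'simple', {}, function(error) {
  if (error) { return console.error(error); }
  // the 'create' event has fired and the document is cached at version 0
});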
+ if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } - # From here on we'll store the object version of the type name. - data.type = type - add docName, null, data, 0, [], dbMeta - model.emit 'create', docName, data - callback?() + // From here on we'll store the object version of the type name. + data.type = type; + add(docName, null, data, 0, [], dbMeta); + model.emit('create', docName, data); + return (typeof callback === 'function' ? callback() : undefined); + }; - if db - db.create docName, data, done - else - done() + if (db) { + return db.create(docName, data, done); + } else { + return done(); + } + }; - # Perminantly deletes the specified document. - # If listeners are attached, they are removed. - # - # The callback is called with (error) if there was an error. If error is null / undefined, the - # document was deleted. - # - # WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the - # deletion. Subsequent op submissions will fail). - @delete = (docName, callback) -> - doc = docs[docName] + // Permanently deletes the specified document. + // If listeners are attached, they are removed. + // + // The callback is called with (error) if there was an error. If error is null / undefined, the + // document was deleted. + // + // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the + // deletion. Subsequent op submissions will fail). + this.delete = function(docName, callback) { + const doc = docs[docName]; - if doc - clearTimeout doc.reapTimer - delete docs[docName] + if (doc) { + clearTimeout(doc.reapTimer); + delete docs[docName]; + } - done = (error) -> - model.emit 'delete', docName unless error - callback? error + const done = function(error) { + if (!error) { model.emit('delete', docName); } + return (typeof callback === 'function' ? callback(error) : undefined); + }; - if db - db.delete docName, doc?.dbMeta, done - else - done (if !doc then 'Document does not exist') + if (db) { + return db.delete(docName, doc != null ? doc.dbMeta : undefined, done); + } else { + return done((!doc ? 'Document does not exist' : undefined)); + } + }; - # This gets all operations from [start...end]. (That is, its not inclusive.) - # - # end can be null. This means 'get me all ops from start'. - # - # Each op returned is in the form {op:o, meta:m, v:version}. - # - # Callback is called with (error, [ops]) - # - # If the document does not exist, getOps doesn't necessarily return an error. This is because - # its awkward to figure out whether or not the document exists for things - # like the redis database backend. I guess its a bit gross having this inconsistant - # with the other DB calls, but its certainly convenient. - # - # Use getVersion() to determine if a document actually exists, if thats what you're - # after. - @getOps = getOps = (docName, start, end, callback) -> - # getOps will only use the op cache if its there. It won't fill the op cache in. - throw new Error 'start must be 0+' unless start >= 0 + // This gets all operations from [start...end]. (That is, it's not inclusive.) + // + // end can be null. This means 'get me all ops from start'. + // + // Each op returned is in the form {op:o, meta:m, v:version}. + // + // Callback is called with (error, [ops]) + // + // If the document does not exist, getOps doesn't necessarily return an error. 
This is because + // it's awkward to figure out whether or not the document exists for things + // like the redis database backend. I guess it's a bit gross having this inconsistent + // with the other DB calls, but it's certainly convenient. + // + // Use getVersion() to determine if a document actually exists, if that's what you're + // after. + this.getOps = (getOps = function(docName, start, end, callback) { + // getOps will only use the op cache if it's there. It won't fill the op cache in. + if (!(start >= 0)) { throw new Error('start must be 0+'); } - [end, callback] = [null, end] if typeof end is 'function' + if (typeof end === 'function') { [end, callback] = Array.from([null, end]); } - ops = docs[docName]?.ops + const ops = docs[docName] != null ? docs[docName].ops : undefined; - if ops - version = docs[docName].v + if (ops) { + const version = docs[docName].v; - # Ops contains an array of ops. The last op in the list is the last op applied - end ?= version - start = Math.min start, end + // Ops contains an array of ops. The last op in the list is the last op applied + if (end == null) { end = version; } + start = Math.min(start, end); - return callback null, [] if start == end + if (start === end) { return callback(null, []); } - # Base is the version number of the oldest op we have cached - base = version - ops.length + // Base is the version number of the oldest op we have cached + const base = version - ops.length; - # If the database is null, we'll trim to the ops we do have and hope thats enough. - if start >= base or db is null - refreshReapingTimeout docName - options.stats?.cacheHit 'getOps' + // If the database is null, we'll trim to the ops we do have and hope that's enough. + if ((start >= base) || (db === null)) { + refreshReapingTimeout(docName); + if (options.stats != null) { + options.stats.cacheHit('getOps'); + } - return callback null, ops[(start - base)...(end - base)] + return callback(null, ops.slice((start - base), (end - base))); + } + } - options.stats?.cacheMiss 'getOps' + if (options.stats != null) { + options.stats.cacheMiss('getOps'); + } - getOpsInternal docName, start, end, callback + return getOpsInternal(docName, start, end, callback); + }); - # Gets the snapshot data for the specified document. - # getSnapshot(docName, callback) - # Callback is called with (error, {v: , type: , snapshot: , meta: }) - @getSnapshot = (docName, callback) -> - load docName, (error, doc) -> - callback error, if doc then {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} + // Gets the snapshot data for the specified document. + // getSnapshot(docName, callback) + // Callback is called with (error, {v: , type: , snapshot: , meta: }) + this.getSnapshot = (docName, callback) => load(docName, (error, doc) => callback(error, doc ? {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} : undefined)); - # Gets the latest version # of the document. - # getVersion(docName, callback) - # callback is called with (error, version). - @getVersion = (docName, callback) -> - load docName, (error, doc) -> callback error, doc?.v + // Gets the latest version # of the document. + // getVersion(docName, callback) + // callback is called with (error, version). + this.getVersion = (docName, callback) => load(docName, (error, doc) => callback(error, doc != null ? doc.v : undefined)); - # Apply an op to the specified document. 
- # The callback is passed (error, applied version #) - # opData = {op:op, v:v, meta:metadata} - # - # Ops are queued before being applied so that the following code applies op C before op B: - # model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB - # model.applyOp 'doc', OPC - @applyOp = (docName, opData, callback) -> - # All the logic for this is in makeOpQueue, above. - load docName, (error, doc) -> - return callback error if error + // Apply an op to the specified document. + // The callback is passed (error, applied version #) + // opData = {op:op, v:v, meta:metadata} + // + // Ops are queued before being applied so that the following code applies op C before op B: + // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB + // model.applyOp 'doc', OPC + this.applyOp = (docName, opData, callback) => // All the logic for this is in makeOpQueue, above. + load(docName, function(error, doc) { + if (error) { return callback(error); } - process.nextTick -> doc.opQueue opData, (error, newVersion) -> - refreshReapingTimeout docName - callback? error, newVersion + return process.nextTick(() => doc.opQueue(opData, function(error, newVersion) { + refreshReapingTimeout(docName); + return (typeof callback === 'function' ? callback(error, newVersion) : undefined); + })); + }); - # TODO: store (some) metadata in DB - # TODO: op and meta should be combineable in the op that gets sent - @applyMetaOp = (docName, metaOpData, callback) -> - {path, value} = metaOpData.meta + // TODO: store (some) metadata in DB + // TODO: op and meta should be combineable in the op that gets sent + this.applyMetaOp = function(docName, metaOpData, callback) { + const {path, value} = metaOpData.meta; - return callback? "path should be an array" unless isArray path + if (!isArray(path)) { return (typeof callback === 'function' ? callback("path should be an array") : undefined); } - load docName, (error, doc) -> - if error? - callback? error - else - applied = false - switch path[0] - when 'shout' - doc.eventEmitter.emit 'op', metaOpData - applied = true + return load(docName, function(error, doc) { + if (error != null) { + return (typeof callback === 'function' ? callback(error) : undefined); + } else { + let applied = false; + switch (path[0]) { + case 'shout': + doc.eventEmitter.emit('op', metaOpData); + applied = true; + break; + } - model.emit 'applyMetaOp', docName, path, value if applied - callback? null, doc.v + if (applied) { model.emit('applyMetaOp', docName, path, value); } + return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + } + }); + }; - # Listen to all ops from the specified version. If version is in the past, all - # ops since that version are sent immediately to the listener. - # - # The callback is called once the listener is attached, but before any ops have been passed - # to the listener. - # - # This will _not_ edit the document metadata. - # - # If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour - # might change in a future version. - # - # version is the document version at which the document is opened. It can be left out if you want to open - # the document at the most recent version. - # - # listener is called with (opData) each time an op is applied. - # - # callback(error, openedVersion) - @listen = (docName, version, listener, callback) -> - [version, listener, callback] = [null, version, listener] if typeof version is 'function' + // Listen to all ops from the specified version. 
If version is in the past, all + // ops since that version are sent immediately to the listener. + // + // The callback is called once the listener is attached, but before any ops have been passed + // to the listener. + // + // This will _not_ edit the document metadata. + // + // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour + // might change in a future version. + // + // version is the document version at which the document is opened. It can be left out if you want to open + // the document at the most recent version. + // + // listener is called with (opData) each time an op is applied. + // + // callback(error, openedVersion) + this.listen = function(docName, version, listener, callback) { + if (typeof version === 'function') { [version, listener, callback] = Array.from([null, version, listener]); } - load docName, (error, doc) -> - return callback? error if error + return load(docName, function(error, doc) { + if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } - clearTimeout doc.reapTimer + clearTimeout(doc.reapTimer); - if version? - getOps docName, version, null, (error, data) -> - return callback? error if error + if (version != null) { + return getOps(docName, version, null, function(error, data) { + if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } - doc.eventEmitter.on 'op', listener - callback? null, version - for op in data - listener op + doc.eventEmitter.on('op', listener); + if (typeof callback === 'function') { + callback(null, version); + } + return (() => { + const result = []; + for (let op of Array.from(data)) { + var needle; + listener(op); - # The listener may well remove itself during the catchup phase. If this happens, break early. - # This is done in a quite inefficient way. (O(n) where n = #listeners on doc) - break unless listener in doc.eventEmitter.listeners 'op' + // The listener may well remove itself during the catchup phase. If this happens, break early. + // This is done in a quite inefficient way. (O(n) where n = #listeners on doc) + if ((needle = listener, !Array.from(doc.eventEmitter.listeners('op')).includes(needle))) { break; } else { + result.push(undefined); + } + } + return result; + })(); + }); - else # Version is null / undefined. Just add the listener. - doc.eventEmitter.on 'op', listener - callback? null, doc.v + } else { // Version is null / undefined. Just add the listener. + doc.eventEmitter.on('op', listener); + return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + } + }); + }; - # Remove a listener for a particular document. - # - # removeListener(docName, listener) - # - # This is synchronous. - @removeListener = (docName, listener) -> - # The document should already be loaded. - doc = docs[docName] - throw new Error 'removeListener called but document not loaded' unless doc + // Remove a listener for a particular document. + // + // removeListener(docName, listener) + // + // This is synchronous. + this.removeListener = function(docName, listener) { + // The document should already be loaded. + const doc = docs[docName]; + if (!doc) { throw new Error('removeListener called but document not loaded'); } - doc.eventEmitter.removeListener 'op', listener - refreshReapingTimeout docName + doc.eventEmitter.removeListener('op', listener); + return refreshReapingTimeout(docName); + }; - # Flush saves all snapshot data to the database. 
I'm not sure whether or not this is actually needed - - # sharejs will happily replay uncommitted ops when documents are re-opened anyway. - @flush = (callback) -> - return callback?() unless db + // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - + // sharejs will happily replay uncommitted ops when documents are re-opened anyway. + this.flush = function(callback) { + if (!db) { return (typeof callback === 'function' ? callback() : undefined); } - pendingWrites = 0 + let pendingWrites = 0; - for docName, doc of docs - if doc.committedVersion < doc.v - pendingWrites++ - # I'm hoping writeSnapshot will always happen in another thread. - tryWriteSnapshot docName, -> - process.nextTick -> - pendingWrites-- - callback?() if pendingWrites is 0 + for (let docName in docs) { + const doc = docs[docName]; + if (doc.committedVersion < doc.v) { + pendingWrites++; + // I'm hoping writeSnapshot will always happen in another thread. + tryWriteSnapshot(docName, () => process.nextTick(function() { + pendingWrites--; + if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } + })); + } + } - # If nothing was queued, terminate immediately. - callback?() if pendingWrites is 0 + // If nothing was queued, terminate immediately. + if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } + }; - # Close the database connection. This is needed so nodejs can shut down cleanly. - @closeDb = -> - db?.close?() - db = null + // Close the database connection. This is needed so nodejs can shut down cleanly. + this.closeDb = function() { + __guardMethod__(db, 'close', o => o.close()); + return db = null; + }; - return +}); -# Model inherits from EventEmitter. -Model:: = new EventEmitter +// Model inherits from EventEmitter. +Model.prototype = new EventEmitter; + +function __guardMethod__(obj, methodName, transform) { + if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') { + return transform(obj, methodName); + } else { + return undefined; + } +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/sharejs/server/syncqueue.js b/services/document-updater/app/coffee/sharejs/server/syncqueue.js index 746450b010..31b2235ee3 100644 --- a/services/document-updater/app/coffee/sharejs/server/syncqueue.js +++ b/services/document-updater/app/coffee/sharejs/server/syncqueue.js @@ -1,42 +1,52 @@ -# A synchronous processing queue. The queue calls process on the arguments, -# ensuring that process() is only executing once at a time. -# -# process(data, callback) _MUST_ eventually call its callback. -# -# Example: -# -# queue = require 'syncqueue' -# -# fn = queue (data, callback) -> -# asyncthing data, -> -# callback(321) -# -# fn(1) -# fn(2) -# fn(3, (result) -> console.log(result)) -# -# ^--- async thing will only be running once at any time. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A synchronous processing queue. The queue calls process on the arguments, +// ensuring that process() is only executing once at a time. +// +// process(data, callback) _MUST_ eventually call its callback. 
+// +// Example: +// +// queue = require 'syncqueue' +// +// fn = queue (data, callback) -> +// asyncthing data, -> +// callback(321) +// +// fn(1) +// fn(2) +// fn(3, (result) -> console.log(result)) +// +// ^--- async thing will only be running once at any time. -module.exports = (process) -> - throw new Error('process is not a function') unless typeof process == 'function' - queue = [] +module.exports = function(process) { + if (typeof process !== 'function') { throw new Error('process is not a function'); } + const queue = []; - enqueue = (data, callback) -> - queue.push [data, callback] - flush() + const enqueue = function(data, callback) { + queue.push([data, callback]); + return flush(); + }; - enqueue.busy = false + enqueue.busy = false; - flush = -> - return if enqueue.busy or queue.length == 0 + var flush = function() { + if (enqueue.busy || (queue.length === 0)) { return; } - enqueue.busy = true - [data, callback] = queue.shift() - process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow. - enqueue.busy = false - # This is called after busy = false so a user can check if enqueue.busy is set in the callback. - callback.apply null, result if callback - flush() + enqueue.busy = true; + const [data, callback] = Array.from(queue.shift()); + return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false; + // This is called after busy = false so a user can check if enqueue.busy is set in the callback. + if (callback) { callback.apply(null, result); } + return flush(); + }); + }; - enqueue + return enqueue; +}; diff --git a/services/document-updater/app/coffee/sharejs/simple.js b/services/document-updater/app/coffee/sharejs/simple.js index 996b1a5ddc..57c4934f73 100644 --- a/services/document-updater/app/coffee/sharejs/simple.js +++ b/services/document-updater/app/coffee/sharejs/simple.js @@ -1,38 +1,48 @@ -# This is a really simple OT type. Its not compiled with the web client, but it could be. -# -# Its mostly included for demonstration purposes and its used in a lot of unit tests. -# -# This defines a really simple text OT type which only allows inserts. (No deletes). -# -# Ops look like: -# {position:#, text:"asdf"} -# -# Document snapshots look like: -# {str:string} +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This is a really simple OT type. Its not compiled with the web client, but it could be. +// +// Its mostly included for demonstration purposes and its used in a lot of unit tests. +// +// This defines a really simple text OT type which only allows inserts. (No deletes). +// +// Ops look like: +// {position:#, text:"asdf"} +// +// Document snapshots look like: +// {str:string} -module.exports = - # The name of the OT type. The type is stored in types[type.name]. The name can be - # used in place of the actual type in all the API methods. - name: 'simple' +module.exports = { + // The name of the OT type. The type is stored in types[type.name]. The name can be + // used in place of the actual type in all the API methods. + name: 'simple', - # Create a new document snapshot - create: -> {str:""} + // Create a new document snapshot + create() { return {str:""}; }, - # Apply the given op to the document snapshot. Returns the new snapshot. - # - # The original snapshot should not be modified. 
- apply: (snapshot, op) -> - throw new Error 'Invalid position' unless 0 <= op.position <= snapshot.str.length + // Apply the given op to the document snapshot. Returns the new snapshot. + // + // The original snapshot should not be modified. + apply(snapshot, op) { + if (!(0 <= op.position && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); } - str = snapshot.str - str = str.slice(0, op.position) + op.text + str.slice(op.position) - {str} + let { + str + } = snapshot; + str = str.slice(0, op.position) + op.text + str.slice(op.position); + return {str}; + }, - # transform op1 by op2. Return transformed version of op1. - # sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the - # op being transformed comes from the client or the server. - transform: (op1, op2, sym) -> - pos = op1.position - pos += op2.text.length if op2.position < pos or (op2.position == pos and sym is 'left') + // transform op1 by op2. Return transformed version of op1. + // sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the + // op being transformed comes from the client or the server. + transform(op1, op2, sym) { + let pos = op1.position; + if ((op2.position < pos) || ((op2.position === pos) && (sym === 'left'))) { pos += op2.text.length; } - return {position:pos, text:op1.text} + return {position:pos, text:op1.text}; + } +}; diff --git a/services/document-updater/app/coffee/sharejs/syncqueue.js b/services/document-updater/app/coffee/sharejs/syncqueue.js index 746450b010..31b2235ee3 100644 --- a/services/document-updater/app/coffee/sharejs/syncqueue.js +++ b/services/document-updater/app/coffee/sharejs/syncqueue.js @@ -1,42 +1,52 @@ -# A synchronous processing queue. The queue calls process on the arguments, -# ensuring that process() is only executing once at a time. -# -# process(data, callback) _MUST_ eventually call its callback. -# -# Example: -# -# queue = require 'syncqueue' -# -# fn = queue (data, callback) -> -# asyncthing data, -> -# callback(321) -# -# fn(1) -# fn(2) -# fn(3, (result) -> console.log(result)) -# -# ^--- async thing will only be running once at any time. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A synchronous processing queue. The queue calls process on the arguments, +// ensuring that process() is only executing once at a time. +// +// process(data, callback) _MUST_ eventually call its callback. +// +// Example: +// +// queue = require 'syncqueue' +// +// fn = queue (data, callback) -> +// asyncthing data, -> +// callback(321) +// +// fn(1) +// fn(2) +// fn(3, (result) -> console.log(result)) +// +// ^--- async thing will only be running once at any time. 
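Since this patch converts the module to JavaScript, the CoffeeScript example in the header comment above translates to roughly the following sketch (asyncthing is the comment's own placeholder for some asynchronous operation):

const queue = require('./syncqueue');

const fn = queue(function(data, callback) {
  // process() must eventually call its callback, as documented above.
  asyncthing(data, function() { callback(321); });
});

fn(1);
fn(2);
fn(3, function(result) { console.log(result); });
// ^--- the async thing still runs at most once at any time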
-module.exports = (process) -> - throw new Error('process is not a function') unless typeof process == 'function' - queue = [] +module.exports = function(process) { + if (typeof process !== 'function') { throw new Error('process is not a function'); } + const queue = []; - enqueue = (data, callback) -> - queue.push [data, callback] - flush() + const enqueue = function(data, callback) { + queue.push([data, callback]); + return flush(); + }; - enqueue.busy = false + enqueue.busy = false; - flush = -> - return if enqueue.busy or queue.length == 0 + var flush = function() { + if (enqueue.busy || (queue.length === 0)) { return; } - enqueue.busy = true - [data, callback] = queue.shift() - process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow. - enqueue.busy = false - # This is called after busy = false so a user can check if enqueue.busy is set in the callback. - callback.apply null, result if callback - flush() + enqueue.busy = true; + const [data, callback] = Array.from(queue.shift()); + return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false; + // This is called after busy = false so a user can check if enqueue.busy is set in the callback. + if (callback) { callback.apply(null, result); } + return flush(); + }); + }; - enqueue + return enqueue; +}; diff --git a/services/document-updater/app/coffee/sharejs/text-api.js b/services/document-updater/app/coffee/sharejs/text-api.js index 96243ceffb..295261ff90 100644 --- a/services/document-updater/app/coffee/sharejs/text-api.js +++ b/services/document-updater/app/coffee/sharejs/text-api.js @@ -1,32 +1,44 @@ -# Text document API for text +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text -text = require './text' if typeof WEB is 'undefined' +let text; +if (typeof WEB === 'undefined') { text = require('./text'); } -text.api = - provides: {text:true} +text.api = { + provides: {text:true}, - # The number of characters in the string - getLength: -> @snapshot.length + // The number of characters in the string + getLength() { return this.snapshot.length; }, - # Get the text contents of a document - getText: -> @snapshot + // Get the text contents of a document + getText() { return this.snapshot; }, - insert: (pos, text, callback) -> - op = [{p:pos, i:text}] + insert(pos, text, callback) { + const op = [{p:pos, i:text}]; - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - del: (pos, length, callback) -> - op = [{p:pos, d:@snapshot[pos...(pos + length)]}] + del(pos, length, callback) { + const op = [{p:pos, d:this.snapshot.slice(pos, (pos + length))}]; - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - _register: -> - @on 'remoteop', (op) -> - for component in op - if component.i != undefined - @emit 'insert', component.p, component.i - else - @emit 'delete', component.p, component.d + _register() { + return this.on('remoteop', function(op) { + return Array.from(op).map((component) => + component.i !== undefined ? 
+ this.emit('insert', component.p, component.i) + : + this.emit('delete', component.p, component.d)); + }); + } +}; diff --git a/services/document-updater/app/coffee/sharejs/text-composable-api.js b/services/document-updater/app/coffee/sharejs/text-composable-api.js index 7b27ac163a..160ab1c46e 100644 --- a/services/document-updater/app/coffee/sharejs/text-composable-api.js +++ b/services/document-updater/app/coffee/sharejs/text-composable-api.js @@ -1,43 +1,64 @@ -# Text document API for text +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text -if WEB? - type = exports.types['text-composable'] -else - type = require './text-composable' +let type; +if (typeof WEB !== 'undefined' && WEB !== null) { + type = exports.types['text-composable']; +} else { + type = require('./text-composable'); +} -type.api = - provides: {'text':true} +type.api = { + provides: {'text':true}, - # The number of characters in the string - 'getLength': -> @snapshot.length + // The number of characters in the string + 'getLength'() { return this.snapshot.length; }, - # Get the text contents of a document - 'getText': -> @snapshot + // Get the text contents of a document + 'getText'() { return this.snapshot; }, - 'insert': (pos, text, callback) -> - op = type.normalize [pos, 'i':text, (@snapshot.length - pos)] + 'insert'(pos, text, callback) { + const op = type.normalize([pos, {'i':text}, (this.snapshot.length - pos)]); - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - 'del': (pos, length, callback) -> - op = type.normalize [pos, 'd':@snapshot[pos...(pos + length)], (@snapshot.length - pos - length)] + 'del'(pos, length, callback) { + const op = type.normalize([pos, {'d':this.snapshot.slice(pos, (pos + length))}, (this.snapshot.length - pos - length)]); - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - _register: -> - @on 'remoteop', (op) -> - pos = 0 - for component in op - if typeof component is 'number' - pos += component - else if component.i != undefined - @emit 'insert', pos, component.i - pos += component.i.length - else - # delete - @emit 'delete', pos, component.d - # We don't increment pos, because the position - # specified is after the delete has happened. + _register() { + return this.on('remoteop', function(op) { + let pos = 0; + return (() => { + const result = []; + for (let component of Array.from(op)) { + if (typeof component === 'number') { + result.push(pos += component); + } else if (component.i !== undefined) { + this.emit('insert', pos, component.i); + result.push(pos += component.i.length); + } else { + // delete + result.push(this.emit('delete', pos, component.d)); + } + } + return result; + })(); + }); + } +}; + // We don't increment pos, because the position + // specified is after the delete has happened. diff --git a/services/document-updater/app/coffee/sharejs/text-composable.js b/services/document-updater/app/coffee/sharejs/text-composable.js index 992b567bf0..4f43f769cd 100644 --- a/services/document-updater/app/coffee/sharejs/text-composable.js +++ b/services/document-updater/app/coffee/sharejs/text-composable.js @@ -1,261 +1,315 @@ -# An alternate composable implementation for text. 
This is much closer -# to the implementation used by google wave. -# -# Ops are lists of components which iterate over the whole document. -# Components are either: -# A number N: Skip N characters in the original document -# {i:'str'}: Insert 'str' at the current position in the document -# {d:'str'}: Delete 'str', which appears at the current position in the document -# -# Eg: [3, {i:'hi'}, 5, {d:'internet'}] -# -# Snapshots are strings. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// An alternate composable implementation for text. This is much closer +// to the implementation used by google wave. +// +// Ops are lists of components which iterate over the whole document. +// Components are either: +// A number N: Skip N characters in the original document +// {i:'str'}: Insert 'str' at the current position in the document +// {d:'str'}: Delete 'str', which appears at the current position in the document +// +// Eg: [3, {i:'hi'}, 5, {d:'internet'}] +// +// Snapshots are strings. -p = -> #require('util').debug -i = -> #require('util').inspect +let makeAppend; +const p = function() {}; //require('util').debug +const i = function() {}; //require('util').inspect -exports = if WEB? then {} else module.exports +const exports = (typeof WEB !== 'undefined' && WEB !== null) ? {} : module.exports; -exports.name = 'text-composable' +exports.name = 'text-composable'; -exports.create = -> '' +exports.create = () => ''; -# -------- Utility methods +// -------- Utility methods -checkOp = (op) -> - throw new Error('Op must be an array of components') unless Array.isArray(op) - last = null - for c in op - if typeof(c) == 'object' - throw new Error("Invalid op component: #{i c}") unless (c.i? && c.i.length > 0) or (c.d? && c.d.length > 0) - else - throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number' - throw new Error('Skip components must be a positive number') unless c > 0 - throw new Error('Adjacent skip components should be added') if typeof(last) == 'number' +const checkOp = function(op) { + if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); } + let last = null; + return (() => { + const result = []; + for (let c of Array.from(op)) { + if (typeof(c) === 'object') { + if (((c.i == null) || !(c.i.length > 0)) && ((c.d == null) || !(c.d.length > 0))) { throw new Error(`Invalid op component: ${i(c)}`); } + } else { + if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); } + if (!(c > 0)) { throw new Error('Skip components must be a positive number'); } + if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be added'); } + } - last = c + result.push(last = c); + } + return result; + })(); +}; -# Makes a function for appending components to a given op. -# Exported for the randomOpGenerator. -exports._makeAppend = makeAppend = (op) -> (component) -> - if component == 0 || component.i == '' || component.d == '' - return - else if op.length == 0 - op.push component - else if typeof(component) == 'number' && typeof(op[op.length - 1]) == 'number' - op[op.length - 1] += component - else if component.i? && op[op.length - 1].i? 
- op[op.length - 1].i += component.i - else if component.d? && op[op.length - 1].d? - op[op.length - 1].d += component.d - else - op.push component +// Makes a function for appending components to a given op. +// Exported for the randomOpGenerator. +exports._makeAppend = (makeAppend = op => (function(component) { + if ((component === 0) || (component.i === '') || (component.d === '')) { + return; + } else if (op.length === 0) { + return op.push(component); + } else if ((typeof(component) === 'number') && (typeof(op[op.length - 1]) === 'number')) { + return op[op.length - 1] += component; + } else if ((component.i != null) && (op[op.length - 1].i != null)) { + return op[op.length - 1].i += component.i; + } else if ((component.d != null) && (op[op.length - 1].d != null)) { + return op[op.length - 1].d += component.d; + } else { + return op.push(component); + } +})); -# checkOp op +// checkOp op -# Makes 2 functions for taking components from the start of an op, and for peeking -# at the next op that could be taken. -makeTake = (op) -> - # The index of the next component to take - idx = 0 - # The offset into the component - offset = 0 +// Makes 2 functions for taking components from the start of an op, and for peeking +// at the next op that could be taken. +const makeTake = function(op) { + // The index of the next component to take + let idx = 0; + // The offset into the component + let offset = 0; - # Take up to length n from the front of op. If n is null, take the next - # op component. If indivisableField == 'd', delete components won't be separated. - # If indivisableField == 'i', insert components won't be separated. - take = (n, indivisableField) -> - return null if idx == op.length - #assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' + // Take up to length n from the front of op. If n is null, take the next + // op component. If indivisableField == 'd', delete components won't be separated. + // If indivisableField == 'i', insert components won't be separated. + const take = function(n, indivisableField) { + let c; + if (idx === op.length) { return null; } + //assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' - if typeof(op[idx]) == 'number' - if !n? or op[idx] - offset <= n - c = op[idx] - offset - ++idx; offset = 0 - c - else - offset += n - n - else - # Take from the string - field = if op[idx].i then 'i' else 'd' - c = {} - if !n? or op[idx][field].length - offset <= n or field == indivisableField - c[field] = op[idx][field][offset..] - ++idx; offset = 0 - else - c[field] = op[idx][field][offset...(offset + n)] - offset += n - c + if (typeof(op[idx]) === 'number') { + if ((n == null) || ((op[idx] - offset) <= n)) { + c = op[idx] - offset; + ++idx; offset = 0; + return c; + } else { + offset += n; + return n; + } + } else { + // Take from the string + const field = op[idx].i ? 'i' : 'd'; + c = {}; + if ((n == null) || ((op[idx][field].length - offset) <= n) || (field === indivisableField)) { + c[field] = op[idx][field].slice(offset); + ++idx; offset = 0; + } else { + c[field] = op[idx][field].slice(offset, (offset + n)); + offset += n; + } + return c; + } + }; - peekType = () -> - op[idx] + const peekType = () => op[idx]; - [take, peekType] + return [take, peekType]; +}; -# Find and return the length of an op component -componentLength = (component) -> - if typeof(component) == 'number' - component - else if component.i? 
- component.i.length - else - component.d.length +// Find and return the length of an op component +const componentLength = function(component) { + if (typeof(component) === 'number') { + return component; + } else if (component.i != null) { + return component.i.length; + } else { + return component.d.length; + } +}; -# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate -# adjacent inserts and deletes. -exports.normalize = (op) -> - newOp = [] - append = makeAppend newOp - append component for component in op - newOp +// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +// adjacent inserts and deletes. +exports.normalize = function(op) { + const newOp = []; + const append = makeAppend(newOp); + for (let component of Array.from(op)) { append(component); } + return newOp; +}; -# Apply the op to the string. Returns the new string. -exports.apply = (str, op) -> - p "Applying #{i op} to '#{str}'" - throw new Error('Snapshot should be a string') unless typeof(str) == 'string' - checkOp op +// Apply the op to the string. Returns the new string. +exports.apply = function(str, op) { + p(`Applying ${i(op)} to '${str}'`); + if (typeof(str) !== 'string') { throw new Error('Snapshot should be a string'); } + checkOp(op); - pos = 0 - newDoc = [] + const pos = 0; + const newDoc = []; - for component in op - if typeof(component) == 'number' - throw new Error('The op is too long for this document') if component > str.length - newDoc.push str[...component] - str = str[component..] - else if component.i? - newDoc.push component.i - else - throw new Error("The deleted text '#{component.d}' doesn't match the next characters in the document '#{str[...component.d.length]}'") unless component.d == str[...component.d.length] - str = str[component.d.length..] + for (let component of Array.from(op)) { + if (typeof(component) === 'number') { + if (component > str.length) { throw new Error('The op is too long for this document'); } + newDoc.push(str.slice(0, component)); + str = str.slice(component); + } else if (component.i != null) { + newDoc.push(component.i); + } else { + if (component.d !== str.slice(0, component.d.length)) { throw new Error(`The deleted text '${component.d}' doesn't match the next characters in the document '${str.slice(0, component.d.length)}'`); } + str = str.slice(component.d.length); + } + } - throw new Error("The applied op doesn't traverse the entire document") unless '' == str + if ('' !== str) { throw new Error("The applied op doesn't traverse the entire document"); } - newDoc.join '' + return newDoc.join(''); +}; -# transform op1 by op2. Return transformed version of op1. -# op1 and op2 are unchanged by transform. -exports.transform = (op, otherOp, side) -> - throw new Error "side (#{side} must be 'left' or 'right'" unless side == 'left' or side == 'right' +// transform op1 by op2. Return transformed version of op1. +// op1 and op2 are unchanged by transform. 
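+// Eg: transforming op1 = [{i:'x'}] by op2 = [{i:'y'}] over an empty document
+// should yield [{i:'x'}, 1] with side 'left' (the left insert goes first, then
+// skips over the 'y' that op2 inserted) and [1, {i:'x'}] with side 'right'.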
+exports.transform = function(op, otherOp, side) { + let component; + if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side} must be 'left' or 'right'`); } - checkOp op - checkOp otherOp - newOp = [] + checkOp(op); + checkOp(otherOp); + const newOp = []; - append = makeAppend newOp - [take, peek] = makeTake op + const append = makeAppend(newOp); + const [take, peek] = Array.from(makeTake(op)); - for component in otherOp - if typeof(component) == 'number' # Skip - length = component - while length > 0 - chunk = take(length, 'i') - throw new Error('The op traverses more elements than the document has') unless chunk != null + for (component of Array.from(otherOp)) { + var chunk, length; + if (typeof(component) === 'number') { // Skip + length = component; + while (length > 0) { + chunk = take(length, 'i'); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - append chunk - length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.i? - else if component.i? # Insert - if side == 'left' - # The left insert should go first. - o = peek() - append take() if o?.i + append(chunk); + if ((typeof(chunk) !== 'object') || (chunk.i == null)) { length -= componentLength(chunk); } + } + } else if (component.i != null) { // Insert + if (side === 'left') { + // The left insert should go first. + const o = peek(); + if (o != null ? o.i : undefined) { append(take()); } + } - # Otherwise, skip the inserted text. - append(component.i.length) - else # Delete. - #assert.ok component.d - length = component.d.length - while length > 0 - chunk = take(length, 'i') - throw new Error('The op traverses more elements than the document has') unless chunk != null + // Otherwise, skip the inserted text. + append(component.i.length); + } else { // Delete. + //assert.ok component.d + ({ + length + } = component.d); + while (length > 0) { + chunk = take(length, 'i'); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - if typeof(chunk) == 'number' - length -= chunk - else if chunk.i? - append(chunk) - else - #assert.ok chunk.d - # The delete is unnecessary now. - length -= chunk.d.length + if (typeof(chunk) === 'number') { + length -= chunk; + } else if (chunk.i != null) { + append(chunk); + } else { + //assert.ok chunk.d + // The delete is unnecessary now. + length -= chunk.d.length; + } + } + } + } - # Append extras from op1 - while (component = take()) - throw new Error "Remaining fragments in the op: #{i component}" unless component?.i? - append component + // Append extras from op1 + while (component = take()) { + if ((component != null ? component.i : undefined) == null) { throw new Error(`Remaining fragments in the op: ${i(component)}`); } + append(component); + } - newOp + return newOp; +}; -# Compose 2 ops into 1 op. -exports.compose = (op1, op2) -> - p "COMPOSE #{i op1} + #{i op2}" - checkOp op1 - checkOp op2 +// Compose 2 ops into 1 op. 
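+// Eg: compose([{i:'abc'}], [1, {d:'b'}, 1]) should yield [{i:'ac'}] - deleting
+// freshly inserted text cancels out instead of surviving in the composed op.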
+exports.compose = function(op1, op2) { + let component; + p(`COMPOSE ${i(op1)} + ${i(op2)}`); + checkOp(op1); + checkOp(op2); - result = [] + const result = []; - append = makeAppend result - [take, _] = makeTake op1 + const append = makeAppend(result); + const [take, _] = Array.from(makeTake(op1)); - for component in op2 - if typeof(component) == 'number' # Skip - length = component - while length > 0 - chunk = take(length, 'd') - throw new Error('The op traverses more elements than the document has') unless chunk != null + for (component of Array.from(op2)) { + var chunk, length; + if (typeof(component) === 'number') { // Skip + length = component; + while (length > 0) { + chunk = take(length, 'd'); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - append chunk - length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.d? + append(chunk); + if ((typeof(chunk) !== 'object') || (chunk.d == null)) { length -= componentLength(chunk); } + } - else if component.i? # Insert - append {i:component.i} + } else if (component.i != null) { // Insert + append({i:component.i}); - else # Delete - offset = 0 - while offset < component.d.length - chunk = take(component.d.length - offset, 'd') - throw new Error('The op traverses more elements than the document has') unless chunk != null + } else { // Delete + let offset = 0; + while (offset < component.d.length) { + chunk = take(component.d.length - offset, 'd'); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - # If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. - if typeof(chunk) == 'number' - append {d:component.d[offset...(offset + chunk)]} - offset += chunk - else if chunk.i? - throw new Error("The deleted text doesn't match the inserted text") unless component.d[offset...(offset + chunk.i.length)] == chunk.i - offset += chunk.i.length - # The ops cancel each other out. - else - # Delete - append chunk + // If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. + if (typeof(chunk) === 'number') { + append({d:component.d.slice(offset, (offset + chunk))}); + offset += chunk; + } else if (chunk.i != null) { + if (component.d.slice(offset, (offset + chunk.i.length)) !== chunk.i) { throw new Error("The deleted text doesn't match the inserted text"); } + offset += chunk.i.length; + // The ops cancel each other out. + } else { + // Delete + append(chunk); + } + } + } + } - # Append extras from op1 - while (component = take()) - throw new Error "Trailing stuff in op1 #{i component}" unless component?.d? - append component + // Append extras from op1 + while (component = take()) { + if ((component != null ? component.d : undefined) == null) { throw new Error(`Trailing stuff in op1 ${i(component)}`); } + append(component); + } - result + return result; +}; -invertComponent = (c) -> - if typeof(c) == 'number' - c - else if c.i? 
- {d:c.i} - else - {i:c.d} +const invertComponent = function(c) { + if (typeof(c) === 'number') { + return c; + } else if (c.i != null) { + return {d:c.i}; + } else { + return {i:c.d}; + } +}; -# Invert an op -exports.invert = (op) -> - result = [] - append = makeAppend result +// Invert an op +exports.invert = function(op) { + const result = []; + const append = makeAppend(result); - append(invertComponent component) for component in op + for (let component of Array.from(op)) { append(invertComponent(component)); } - result + return result; +}; -if window? - window.ot ||= {} - window.ot.types ||= {} - window.ot.types.text = exports +if (typeof window !== 'undefined' && window !== null) { + if (!window.ot) { window.ot = {}; } + if (!window.ot.types) { window.ot.types = {}; } + window.ot.types.text = exports; +} diff --git a/services/document-updater/app/coffee/sharejs/text-tp2-api.js b/services/document-updater/app/coffee/sharejs/text-tp2-api.js index d661b5ae37..e3f4f95ea6 100644 --- a/services/document-updater/app/coffee/sharejs/text-tp2-api.js +++ b/services/document-updater/app/coffee/sharejs/text-tp2-api.js @@ -1,89 +1,118 @@ -# Text document API for text-tp2 +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text-tp2 -if WEB? - type = exports.types['text-tp2'] -else - type = require './text-tp2' +let type; +if (typeof WEB !== 'undefined' && WEB !== null) { + type = exports.types['text-tp2']; +} else { + type = require('./text-tp2'); +} -{_takeDoc:takeDoc, _append:append} = type +const {_takeDoc:takeDoc, _append:append} = type; -appendSkipChars = (op, doc, pos, maxlength) -> - while (maxlength == undefined || maxlength > 0) and pos.index < doc.data.length - part = takeDoc doc, pos, maxlength, true - maxlength -= part.length if maxlength != undefined and typeof part is 'string' - append op, (part.length || part) +const appendSkipChars = (op, doc, pos, maxlength) => (() => { + const result = []; + while (((maxlength === undefined) || (maxlength > 0)) && (pos.index < doc.data.length)) { + const part = takeDoc(doc, pos, maxlength, true); + if ((maxlength !== undefined) && (typeof part === 'string')) { maxlength -= part.length; } + result.push(append(op, (part.length || part))); + } + return result; +})(); -type['api'] = - 'provides': {'text':true} +type['api'] = { + 'provides': {'text':true}, - # The number of characters in the string - 'getLength': -> @snapshot.charLength + // The number of characters in the string + 'getLength'() { return this.snapshot.charLength; }, - # Flatten a document into a string - 'getText': -> - strings = (elem for elem in @snapshot.data when typeof elem is 'string') - strings.join '' + // Flatten a document into a string + 'getText'() { + const strings = (Array.from(this.snapshot.data).filter((elem) => typeof elem === 'string')); + return strings.join(''); + }, - 'insert': (pos, text, callback) -> - pos = 0 if pos == undefined + 'insert'(pos, text, callback) { + if (pos === undefined) { pos = 0; } - op = [] - docPos = {index:0, offset:0} + const op = []; + const docPos = {index:0, offset:0}; - appendSkipChars op, @snapshot, docPos, pos - append op, {'i':text} - appendSkipChars op, @snapshot, docPos + appendSkipChars(op, 
this.snapshot, docPos, pos);
+    append(op, {'i':text});
+    appendSkipChars(op, this.snapshot, docPos);

-    @submitOp op, callback
-    op
+    this.submitOp(op, callback);
+    return op;
+  },

-  'del': (pos, length, callback) ->
-    op = []
-    docPos = {index:0, offset:0}
+  'del'(pos, length, callback) {
+    const op = [];
+    const docPos = {index:0, offset:0};

-    appendSkipChars op, @snapshot, docPos, pos
+    appendSkipChars(op, this.snapshot, docPos, pos);

-    while length > 0
-      part = takeDoc @snapshot, docPos, length, true
-      if typeof part is 'string'
-        append op, {'d':part.length}
-        length -= part.length
-      else
-        append op, part
+    while (length > 0) {
+      const part = takeDoc(this.snapshot, docPos, length, true);
+      if (typeof part === 'string') {
+        append(op, {'d':part.length});
+        length -= part.length;
+      } else {
+        append(op, part);
+      }
+    }

-    appendSkipChars op, @snapshot, docPos
+    appendSkipChars(op, this.snapshot, docPos);

-    @submitOp op, callback
-    op
+    this.submitOp(op, callback);
+    return op;
+  },

-  '_register': ->
-    # Interpret recieved ops + generate more detailed events for them
-    @on 'remoteop', (op, snapshot) ->
-      textPos = 0
-      docPos = {index:0, offset:0}
+  '_register'() {
+    // Interpret received ops + generate more detailed events for them
+    return this.on('remoteop', function(op, snapshot) {
+      let textPos = 0;
+      const docPos = {index:0, offset:0};

-      for component in op
-        if typeof component is 'number'
-          # Skip
-          remainder = component
-          while remainder > 0
-            part = takeDoc snapshot, docPos, remainder
-            if typeof part is 'string'
-              textPos += part.length
-            remainder -= part.length || part
-        else if component.i != undefined
-          # Insert
-          if typeof component.i is 'string'
-            @emit 'insert', textPos, component.i
-            textPos += component.i.length
-        else
-          # Delete
-          remainder = component.d
-          while remainder > 0
-            part = takeDoc snapshot, docPos, remainder
-            if typeof part is 'string'
-              @emit 'delete', textPos, part
-            remainder -= part.length || part
+      for (let component of Array.from(op)) {
+        var part, remainder;
+        if (typeof component === 'number') {
+          // Skip
+          remainder = component;
+          while (remainder > 0) {
+            part = takeDoc(snapshot, docPos, remainder);
+            if (typeof part === 'string') {
+              textPos += part.length;
+            }
+            remainder -= part.length || part;
+          }
+        } else if (component.i !== undefined) {
+          // Insert
+          if (typeof component.i === 'string') {
+            this.emit('insert', textPos, component.i);
+            textPos += component.i.length;
+          }
+        } else {
+          // Delete
+          remainder = component.d;
+          while (remainder > 0) {
+            part = takeDoc(snapshot, docPos, remainder);
+            if (typeof part === 'string') {
+              this.emit('delete', textPos, part);
+            }
+            remainder -= part.length || part;
+          }
+        }
+      }

-      return
+    });
+  }
+};
diff --git a/services/document-updater/app/coffee/sharejs/text-tp2.js b/services/document-updater/app/coffee/sharejs/text-tp2.js
index d19cbdcef4..ab123d6ff7 100644
--- a/services/document-updater/app/coffee/sharejs/text-tp2.js
+++ b/services/document-updater/app/coffee/sharejs/text-tp2.js
@@ -1,322 +1,398 @@
-# A TP2 implementation of text, following this spec:
-# http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README
-#
-# A document is made up of a string and a set of tombstones inserted throughout
-# the string. For example, 'some ', (2 tombstones), 'string'.
-#
-# This is encoded in a document as: {s:'some string', t:[5, -2, 6]}
-#
-# Ops are lists of components which iterate over the whole document.
-# Components are either: -# N: Skip N characters in the original document -# {i:'str'}: Insert 'str' at the current position in the document -# {i:N}: Insert N tombstones at the current position in the document -# {d:N}: Delete (tombstone) N characters at the current position in the document -# -# Eg: [3, {i:'hi'}, 5, {d:8}] -# -# Snapshots are lists with characters and tombstones. Characters are stored in strings -# and adjacent tombstones are flattened into numbers. -# -# Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters) -# would be represented by a document snapshot of ['Hello ', 5, 'world'] +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A TP2 implementation of text, following this spec: +// http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README +// +// A document is made up of a string and a set of tombstones inserted throughout +// the string. For example, 'some ', (2 tombstones), 'string'. +// +// This is encoded in a document as: {s:'some string', t:[5, -2, 6]} +// +// Ops are lists of components which iterate over the whole document. +// Components are either: +// N: Skip N characters in the original document +// {i:'str'}: Insert 'str' at the current position in the document +// {i:N}: Insert N tombstones at the current position in the document +// {d:N}: Delete (tombstone) N characters at the current position in the document +// +// Eg: [3, {i:'hi'}, 5, {d:8}] +// +// Snapshots are lists with characters and tombstones. Characters are stored in strings +// and adjacent tombstones are flattened into numbers. +// +// Eg, the document: 'Hello .....world' ('.' 
denotes tombstoned (deleted) characters) +// would be represented by a document snapshot of ['Hello ', 5, 'world'] -type = - name: 'text-tp2' - tp2: true - create: -> {charLength:0, totalLength:0, positionCache:[], data:[]} - serialize: (doc) -> - throw new Error 'invalid doc snapshot' unless doc.data - doc.data - deserialize: (data) -> - doc = type.create() - doc.data = data +let append, appendDoc, takeDoc; +var type = { + name: 'text-tp2', + tp2: true, + create() { return {charLength:0, totalLength:0, positionCache:[], data:[]}; }, + serialize(doc) { + if (!doc.data) { throw new Error('invalid doc snapshot'); } + return doc.data; + }, + deserialize(data) { + const doc = type.create(); + doc.data = data; - for component in data - if typeof component is 'string' - doc.charLength += component.length - doc.totalLength += component.length - else - doc.totalLength += component + for (let component of Array.from(data)) { + if (typeof component === 'string') { + doc.charLength += component.length; + doc.totalLength += component.length; + } else { + doc.totalLength += component; + } + } - doc + return doc; + } +}; -checkOp = (op) -> - throw new Error('Op must be an array of components') unless Array.isArray(op) - last = null - for c in op - if typeof(c) == 'object' - if c.i != undefined - throw new Error('Inserts must insert a string or a +ive number') unless (typeof(c.i) == 'string' and c.i.length > 0) or (typeof(c.i) == 'number' and c.i > 0) - else if c.d != undefined - throw new Error('Deletes must be a +ive number') unless typeof(c.d) == 'number' and c.d > 0 - else - throw new Error('Operation component must define .i or .d') - else - throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number' - throw new Error('Skip components must be a positive number') unless c > 0 - throw new Error('Adjacent skip components should be combined') if typeof(last) == 'number' +const checkOp = function(op) { + if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); } + let last = null; + return (() => { + const result = []; + for (let c of Array.from(op)) { + if (typeof(c) === 'object') { + if (c.i !== undefined) { + if (((typeof(c.i) !== 'string') || !(c.i.length > 0)) && ((typeof(c.i) !== 'number') || !(c.i > 0))) { throw new Error('Inserts must insert a string or a +ive number'); } + } else if (c.d !== undefined) { + if ((typeof(c.d) !== 'number') || !(c.d > 0)) { throw new Error('Deletes must be a +ive number'); } + } else { + throw new Error('Operation component must define .i or .d'); + } + } else { + if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); } + if (!(c > 0)) { throw new Error('Skip components must be a positive number'); } + if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be combined'); } + } - last = c + result.push(last = c); + } + return result; + })(); +}; -# Take the next part from the specified position in a document snapshot. -# position = {index, offset}. It will be updated. -type._takeDoc = takeDoc = (doc, position, maxlength, tombsIndivisible) -> - throw new Error 'Operation goes past the end of the document' if position.index >= doc.data.length +// Take the next part from the specified position in a document snapshot. +// position = {index, offset}. It will be updated. 
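+// Eg: with doc.data = ['Hello ', 5, 'world'] and position = {index:0, offset:0},
+// takeDoc(doc, position, 3) should return 'Hel' and leave position at
+// {index:0, offset:3}; a second call with maxlength 10 returns 'lo ' (a part
+// never spans two elements) and advances position to the tombstone run at
+// {index:1, offset:0}.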
+type._takeDoc = (takeDoc = function(doc, position, maxlength, tombsIndivisible) { + if (position.index >= doc.data.length) { throw new Error('Operation goes past the end of the document'); } - part = doc.data[position.index] - # peel off data[0] - result = if typeof(part) == 'string' - if maxlength != undefined - part[position.offset...(position.offset + maxlength)] - else - part[position.offset...] - else - if maxlength == undefined or tombsIndivisible + const part = doc.data[position.index]; + // peel off data[0] + const result = typeof(part) === 'string' ? + maxlength !== undefined ? + part.slice(position.offset, (position.offset + maxlength)) + : + part.slice(position.offset) + : + (maxlength === undefined) || tombsIndivisible ? part - position.offset - else - Math.min(maxlength, part - position.offset) + : + Math.min(maxlength, part - position.offset); - resultLen = result.length || result + const resultLen = result.length || result; - if (part.length || part) - position.offset > resultLen - position.offset += resultLen - else - position.index++ - position.offset = 0 + if (((part.length || part) - position.offset) > resultLen) { + position.offset += resultLen; + } else { + position.index++; + position.offset = 0; + } - result + return result; +}); -# Append a part to the end of a document -type._appendDoc = appendDoc = (doc, p) -> - return if p == 0 or p == '' +// Append a part to the end of a document +type._appendDoc = (appendDoc = function(doc, p) { + if ((p === 0) || (p === '')) { return; } - if typeof p is 'string' - doc.charLength += p.length - doc.totalLength += p.length - else - doc.totalLength += p + if (typeof p === 'string') { + doc.charLength += p.length; + doc.totalLength += p.length; + } else { + doc.totalLength += p; + } - data = doc.data - if data.length == 0 - data.push p - else if typeof(data[data.length - 1]) == typeof(p) - data[data.length - 1] += p - else - data.push p - return + const { + data + } = doc; + if (data.length === 0) { + data.push(p); + } else if (typeof(data[data.length - 1]) === typeof(p)) { + data[data.length - 1] += p; + } else { + data.push(p); + } +}); -# Apply the op to the document. The document is not modified in the process. -type.apply = (doc, op) -> - unless doc.totalLength != undefined and doc.charLength != undefined and doc.data.length != undefined - throw new Error('Snapshot is invalid') +// Apply the op to the document. The document is not modified in the process. 
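+// Eg: applying [1, {d:2}] to a snapshot with data ['abc'] should keep 'a',
+// tombstone 'bc', and return a fresh snapshot with data ['a', 2]
+// (charLength 1, totalLength 3).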
+type.apply = function(doc, op) { + if ((doc.totalLength === undefined) || (doc.charLength === undefined) || (doc.data.length === undefined)) { + throw new Error('Snapshot is invalid'); + } - checkOp op + checkOp(op); - newDoc = type.create() - position = {index:0, offset:0} + const newDoc = type.create(); + const position = {index:0, offset:0}; - for component in op - if typeof(component) is 'number' - remainder = component - while remainder > 0 - part = takeDoc doc, position, remainder + for (let component of Array.from(op)) { + var part, remainder; + if (typeof(component) === 'number') { + remainder = component; + while (remainder > 0) { + part = takeDoc(doc, position, remainder); - appendDoc newDoc, part - remainder -= part.length || part + appendDoc(newDoc, part); + remainder -= part.length || part; + } - else if component.i != undefined - appendDoc newDoc, component.i - else if component.d != undefined - remainder = component.d - while remainder > 0 - part = takeDoc doc, position, remainder - remainder -= part.length || part - appendDoc newDoc, component.d + } else if (component.i !== undefined) { + appendDoc(newDoc, component.i); + } else if (component.d !== undefined) { + remainder = component.d; + while (remainder > 0) { + part = takeDoc(doc, position, remainder); + remainder -= part.length || part; + } + appendDoc(newDoc, component.d); + } + } - newDoc + return newDoc; +}; -# Append an op component to the end of the specified op. -# Exported for the randomOpGenerator. -type._append = append = (op, component) -> - if component == 0 || component.i == '' || component.i == 0 || component.d == 0 - return - else if op.length == 0 - op.push component - else - last = op[op.length - 1] - if typeof(component) == 'number' && typeof(last) == 'number' - op[op.length - 1] += component - else if component.i != undefined && last.i? && typeof(last.i) == typeof(component.i) - last.i += component.i - else if component.d != undefined && last.d? - last.d += component.d - else - op.push component +// Append an op component to the end of the specified op. +// Exported for the randomOpGenerator. +type._append = (append = function(op, component) { + if ((component === 0) || (component.i === '') || (component.i === 0) || (component.d === 0)) { + return; + } else if (op.length === 0) { + return op.push(component); + } else { + const last = op[op.length - 1]; + if ((typeof(component) === 'number') && (typeof(last) === 'number')) { + return op[op.length - 1] += component; + } else if ((component.i !== undefined) && (last.i != null) && (typeof(last.i) === typeof(component.i))) { + return last.i += component.i; + } else if ((component.d !== undefined) && (last.d != null)) { + return last.d += component.d; + } else { + return op.push(component); + } + } +}); -# Makes 2 functions for taking components from the start of an op, and for peeking -# at the next op that could be taken. -makeTake = (op) -> - # The index of the next component to take - index = 0 - # The offset into the component - offset = 0 +// Makes 2 functions for taking components from the start of an op, and for peeking +// at the next op that could be taken. +const makeTake = function(op) { + // The index of the next component to take + let index = 0; + // The offset into the component + let offset = 0; - # Take up to length maxlength from the op. If maxlength is not defined, there is no max. - # If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. - # - # Returns null when op is fully consumed. 
- take = (maxlength, insertsIndivisible) -> - return null if index == op.length + // Take up to length maxlength from the op. If maxlength is not defined, there is no max. + // If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. + // + // Returns null when op is fully consumed. + const take = function(maxlength, insertsIndivisible) { + let current; + if (index === op.length) { return null; } - e = op[index] - if typeof((current = e)) == 'number' or typeof((current = e.i)) == 'number' or (current = e.d) != undefined - if !maxlength? or current - offset <= maxlength or (insertsIndivisible and e.i != undefined) - # Return the rest of the current element. - c = current - offset - ++index; offset = 0 - else - offset += maxlength - c = maxlength - if e.i != undefined then {i:c} else if e.d != undefined then {d:c} else c - else - # Take from the inserted string - if !maxlength? or e.i.length - offset <= maxlength or insertsIndivisible - result = {i:e.i[offset..]} - ++index; offset = 0 - else - result = {i:e.i[offset...offset + maxlength]} - offset += maxlength - result + const e = op[index]; + if ((typeof((current = e)) === 'number') || (typeof((current = e.i)) === 'number') || ((current = e.d) !== undefined)) { + let c; + if ((maxlength == null) || ((current - offset) <= maxlength) || (insertsIndivisible && (e.i !== undefined))) { + // Return the rest of the current element. + c = current - offset; + ++index; offset = 0; + } else { + offset += maxlength; + c = maxlength; + } + if (e.i !== undefined) { return {i:c}; } else if (e.d !== undefined) { return {d:c}; } else { return c; } + } else { + // Take from the inserted string + let result; + if ((maxlength == null) || ((e.i.length - offset) <= maxlength) || insertsIndivisible) { + result = {i:e.i.slice(offset)}; + ++index; offset = 0; + } else { + result = {i:e.i.slice(offset, offset + maxlength)}; + offset += maxlength; + } + return result; + } + }; - peekType = -> op[index] + const peekType = () => op[index]; - [take, peekType] + return [take, peekType]; +}; -# Find and return the length of an op component -componentLength = (component) -> - if typeof(component) == 'number' - component - else if typeof(component.i) == 'string' - component.i.length - else - # This should work because c.d and c.i must be +ive. - component.d or component.i +// Find and return the length of an op component +const componentLength = function(component) { + if (typeof(component) === 'number') { + return component; + } else if (typeof(component.i) === 'string') { + return component.i.length; + } else { + // This should work because c.d and c.i must be +ive. + return component.d || component.i; + } +}; -# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate -# adjacent inserts and deletes. -type.normalize = (op) -> - newOp = [] - append newOp, component for component in op - newOp +// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +// adjacent inserts and deletes. +type.normalize = function(op) { + const newOp = []; + for (let component of Array.from(op)) { append(newOp, component); } + return newOp; +}; -# This is a helper method to transform and prune. goForwards is true for transform, false for prune. -transformer = (op, otherOp, goForwards, side) -> - checkOp op - checkOp otherOp - newOp = [] +// This is a helper method to transform and prune. goForwards is true for transform, false for prune. 
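+// Eg: transforming [{i:'x'}] by [{i:'yy'}] with side 'right' should yield
+// [2, {i:'x'}], and pruning that result by [{i:'yy'}] recovers the original
+// [{i:'x'}] - prune undoes what transform did.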
+const transformer = function(op, otherOp, goForwards, side) { + let component; + checkOp(op); + checkOp(otherOp); + const newOp = []; - [take, peek] = makeTake op + const [take, peek] = Array.from(makeTake(op)); - for component in otherOp - length = componentLength component + for (component of Array.from(otherOp)) { + var chunk; + let length = componentLength(component); - if component.i != undefined # Insert text or tombs - if goForwards # transform - insert skips over inserted parts - if side == 'left' - # The left insert should go first. - append newOp, take() while peek()?.i != undefined + if (component.i !== undefined) { // Insert text or tombs + if (goForwards) { // transform - insert skips over inserted parts + if (side === 'left') { + // The left insert should go first. + while (__guard__(peek(), x => x.i) !== undefined) { append(newOp, take()); } + } - # In any case, skip the inserted text. - append newOp, length + // In any case, skip the inserted text. + append(newOp, length); - else # Prune. Remove skips for inserts. - while length > 0 - chunk = take length, true + } else { // Prune. Remove skips for inserts. + while (length > 0) { + chunk = take(length, true); - throw new Error 'The transformed op is invalid' unless chunk != null - throw new Error 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' if chunk.d != undefined + if (chunk === null) { throw new Error('The transformed op is invalid'); } + if (chunk.d !== undefined) { throw new Error('The transformed op deletes locally inserted characters - it cannot be purged of the insert.'); } - if typeof chunk is 'number' - length -= chunk - else - append newOp, chunk + if (typeof chunk === 'number') { + length -= chunk; + } else { + append(newOp, chunk); + } + } + } - else # Skip or delete - while length > 0 - chunk = take length, true - throw new Error('The op traverses more elements than the document has') unless chunk != null + } else { // Skip or delete + while (length > 0) { + chunk = take(length, true); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - append newOp, chunk - length -= componentLength chunk unless chunk.i + append(newOp, chunk); + if (!chunk.i) { length -= componentLength(chunk); } + } + } + } - # Append extras from op1 - while (component = take()) - throw new Error "Remaining fragments in the op: #{component}" unless component.i != undefined - append newOp, component + // Append extras from op1 + while (component = take()) { + if (component.i === undefined) { throw new Error(`Remaining fragments in the op: ${component}`); } + append(newOp, component); + } - newOp + return newOp; +}; -# transform op1 by op2. Return transformed version of op1. -# op1 and op2 are unchanged by transform. -# side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. -type.transform = (op, otherOp, side) -> - throw new Error "side (#{side}) should be 'left' or 'right'" unless side == 'left' or side == 'right' - transformer op, otherOp, true, side +// transform op1 by op2. Return transformed version of op1. +// op1 and op2 are unchanged by transform. +// side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. +type.transform = function(op, otherOp, side) { + if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side}) should be 'left' or 'right'`); } + return transformer(op, otherOp, true, side); +}; -# Prune is the inverse of transform. 
-type.prune = (op, otherOp) -> transformer op, otherOp, false +// Prune is the inverse of transform. +type.prune = (op, otherOp) => transformer(op, otherOp, false); -# Compose 2 ops into 1 op. -type.compose = (op1, op2) -> - return op2 if op1 == null or op1 == undefined +// Compose 2 ops into 1 op. +type.compose = function(op1, op2) { + let component; + if ((op1 === null) || (op1 === undefined)) { return op2; } - checkOp op1 - checkOp op2 + checkOp(op1); + checkOp(op2); - result = [] + const result = []; - [take, _] = makeTake op1 + const [take, _] = Array.from(makeTake(op1)); - for component in op2 + for (component of Array.from(op2)) { - if typeof(component) == 'number' # Skip - # Just copy from op1. - length = component - while length > 0 - chunk = take length - throw new Error('The op traverses more elements than the document has') unless chunk != null + var chunk, length; + if (typeof(component) === 'number') { // Skip + // Just copy from op1. + length = component; + while (length > 0) { + chunk = take(length); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - append result, chunk - length -= componentLength chunk + append(result, chunk); + length -= componentLength(chunk); + } - else if component.i != undefined # Insert - append result, {i:component.i} + } else if (component.i !== undefined) { // Insert + append(result, {i:component.i}); - else # Delete - length = component.d - while length > 0 - chunk = take length - throw new Error('The op traverses more elements than the document has') unless chunk != null + } else { // Delete + length = component.d; + while (length > 0) { + chunk = take(length); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - chunkLength = componentLength chunk - if chunk.i != undefined - append result, {i:chunkLength} - else - append result, {d:chunkLength} + const chunkLength = componentLength(chunk); + if (chunk.i !== undefined) { + append(result, {i:chunkLength}); + } else { + append(result, {d:chunkLength}); + } - length -= chunkLength + length -= chunkLength; + } + } + } - # Append extras from op1 - while (component = take()) - throw new Error "Remaining fragments in op1: #{component}" unless component.i != undefined - append result, component + // Append extras from op1 + while (component = take()) { + if (component.i === undefined) { throw new Error(`Remaining fragments in op1: ${component}`); } + append(result, component); + } - result + return result; +}; -if WEB? - exports.types['text-tp2'] = type -else - module.exports = type +if (typeof WEB !== 'undefined' && WEB !== null) { + exports.types['text-tp2'] = type; +} else { + module.exports = type; +} + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/sharejs/text.js b/services/document-updater/app/coffee/sharejs/text.js index c64b4dfa68..3ecb026c77 100644 --- a/services/document-updater/app/coffee/sharejs/text.js +++ b/services/document-updater/app/coffee/sharejs/text.js @@ -1,209 +1,245 @@ -# A simple text implementation -# -# Operations are lists of components. -# Each component either inserts or deletes at a specified position in the document. 
-# -# Components are either: -# {i:'str', p:100}: Insert 'str' at position 100 in the document -# {d:'str', p:100}: Delete 'str' at position 100 in the document -# -# Components in an operation are executed sequentially, so the position of components -# assumes previous components have already executed. -# -# Eg: This op: -# [{i:'abc', p:0}] -# is equivalent to this op: -# [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}] +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A simple text implementation +// +// Operations are lists of components. +// Each component either inserts or deletes at a specified position in the document. +// +// Components are either: +// {i:'str', p:100}: Insert 'str' at position 100 in the document +// {d:'str', p:100}: Delete 'str' at position 100 in the document +// +// Components in an operation are executed sequentially, so the position of components +// assumes previous components have already executed. +// +// Eg: This op: +// [{i:'abc', p:0}] +// is equivalent to this op: +// [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}] -# NOTE: The global scope here is shared with other sharejs files when built with closure. -# Be careful what ends up in your namespace. +// NOTE: The global scope here is shared with other sharejs files when built with closure. +// Be careful what ends up in your namespace. -text = {} +let append, transformComponent; +const text = {}; -text.name = 'text' +text.name = 'text'; -text.create = -> '' +text.create = () => ''; -strInject = (s1, pos, s2) -> s1[...pos] + s2 + s1[pos..] +const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos); -checkValidComponent = (c) -> - throw new Error 'component missing position field' if typeof c.p != 'number' +const checkValidComponent = function(c) { + if (typeof c.p !== 'number') { throw new Error('component missing position field'); } - i_type = typeof c.i - d_type = typeof c.d - throw new Error 'component needs an i or d field' unless (i_type == 'string') ^ (d_type == 'string') + const i_type = typeof c.i; + const d_type = typeof c.d; + if (!((i_type === 'string') ^ (d_type === 'string'))) { throw new Error('component needs an i or d field'); } - throw new Error 'position cannot be negative' unless c.p >= 0 + if (!(c.p >= 0)) { throw new Error('position cannot be negative'); } +}; -checkValidOp = (op) -> - checkValidComponent(c) for c in op - true +const checkValidOp = function(op) { + for (let c of Array.from(op)) { checkValidComponent(c); } + return true; +}; -text.apply = (snapshot, op) -> - checkValidOp op - for component in op - if component.i? - snapshot = strInject snapshot, component.p, component.i - else - deleted = snapshot[component.p...(component.p + component.d.length)] - throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted - snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..] 
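+// Eg: text.apply('abc', [{i:'X', p:1}]) should return 'aXbc', and
+// text.apply('abc', [{d:'b', p:1}]) should return 'ac'; a delete component
+// carries the text it removes, so apply can check it matches the document.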
+text.apply = function(snapshot, op) { + checkValidOp(op); + for (let component of Array.from(op)) { + if (component.i != null) { + snapshot = strInject(snapshot, component.p, component.i); + } else { + const deleted = snapshot.slice(component.p, (component.p + component.d.length)); + if (component.d !== deleted) { throw new Error(`Delete component '${component.d}' does not match deleted text '${deleted}'`); } + snapshot = snapshot.slice(0, component.p) + snapshot.slice((component.p + component.d.length)); + } + } - snapshot + return snapshot; +}; -# Exported for use by the random op generator. -# -# For simplicity, this version of append does not compress adjacent inserts and deletes of -# the same text. It would be nice to change that at some stage. -text._append = append = (newOp, c) -> - return if c.i == '' or c.d == '' - if newOp.length == 0 - newOp.push c - else - last = newOp[newOp.length - 1] +// Exported for use by the random op generator. +// +// For simplicity, this version of append does not compress adjacent inserts and deletes of +// the same text. It would be nice to change that at some stage. +text._append = (append = function(newOp, c) { + if ((c.i === '') || (c.d === '')) { return; } + if (newOp.length === 0) { + return newOp.push(c); + } else { + const last = newOp[newOp.length - 1]; - # Compose the insert into the previous insert if possible - if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length) - newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p} - else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length) - newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p} - else - newOp.push c + // Compose the insert into the previous insert if possible + if ((last.i != null) && (c.i != null) && (last.p <= c.p && c.p <= (last.p + last.i.length))) { + return newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p}; + } else if ((last.d != null) && (c.d != null) && (c.p <= last.p && last.p <= (c.p + c.d.length))) { + return newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p}; + } else { + return newOp.push(c); + } + } +}); -text.compose = (op1, op2) -> - checkValidOp op1 - checkValidOp op2 +text.compose = function(op1, op2) { + checkValidOp(op1); + checkValidOp(op2); - newOp = op1.slice() - append newOp, c for c in op2 + const newOp = op1.slice(); + for (let c of Array.from(op2)) { append(newOp, c); } - newOp + return newOp; +}; -# Attempt to compress the op components together 'as much as possible'. -# This implementation preserves order and preserves create/delete pairs. -text.compress = (op) -> text.compose [], op +// Attempt to compress the op components together 'as much as possible'. +// This implementation preserves order and preserves create/delete pairs. +text.compress = op => text.compose([], op); -text.normalize = (op) -> - newOp = [] +text.normalize = function(op) { + const newOp = []; - # Normalize should allow ops which are a single (unwrapped) component: - # {i:'asdf', p:23}. - # There's no good way to test if something is an array: - # http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/ - # so this is probably the least bad solution. - op = [op] if op.i? or op.p? + // Normalize should allow ops which are a single (unwrapped) component: + // {i:'asdf', p:23}. 
+ // There's no good way to test if something is an array: + // http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/ + // so this is probably the least bad solution. + if ((op.i != null) || (op.p != null)) { op = [op]; } - for c in op - c.p ?= 0 - append newOp, c + for (let c of Array.from(op)) { + if (c.p == null) { c.p = 0; } + append(newOp, c); + } - newOp + return newOp; +}; -# This helper method transforms a position by an op component. -# -# If c is an insert, insertAfter specifies whether the transform -# is pushed after the insert (true) or before it (false). -# -# insertAfter is optional for deletes. -transformPosition = (pos, c, insertAfter) -> - if c.i? - if c.p < pos || (c.p == pos && insertAfter) - pos + c.i.length - else - pos - else - # I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) - # but I think its harder to read that way, and it compiles using ternary operators anyway - # so its no slower written like this. - if pos <= c.p - pos - else if pos <= c.p + c.d.length - c.p - else - pos - c.d.length +// This helper method transforms a position by an op component. +// +// If c is an insert, insertAfter specifies whether the transform +// is pushed after the insert (true) or before it (false). +// +// insertAfter is optional for deletes. +const transformPosition = function(pos, c, insertAfter) { + if (c.i != null) { + if ((c.p < pos) || ((c.p === pos) && insertAfter)) { + return pos + c.i.length; + } else { + return pos; + } + } else { + // I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) + // but I think its harder to read that way, and it compiles using ternary operators anyway + // so its no slower written like this. + if (pos <= c.p) { + return pos; + } else if (pos <= (c.p + c.d.length)) { + return c.p; + } else { + return pos - c.d.length; + } + } +}; -# Helper method to transform a cursor position as a result of an op. -# -# Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position -# is pushed after an insert (true) or before it (false). -text.transformCursor = (position, op, side) -> - insertAfter = side == 'right' - position = transformPosition position, c, insertAfter for c in op - position +// Helper method to transform a cursor position as a result of an op. +// +// Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position +// is pushed after an insert (true) or before it (false). +text.transformCursor = function(position, op, side) { + const insertAfter = side === 'right'; + for (let c of Array.from(op)) { position = transformPosition(position, c, insertAfter); } + return position; +}; -# Transform an op component by another op component. Asymmetric. -# The result will be appended to destination. -# -# exported for use in JSON type -text._tc = transformComponent = (dest, c, otherC, side) -> - checkValidOp [c] - checkValidOp [otherC] +// Transform an op component by another op component. Asymmetric. +// The result will be appended to destination. +// +// exported for use in JSON type +text._tc = (transformComponent = function(dest, c, otherC, side) { + checkValidOp([c]); + checkValidOp([otherC]); - if c.i? - append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')} + if (c.i != null) { + append(dest, {i:c.i, p:transformPosition(c.p, otherC, side === 'right')}); - else # Delete - if otherC.i? 
# delete vs insert - s = c.d - if c.p < otherC.p - append dest, {d:s[...otherC.p - c.p], p:c.p} - s = s[(otherC.p - c.p)..] - if s != '' - append dest, {d:s, p:c.p + otherC.i.length} + } else { // Delete + if (otherC.i != null) { // delete vs insert + let s = c.d; + if (c.p < otherC.p) { + append(dest, {d:s.slice(0, otherC.p - c.p), p:c.p}); + s = s.slice((otherC.p - c.p)); + } + if (s !== '') { + append(dest, {d:s, p:c.p + otherC.i.length}); + } - else # Delete vs delete - if c.p >= otherC.p + otherC.d.length - append dest, {d:c.d, p:c.p - otherC.d.length} - else if c.p + c.d.length <= otherC.p - append dest, c - else - # They overlap somewhere. - newC = {d:'', p:c.p} - if c.p < otherC.p - newC.d = c.d[...(otherC.p - c.p)] - if c.p + c.d.length > otherC.p + otherC.d.length - newC.d += c.d[(otherC.p + otherC.d.length - c.p)..] + } else { // Delete vs delete + if (c.p >= (otherC.p + otherC.d.length)) { + append(dest, {d:c.d, p:c.p - otherC.d.length}); + } else if ((c.p + c.d.length) <= otherC.p) { + append(dest, c); + } else { + // They overlap somewhere. + const newC = {d:'', p:c.p}; + if (c.p < otherC.p) { + newC.d = c.d.slice(0, (otherC.p - c.p)); + } + if ((c.p + c.d.length) > (otherC.p + otherC.d.length)) { + newC.d += c.d.slice(((otherC.p + otherC.d.length) - c.p)); + } - # This is entirely optional - just for a check that the deleted - # text in the two ops matches - intersectStart = Math.max c.p, otherC.p - intersectEnd = Math.min c.p + c.d.length, otherC.p + otherC.d.length - cIntersect = c.d[intersectStart - c.p...intersectEnd - c.p] - otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p] - throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect + // This is entirely optional - just for a check that the deleted + // text in the two ops matches + const intersectStart = Math.max(c.p, otherC.p); + const intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length); + const cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p); + const otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p); + if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); } - if newC.d != '' - # This could be rewritten similarly to insert v delete, above. - newC.p = transformPosition newC.p, otherC - append dest, newC + if (newC.d !== '') { + // This could be rewritten similarly to insert v delete, above. + newC.p = transformPosition(newC.p, otherC); + append(dest, newC); + } + } + } + } - dest + return dest; +}); -invertComponent = (c) -> - if c.i? - {d:c.i, p:c.p} - else - {i:c.d, p:c.p} +const invertComponent = function(c) { + if (c.i != null) { + return {d:c.i, p:c.p}; + } else { + return {i:c.d, p:c.p}; + } +}; -# No need to use append for invert, because the components won't be able to -# cancel with one another. -text.invert = (op) -> (invertComponent c for c in op.slice().reverse()) +// No need to use append for invert, because the components won't be able to +// cancel with one another. +text.invert = op => Array.from(op.slice().reverse()).map((c) => invertComponent(c)); -if WEB? - exports.types ||= {} +if (typeof WEB !== 'undefined' && WEB !== null) { + if (!exports.types) { exports.types = {}; } - # This is kind of awful - come up with a better way to hook this helper code up. 
- bootstrapTransform(text, transformComponent, checkValidOp, append) + // This is kind of awful - come up with a better way to hook this helper code up. + bootstrapTransform(text, transformComponent, checkValidOp, append); - # [] is used to prevent closure from renaming types.text - exports.types.text = text -else - module.exports = text + // [] is used to prevent closure from renaming types.text + exports.types.text = text; +} else { + module.exports = text; - # The text type really shouldn't need this - it should be possible to define - # an efficient transform function by making a sort of transform map and passing each - # op component through it. - require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append) + // The text type really shouldn't need this - it should be possible to define + // an efficient transform function by making a sort of transform map and passing each + // op component through it. + require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append); +} diff --git a/services/document-updater/app/coffee/sharejs/types/count.js b/services/document-updater/app/coffee/sharejs/types/count.js index da28355efb..ffc3337ac7 100644 --- a/services/document-updater/app/coffee/sharejs/types/count.js +++ b/services/document-updater/app/coffee/sharejs/types/count.js @@ -1,22 +1,30 @@ -# This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment] +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment] -exports.name = 'count' -exports.create = -> 1 +exports.name = 'count'; +exports.create = () => 1; -exports.apply = (snapshot, op) -> - [v, inc] = op - throw new Error "Op #{v} != snapshot #{snapshot}" unless snapshot == v - snapshot + inc +exports.apply = function(snapshot, op) { + const [v, inc] = Array.from(op); + if (snapshot !== v) { throw new Error(`Op ${v} != snapshot ${snapshot}`); } + return snapshot + inc; +}; -# transform op1 by op2. Return transformed version of op1. -exports.transform = (op1, op2) -> - throw new Error "Op1 #{op1[0]} != op2 #{op2[0]}" unless op1[0] == op2[0] - [op1[0] + op2[1], op1[1]] +// transform op1 by op2. Return transformed version of op1. 
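+// Eg: transform([3, 1], [3, 2]) should yield [5, 1] - both ops expected
+// snapshot 3, op2 moves it to 5, so op1 is rebased to expect 5 while still
+// incrementing by 1.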
+exports.transform = function(op1, op2) { + if (op1[0] !== op2[0]) { throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`); } + return [op1[0] + op2[1], op1[1]]; +}; -exports.compose = (op1, op2) -> - throw new Error "Op1 #{op1} + 1 != op2 #{op2}" unless op1[0] + op1[1] == op2[0] - [op1[0], op1[1] + op2[1]] +exports.compose = function(op1, op2) { + if ((op1[0] + op1[1]) !== op2[0]) { throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`); } + return [op1[0], op1[1] + op2[1]]; +}; -exports.generateRandomOp = (doc) -> - [[doc, 1], doc + 1] +exports.generateRandomOp = doc => [[doc, 1], doc + 1]; diff --git a/services/document-updater/app/coffee/sharejs/types/helpers.js b/services/document-updater/app/coffee/sharejs/types/helpers.js index 093b32e1bb..81a561de03 100644 --- a/services/document-updater/app/coffee/sharejs/types/helpers.js +++ b/services/document-updater/app/coffee/sharejs/types/helpers.js @@ -1,65 +1,87 @@ -# These methods let you build a transform function from a transformComponent function -# for OT types like text and JSON in which operations are lists of components -# and transforming them requires N^2 work. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// These methods let you build a transform function from a transformComponent function +// for OT types like text and JSON in which operations are lists of components +// and transforming them requires N^2 work. -# Add transform and transformX functions for an OT type which has transformComponent defined. -# transformComponent(destination array, component, other component, side) -exports['_bt'] = bootstrapTransform = (type, transformComponent, checkValidOp, append) -> - transformComponentX = (left, right, destLeft, destRight) -> - transformComponent destLeft, left, right, 'left' - transformComponent destRight, right, left, 'right' +// Add transform and transformX functions for an OT type which has transformComponent defined. +// transformComponent(destination array, component, other component, side) +let bootstrapTransform; +exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) { + let transformX; + const transformComponentX = function(left, right, destLeft, destRight) { + transformComponent(destLeft, left, right, 'left'); + return transformComponent(destRight, right, left, 'right'); + }; - # Transforms rightOp by leftOp. Returns ['rightOp', clientOp'] - type.transformX = type['transformX'] = transformX = (leftOp, rightOp) -> - checkValidOp leftOp - checkValidOp rightOp + // Transforms rightOp by leftOp. 
Returns ['rightOp', clientOp'] + type.transformX = (type['transformX'] = (transformX = function(leftOp, rightOp) { + checkValidOp(leftOp); + checkValidOp(rightOp); - newRightOp = [] + const newRightOp = []; - for rightComponent in rightOp - # Generate newLeftOp by composing leftOp by rightComponent - newLeftOp = [] + for (let rightComponent of Array.from(rightOp)) { + // Generate newLeftOp by composing leftOp by rightComponent + const newLeftOp = []; - k = 0 - while k < leftOp.length - nextC = [] - transformComponentX leftOp[k], rightComponent, newLeftOp, nextC - k++ + let k = 0; + while (k < leftOp.length) { + var l; + const nextC = []; + transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC); + k++; - if nextC.length == 1 - rightComponent = nextC[0] - else if nextC.length == 0 - append newLeftOp, l for l in leftOp[k..] - rightComponent = null - break - else - # Recurse. - [l_, r_] = transformX leftOp[k..], nextC - append newLeftOp, l for l in l_ - append newRightOp, r for r in r_ - rightComponent = null - break + if (nextC.length === 1) { + rightComponent = nextC[0]; + } else if (nextC.length === 0) { + for (l of Array.from(leftOp.slice(k))) { append(newLeftOp, l); } + rightComponent = null; + break; + } else { + // Recurse. + const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)); + for (l of Array.from(l_)) { append(newLeftOp, l); } + for (let r of Array.from(r_)) { append(newRightOp, r); } + rightComponent = null; + break; + } + } - append newRightOp, rightComponent if rightComponent? - leftOp = newLeftOp + if (rightComponent != null) { append(newRightOp, rightComponent); } + leftOp = newLeftOp; + } - [leftOp, newRightOp] + return [leftOp, newRightOp]; + })); - # Transforms op with specified type ('left' or 'right') by otherOp. - type.transform = type['transform'] = (op, otherOp, type) -> - throw new Error "type must be 'left' or 'right'" unless type == 'left' or type == 'right' + // Transforms op with specified type ('left' or 'right') by otherOp. + return type.transform = (type['transform'] = function(op, otherOp, type) { + let _; + if ((type !== 'left') && (type !== 'right')) { throw new Error("type must be 'left' or 'right'"); } - return op if otherOp.length == 0 + if (otherOp.length === 0) { return op; } - # TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? - return transformComponent [], op[0], otherOp[0], type if op.length == 1 and otherOp.length == 1 + // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? 
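The convergence property that transformX exists to preserve is easy to check with the count type from the previous hunk; a standalone sketch, assuming both files sit in this directory:

    // Two concurrent ops against snapshot 1; transforming each against
    // the other makes both application orders converge on the same value.
    const count = require('./count');
    const a = [1, 2];                   // expects 1, adds 2
    const b = [1, 3];                   // expects 1, adds 3
    const a2 = count.transform(a, b);   // [4, 2]
    const b2 = count.transform(b, a);   // [3, 3]
    count.apply(count.apply(1, b), a2); // 6
    count.apply(count.apply(1, a), b2); // 6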
+ if ((op.length === 1) && (otherOp.length === 1)) { return transformComponent([], op[0], otherOp[0], type); } - if type == 'left' - [left, _] = transformX op, otherOp - left - else - [_, right] = transformX otherOp, op - right + if (type === 'left') { + let left; + [left, _] = Array.from(transformX(op, otherOp)); + return left; + } else { + let right; + [_, right] = Array.from(transformX(otherOp, op)); + return right; + } + }); +}); -if typeof WEB is 'undefined' - exports.bootstrapTransform = bootstrapTransform +if (typeof WEB === 'undefined') { + exports.bootstrapTransform = bootstrapTransform; +} diff --git a/services/document-updater/app/coffee/sharejs/types/index.js b/services/document-updater/app/coffee/sharejs/types/index.js index 6f3bb8ec20..bf681de7cd 100644 --- a/services/document-updater/app/coffee/sharejs/types/index.js +++ b/services/document-updater/app/coffee/sharejs/types/index.js @@ -1,15 +1,21 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ -register = (file) -> - type = require file - exports[type.name] = type - try require "#{file}-api" +const register = function(file) { + const type = require(file); + exports[type.name] = type; + try { return require(`${file}-api`); } catch (error) {} +}; -# Import all the built-in types. -register './simple' -register './count' +// Import all the built-in types. +register('./simple'); +register('./count'); -register './text' -register './text-composable' -register './text-tp2' +register('./text'); +register('./text-composable'); +register('./text-tp2'); -register './json' +register('./json'); diff --git a/services/document-updater/app/coffee/sharejs/types/json-api.js b/services/document-updater/app/coffee/sharejs/types/json-api.js index 8819dee798..1c7c2633ba 100644 --- a/services/document-updater/app/coffee/sharejs/types/json-api.js +++ b/services/document-updater/app/coffee/sharejs/types/json-api.js @@ -1,180 +1,273 @@ -# API for JSON OT +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// API for JSON OT -json = require './json' if typeof WEB is 'undefined' +let json; +if (typeof WEB === 'undefined') { json = require('./json'); } -if WEB? - extendDoc = exports.extendDoc - exports.extendDoc = (name, fn) -> - SubDoc::[name] = fn - extendDoc name, fn +if (typeof WEB !== 'undefined' && WEB !== null) { + const { + extendDoc + } = exports; + exports.extendDoc = function(name, fn) { + SubDoc.prototype[name] = fn; + return extendDoc(name, fn); + }; +} -depath = (path) -> - if path.length == 1 and path[0].constructor == Array - path[0] - else path +const depath = function(path) { + if ((path.length === 1) && (path[0].constructor === Array)) { + return path[0]; + } else { return path; } +}; -class SubDoc - constructor: (@doc, @path) -> - at: (path...) -> @doc.at @path.concat depath path - get: -> @doc.getAt @path - # for objects and lists - set: (value, cb) -> @doc.setAt @path, value, cb - # for strings and lists. 
- insert: (pos, value, cb) -> @doc.insertAt @path, pos, value, cb - # for strings - del: (pos, length, cb) -> @doc.deleteTextAt @path, length, pos, cb - # for objects and lists - remove: (cb) -> @doc.removeAt @path, cb - push: (value, cb) -> @insert @get().length, value, cb - move: (from, to, cb) -> @doc.moveAt @path, from, to, cb - add: (amount, cb) -> @doc.addAt @path, amount, cb - on: (event, cb) -> @doc.addListener @path, event, cb - removeListener: (l) -> @doc.removeListener l +class SubDoc { + constructor(doc, path) { + this.doc = doc; + this.path = path; + } + at(...path) { return this.doc.at(this.path.concat(depath(path))); } + get() { return this.doc.getAt(this.path); } + // for objects and lists + set(value, cb) { return this.doc.setAt(this.path, value, cb); } + // for strings and lists. + insert(pos, value, cb) { return this.doc.insertAt(this.path, pos, value, cb); } + // for strings + del(pos, length, cb) { return this.doc.deleteTextAt(this.path, length, pos, cb); } + // for objects and lists + remove(cb) { return this.doc.removeAt(this.path, cb); } + push(value, cb) { return this.insert(this.get().length, value, cb); } + move(from, to, cb) { return this.doc.moveAt(this.path, from, to, cb); } + add(amount, cb) { return this.doc.addAt(this.path, amount, cb); } + on(event, cb) { return this.doc.addListener(this.path, event, cb); } + removeListener(l) { return this.doc.removeListener(l); } - # text API compatibility - getLength: -> @get().length - getText: -> @get() + // text API compatibility + getLength() { return this.get().length; } + getText() { return this.get(); } +} -traverse = (snapshot, path) -> - container = data:snapshot - key = 'data' - elem = container - for p in path - elem = elem[key] - key = p - throw new Error 'bad path' if typeof elem == 'undefined' - {elem, key} +const traverse = function(snapshot, path) { + const container = {data:snapshot}; + let key = 'data'; + let elem = container; + for (let p of Array.from(path)) { + elem = elem[key]; + key = p; + if (typeof elem === 'undefined') { throw new Error('bad path'); } + } + return {elem, key}; +}; -pathEquals = (p1, p2) -> - return false if p1.length != p2.length - for e,i in p1 - return false if e != p2[i] - true +const pathEquals = function(p1, p2) { + if (p1.length !== p2.length) { return false; } + for (let i = 0; i < p1.length; i++) { + const e = p1[i]; + if (e !== p2[i]) { return false; } + } + return true; +}; -json.api = - provides: {json:true} +json.api = { + provides: {json:true}, - at: (path...) 
-> new SubDoc this, depath path + at(...path) { return new SubDoc(this, depath(path)); }, - get: -> @snapshot - set: (value, cb) -> @setAt [], value, cb + get() { return this.snapshot; }, + set(value, cb) { return this.setAt([], value, cb); }, - getAt: (path) -> - {elem, key} = traverse @snapshot, path - return elem[key] + getAt(path) { + const {elem, key} = traverse(this.snapshot, path); + return elem[key]; + }, - setAt: (path, value, cb) -> - {elem, key} = traverse @snapshot, path - op = {p:path} - if elem.constructor == Array - op.li = value - op.ld = elem[key] if typeof elem[key] != 'undefined' - else if typeof elem == 'object' - op.oi = value - op.od = elem[key] if typeof elem[key] != 'undefined' - else throw new Error 'bad path' - @submitOp [op], cb + setAt(path, value, cb) { + const {elem, key} = traverse(this.snapshot, path); + const op = {p:path}; + if (elem.constructor === Array) { + op.li = value; + if (typeof elem[key] !== 'undefined') { op.ld = elem[key]; } + } else if (typeof elem === 'object') { + op.oi = value; + if (typeof elem[key] !== 'undefined') { op.od = elem[key]; } + } else { throw new Error('bad path'); } + return this.submitOp([op], cb); + }, - removeAt: (path, cb) -> - {elem, key} = traverse @snapshot, path - throw new Error 'no element at that path' unless typeof elem[key] != 'undefined' - op = {p:path} - if elem.constructor == Array - op.ld = elem[key] - else if typeof elem == 'object' - op.od = elem[key] - else throw new Error 'bad path' - @submitOp [op], cb + removeAt(path, cb) { + const {elem, key} = traverse(this.snapshot, path); + if (typeof elem[key] === 'undefined') { throw new Error('no element at that path'); } + const op = {p:path}; + if (elem.constructor === Array) { + op.ld = elem[key]; + } else if (typeof elem === 'object') { + op.od = elem[key]; + } else { throw new Error('bad path'); } + return this.submitOp([op], cb); + }, - insertAt: (path, pos, value, cb) -> - {elem, key} = traverse @snapshot, path - op = {p:path.concat pos} - if elem[key].constructor == Array - op.li = value - else if typeof elem[key] == 'string' - op.si = value - @submitOp [op], cb + insertAt(path, pos, value, cb) { + const {elem, key} = traverse(this.snapshot, path); + const op = {p:path.concat(pos)}; + if (elem[key].constructor === Array) { + op.li = value; + } else if (typeof elem[key] === 'string') { + op.si = value; + } + return this.submitOp([op], cb); + }, - moveAt: (path, from, to, cb) -> - op = [{p:path.concat(from), lm:to}] - @submitOp op, cb + moveAt(path, from, to, cb) { + const op = [{p:path.concat(from), lm:to}]; + return this.submitOp(op, cb); + }, - addAt: (path, amount, cb) -> - op = [{p:path, na:amount}] - @submitOp op, cb + addAt(path, amount, cb) { + const op = [{p:path, na:amount}]; + return this.submitOp(op, cb); + }, - deleteTextAt: (path, length, pos, cb) -> - {elem, key} = traverse @snapshot, path - op = [{p:path.concat(pos), sd:elem[key][pos...(pos + length)]}] - @submitOp op, cb + deleteTextAt(path, length, pos, cb) { + const {elem, key} = traverse(this.snapshot, path); + const op = [{p:path.concat(pos), sd:elem[key].slice(pos, (pos + length))}]; + return this.submitOp(op, cb); + }, - addListener: (path, event, cb) -> - l = {path, event, cb} - @_listeners.push l - l - removeListener: (l) -> - i = @_listeners.indexOf l - return false if i < 0 - @_listeners.splice i, 1 - return true - _register: -> - @_listeners = [] - @on 'change', (op) -> - for c in op - if c.na != undefined or c.si != undefined or c.sd != undefined - # no change to structure - 
continue - to_remove = [] - for l, i in @_listeners - # Transform a dummy op by the incoming op to work out what - # should happen to the listener. - dummy = {p:l.path, na:0} - xformed = @type.transformComponent [], dummy, c, 'left' - if xformed.length == 0 - # The op was transformed to noop, so we should delete the listener. - to_remove.push i - else if xformed.length == 1 - # The op remained, so grab its new path into the listener. - l.path = xformed[0].p - else - throw new Error "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components." - to_remove.sort (a, b) -> b - a - for i in to_remove - @_listeners.splice i, 1 - @on 'remoteop', (op) -> - for c in op - match_path = if c.na == undefined then c.p[...c.p.length-1] else c.p - for {path, event, cb} in @_listeners - if pathEquals path, match_path - switch event - when 'insert' - if c.li != undefined and c.ld == undefined - cb(c.p[c.p.length-1], c.li) - else if c.oi != undefined and c.od == undefined - cb(c.p[c.p.length-1], c.oi) - else if c.si != undefined - cb(c.p[c.p.length-1], c.si) - when 'delete' - if c.li == undefined and c.ld != undefined - cb(c.p[c.p.length-1], c.ld) - else if c.oi == undefined and c.od != undefined - cb(c.p[c.p.length-1], c.od) - else if c.sd != undefined - cb(c.p[c.p.length-1], c.sd) - when 'replace' - if c.li != undefined and c.ld != undefined - cb(c.p[c.p.length-1], c.ld, c.li) - else if c.oi != undefined and c.od != undefined - cb(c.p[c.p.length-1], c.od, c.oi) - when 'move' - if c.lm != undefined - cb(c.p[c.p.length-1], c.lm) - when 'add' - if c.na != undefined - cb(c.na) - else if (common = @type.commonPath match_path, path)? - if event == 'child op' - if match_path.length == path.length == common - throw new Error "paths match length and have commonality, but aren't equal?" - child_path = c.p[common+1..] - cb(child_path, c) + addListener(path, event, cb) { + const l = {path, event, cb}; + this._listeners.push(l); + return l; + }, + removeListener(l) { + const i = this._listeners.indexOf(l); + if (i < 0) { return false; } + this._listeners.splice(i, 1); + return true; + }, + _register() { + this._listeners = []; + this.on('change', function(op) { + return (() => { + const result = []; + for (let c of Array.from(op)) { + var i; + if ((c.na !== undefined) || (c.si !== undefined) || (c.sd !== undefined)) { + // no change to structure + continue; + } + var to_remove = []; + for (i = 0; i < this._listeners.length; i++) { + // Transform a dummy op by the incoming op to work out what + // should happen to the listener. + const l = this._listeners[i]; + const dummy = {p:l.path, na:0}; + const xformed = this.type.transformComponent([], dummy, c, 'left'); + if (xformed.length === 0) { + // The op was transformed to noop, so we should delete the listener. + to_remove.push(i); + } else if (xformed.length === 1) { + // The op remained, so grab its new path into the listener. + l.path = xformed[0].p; + } else { + throw new Error("Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components."); + } + } + to_remove.sort((a, b) => b - a); + result.push((() => { + const result1 = []; + for (i of Array.from(to_remove)) { + result1.push(this._listeners.splice(i, 1)); + } + return result1; + })()); + } + return result; + })(); + }); + return this.on('remoteop', function(op) { + return (() => { + const result = []; + for (var c of Array.from(op)) { + var match_path = c.na === undefined ? 
c.p.slice(0, c.p.length-1) : c.p; + result.push((() => { + const result1 = []; + for (let {path, event, cb} of Array.from(this._listeners)) { + var common; + if (pathEquals(path, match_path)) { + switch (event) { + case 'insert': + if ((c.li !== undefined) && (c.ld === undefined)) { + result1.push(cb(c.p[c.p.length-1], c.li)); + } else if ((c.oi !== undefined) && (c.od === undefined)) { + result1.push(cb(c.p[c.p.length-1], c.oi)); + } else if (c.si !== undefined) { + result1.push(cb(c.p[c.p.length-1], c.si)); + } else { + result1.push(undefined); + } + break; + case 'delete': + if ((c.li === undefined) && (c.ld !== undefined)) { + result1.push(cb(c.p[c.p.length-1], c.ld)); + } else if ((c.oi === undefined) && (c.od !== undefined)) { + result1.push(cb(c.p[c.p.length-1], c.od)); + } else if (c.sd !== undefined) { + result1.push(cb(c.p[c.p.length-1], c.sd)); + } else { + result1.push(undefined); + } + break; + case 'replace': + if ((c.li !== undefined) && (c.ld !== undefined)) { + result1.push(cb(c.p[c.p.length-1], c.ld, c.li)); + } else if ((c.oi !== undefined) && (c.od !== undefined)) { + result1.push(cb(c.p[c.p.length-1], c.od, c.oi)); + } else { + result1.push(undefined); + } + break; + case 'move': + if (c.lm !== undefined) { + result1.push(cb(c.p[c.p.length-1], c.lm)); + } else { + result1.push(undefined); + } + break; + case 'add': + if (c.na !== undefined) { + result1.push(cb(c.na)); + } else { + result1.push(undefined); + } + break; + default: + result1.push(undefined); + } + } else if ((common = this.type.commonPath(match_path, path)) != null) { + if (event === 'child op') { + if (match_path.length === path.length && path.length === common) { + throw new Error("paths match length and have commonality, but aren't equal?"); + } + const child_path = c.p.slice(common+1); + result1.push(cb(child_path, c)); + } else { + result1.push(undefined); + } + } else { + result1.push(undefined); + } + } + return result1; + })()); + } + return result; + })(); + }); + } +}; diff --git a/services/document-updater/app/coffee/sharejs/types/json.js b/services/document-updater/app/coffee/sharejs/types/json.js index b03b0947ef..3e3bee79d9 100644 --- a/services/document-updater/app/coffee/sharejs/types/json.js +++ b/services/document-updater/app/coffee/sharejs/types/json.js @@ -1,441 +1,534 @@ -# This is the implementation of the JSON OT type. -# -# Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This is the implementation of the JSON OT type. +// +// Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations -if WEB? 
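One detail of the json-api above is worth seeing in isolation: traverse resolves a path one step early, handing back the parent container and the final key so callers can mutate in place. A standalone restatement with hypothetical data:

    const traverse = function(snapshot, path) {
      const container = { data: snapshot };
      let key = 'data';
      let elem = container;
      for (const p of path) {
        elem = elem[key];
        key = p;
        if (typeof elem === 'undefined') { throw new Error('bad path'); }
      }
      // elem[key] is the addressed value; elem is its mutable parent.
      return { elem, key };
    };

    const { elem, key } = traverse({ a: { b: [10, 20] } }, ['a', 'b', 1]);
    // elem is the array [10, 20], key is 1, so elem[key] === 20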
- text = exports.types.text -else - text = require './text' +let text; +if (typeof WEB !== 'undefined' && WEB !== null) { + ({ + text + } = exports.types); +} else { + text = require('./text'); +} -json = {} +const json = {}; -json.name = 'json' +json.name = 'json'; -json.create = -> null +json.create = () => null; -json.invertComponent = (c) -> - c_ = {p: c.p} - c_.sd = c.si if c.si != undefined - c_.si = c.sd if c.sd != undefined - c_.od = c.oi if c.oi != undefined - c_.oi = c.od if c.od != undefined - c_.ld = c.li if c.li != undefined - c_.li = c.ld if c.ld != undefined - c_.na = -c.na if c.na != undefined - if c.lm != undefined - c_.lm = c.p[c.p.length-1] - c_.p = c.p[0...c.p.length - 1].concat([c.lm]) - c_ +json.invertComponent = function(c) { + const c_ = {p: c.p}; + if (c.si !== undefined) { c_.sd = c.si; } + if (c.sd !== undefined) { c_.si = c.sd; } + if (c.oi !== undefined) { c_.od = c.oi; } + if (c.od !== undefined) { c_.oi = c.od; } + if (c.li !== undefined) { c_.ld = c.li; } + if (c.ld !== undefined) { c_.li = c.ld; } + if (c.na !== undefined) { c_.na = -c.na; } + if (c.lm !== undefined) { + c_.lm = c.p[c.p.length-1]; + c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]); + } + return c_; +}; -json.invert = (op) -> json.invertComponent c for c in op.slice().reverse() +json.invert = op => Array.from(op.slice().reverse()).map((c) => json.invertComponent(c)); -json.checkValidOp = (op) -> +json.checkValidOp = function(op) {}; -isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' -json.checkList = (elem) -> - throw new Error 'Referenced element not a list' unless isArray(elem) +const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; +json.checkList = function(elem) { + if (!isArray(elem)) { throw new Error('Referenced element not a list'); } +}; -json.checkObj = (elem) -> - throw new Error "Referenced element not an object (it was #{JSON.stringify elem})" unless elem.constructor is Object +json.checkObj = function(elem) { + if (elem.constructor !== Object) { throw new Error(`Referenced element not an object (it was ${JSON.stringify(elem)})`); } +}; -json.apply = (snapshot, op) -> - json.checkValidOp op - op = clone op +json.apply = function(snapshot, op) { + json.checkValidOp(op); + op = clone(op); - container = {data: clone snapshot} + const container = {data: clone(snapshot)}; - try - for c, i in op - parent = null - parentkey = null - elem = container - key = 'data' + try { + for (let i = 0; i < op.length; i++) { + const c = op[i]; + let parent = null; + let parentkey = null; + let elem = container; + let key = 'data'; - for p in c.p - parent = elem - parentkey = key - elem = elem[key] - key = p + for (let p of Array.from(c.p)) { + parent = elem; + parentkey = key; + elem = elem[key]; + key = p; - throw new Error 'Path invalid' unless parent? + if (parent == null) { throw new Error('Path invalid'); } + } - if c.na != undefined - # Number add - throw new Error 'Referenced element not a number' unless typeof elem[key] is 'number' - elem[key] += c.na + if (c.na !== undefined) { + // Number add + if (typeof elem[key] !== 'number') { throw new Error('Referenced element not a number'); } + elem[key] += c.na; - else if c.si != undefined - # String insert - throw new Error "Referenced element not a string (it was #{JSON.stringify elem})" unless typeof elem is 'string' - parent[parentkey] = elem[...key] + c.si + elem[key..] 
- else if c.sd != undefined - # String delete - throw new Error 'Referenced element not a string' unless typeof elem is 'string' - throw new Error 'Deleted string does not match' unless elem[key...key + c.sd.length] == c.sd - parent[parentkey] = elem[...key] + elem[key + c.sd.length..] + } else if (c.si !== undefined) { + // String insert + if (typeof elem !== 'string') { throw new Error(`Referenced element not a string (it was ${JSON.stringify(elem)})`); } + parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key); + } else if (c.sd !== undefined) { + // String delete + if (typeof elem !== 'string') { throw new Error('Referenced element not a string'); } + if (elem.slice(key, key + c.sd.length) !== c.sd) { throw new Error('Deleted string does not match'); } + parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length); - else if c.li != undefined && c.ld != undefined - # List replace - json.checkList elem + } else if ((c.li !== undefined) && (c.ld !== undefined)) { + // List replace + json.checkList(elem); - # Should check the list element matches c.ld - elem[key] = c.li - else if c.li != undefined - # List insert - json.checkList elem + // Should check the list element matches c.ld + elem[key] = c.li; + } else if (c.li !== undefined) { + // List insert + json.checkList(elem); - elem.splice key, 0, c.li - else if c.ld != undefined - # List delete - json.checkList elem + elem.splice(key, 0, c.li); + } else if (c.ld !== undefined) { + // List delete + json.checkList(elem); - # Should check the list element matches c.ld here too. - elem.splice key, 1 - else if c.lm != undefined - # List move - json.checkList elem - if c.lm != key - e = elem[key] - # Remove it... - elem.splice key, 1 - # And insert it back. - elem.splice c.lm, 0, e + // Should check the list element matches c.ld here too. + elem.splice(key, 1); + } else if (c.lm !== undefined) { + // List move + json.checkList(elem); + if (c.lm !== key) { + const e = elem[key]; + // Remove it... + elem.splice(key, 1); + // And insert it back. + elem.splice(c.lm, 0, e); + } - else if c.oi != undefined - # Object insert / replace - json.checkObj elem + } else if (c.oi !== undefined) { + // Object insert / replace + json.checkObj(elem); - # Should check that elem[key] == c.od - elem[key] = c.oi - else if c.od != undefined - # Object delete - json.checkObj elem + // Should check that elem[key] == c.od + elem[key] = c.oi; + } else if (c.od !== undefined) { + // Object delete + json.checkObj(elem); - # Should check that elem[key] == c.od - delete elem[key] - else - throw new Error 'invalid / missing instruction in op' - catch error - # TODO: Roll back all already applied changes. Write tests before implementing this code. - throw error + // Should check that elem[key] == c.od + delete elem[key]; + } else { + throw new Error('invalid / missing instruction in op'); + } + } + } catch (error) { + // TODO: Roll back all already applied changes. Write tests before implementing this code. + throw error; + } - container.data + return container.data; +}; -# Checks if two paths, p1 and p2 match. -json.pathMatches = (p1, p2, ignoreLast) -> - return false unless p1.length == p2.length +// Checks if two paths, p1 and p2 match. 
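A quick standalone check of the apply semantics just shown (hypothetical snapshot and ops; the require path assumes a sibling script):

    const json = require('./json');
    let snapshot = { tags: ['a', 'c'], n: 1 };
    // List insert at index 1, then a number add on n.
    snapshot = json.apply(snapshot, [{ p: ['tags', 1], li: 'b' }]);
    snapshot = json.apply(snapshot, [{ p: ['n'], na: 4 }]);
    console.log(snapshot); // => { tags: [ 'a', 'b', 'c' ], n: 5 }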
+json.pathMatches = function(p1, p2, ignoreLast) { + if (p1.length !== p2.length) { return false; } - for p, i in p1 - return false if p != p2[i] and (!ignoreLast or i != p1.length - 1) + for (let i = 0; i < p1.length; i++) { + const p = p1[i]; + if ((p !== p2[i]) && (!ignoreLast || (i !== (p1.length - 1)))) { return false; } + } - true + return true; +}; -json.append = (dest, c) -> - c = clone c - if dest.length != 0 and json.pathMatches c.p, (last = dest[dest.length - 1]).p - if last.na != undefined and c.na != undefined - dest[dest.length - 1] = { p: last.p, na: last.na + c.na } - else if last.li != undefined and c.li == undefined and c.ld == last.li - # insert immediately followed by delete becomes a noop. - if last.ld != undefined - # leave the delete part of the replace - delete last.li - else - dest.pop() - else if last.od != undefined and last.oi == undefined and - c.oi != undefined and c.od == undefined - last.oi = c.oi - else if c.lm != undefined and c.p[c.p.length-1] == c.lm - null # don't do anything - else - dest.push c - else - dest.push c +json.append = function(dest, c) { + let last; + c = clone(c); + if ((dest.length !== 0) && json.pathMatches(c.p, (last = dest[dest.length - 1]).p)) { + if ((last.na !== undefined) && (c.na !== undefined)) { + return dest[dest.length - 1] = { p: last.p, na: last.na + c.na }; + } else if ((last.li !== undefined) && (c.li === undefined) && (c.ld === last.li)) { + // insert immediately followed by delete becomes a noop. + if (last.ld !== undefined) { + // leave the delete part of the replace + return delete last.li; + } else { + return dest.pop(); + } + } else if ((last.od !== undefined) && (last.oi === undefined) && + (c.oi !== undefined) && (c.od === undefined)) { + return last.oi = c.oi; + } else if ((c.lm !== undefined) && (c.p[c.p.length-1] === c.lm)) { + return null; // don't do anything + } else { + return dest.push(c); + } + } else { + return dest.push(c); + } +}; -json.compose = (op1, op2) -> - json.checkValidOp op1 - json.checkValidOp op2 +json.compose = function(op1, op2) { + json.checkValidOp(op1); + json.checkValidOp(op2); - newOp = clone op1 - json.append newOp, c for c in op2 + const newOp = clone(op1); + for (let c of Array.from(op2)) { json.append(newOp, c); } - newOp + return newOp; +}; -json.normalize = (op) -> - newOp = [] +json.normalize = function(op) { + const newOp = []; - op = [op] unless isArray op + if (!isArray(op)) { op = [op]; } - for c in op - c.p ?= [] - json.append newOp, c + for (let c of Array.from(op)) { + if (c.p == null) { c.p = []; } + json.append(newOp, c); + } - newOp + return newOp; +}; -# hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming -# we have browser support for JSON. -# http://jsperf.com/cloning-an-object/12 -clone = (o) -> JSON.parse(JSON.stringify o) +// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming +// we have browser support for JSON. 
+// http://jsperf.com/cloning-an-object/12 +var clone = o => JSON.parse(JSON.stringify(o)); -json.commonPath = (p1, p2) -> - p1 = p1.slice() - p2 = p2.slice() - p1.unshift('data') - p2.unshift('data') - p1 = p1[...p1.length-1] - p2 = p2[...p2.length-1] - return -1 if p2.length == 0 - i = 0 - while p1[i] == p2[i] && i < p1.length - i++ - if i == p2.length - return i-1 - return +json.commonPath = function(p1, p2) { + p1 = p1.slice(); + p2 = p2.slice(); + p1.unshift('data'); + p2.unshift('data'); + p1 = p1.slice(0, p1.length-1); + p2 = p2.slice(0, p2.length-1); + if (p2.length === 0) { return -1; } + let i = 0; + while ((p1[i] === p2[i]) && (i < p1.length)) { + i++; + if (i === p2.length) { + return i-1; + } + } +}; -# transform c so it applies to a document with otherC applied. -json.transformComponent = (dest, c, otherC, type) -> - c = clone c - c.p.push(0) if c.na != undefined - otherC.p.push(0) if otherC.na != undefined +// transform c so it applies to a document with otherC applied. +json.transformComponent = function(dest, c, otherC, type) { + let oc; + c = clone(c); + if (c.na !== undefined) { c.p.push(0); } + if (otherC.na !== undefined) { otherC.p.push(0); } - common = json.commonPath c.p, otherC.p - common2 = json.commonPath otherC.p, c.p + const common = json.commonPath(c.p, otherC.p); + const common2 = json.commonPath(otherC.p, c.p); - cplength = c.p.length - otherCplength = otherC.p.length + const cplength = c.p.length; + const otherCplength = otherC.p.length; - c.p.pop() if c.na != undefined # hax - otherC.p.pop() if otherC.na != undefined + if (c.na !== undefined) { c.p.pop(); } // hax + if (otherC.na !== undefined) { otherC.p.pop(); } - if otherC.na - if common2? && otherCplength >= cplength && otherC.p[common2] == c.p[common2] - if c.ld != undefined - oc = clone otherC - oc.p = oc.p[cplength..] - c.ld = json.apply clone(c.ld), [oc] - else if c.od != undefined - oc = clone otherC - oc.p = oc.p[cplength..] - c.od = json.apply clone(c.od), [oc] - json.append dest, c - return dest + if (otherC.na) { + if ((common2 != null) && (otherCplength >= cplength) && (otherC.p[common2] === c.p[common2])) { + if (c.ld !== undefined) { + oc = clone(otherC); + oc.p = oc.p.slice(cplength); + c.ld = json.apply(clone(c.ld), [oc]); + } else if (c.od !== undefined) { + oc = clone(otherC); + oc.p = oc.p.slice(cplength); + c.od = json.apply(clone(c.od), [oc]); + } + } + json.append(dest, c); + return dest; + } - if common2? && otherCplength > cplength && c.p[common2] == otherC.p[common2] - # transform based on c - if c.ld != undefined - oc = clone otherC - oc.p = oc.p[cplength..] - c.ld = json.apply clone(c.ld), [oc] - else if c.od != undefined - oc = clone otherC - oc.p = oc.p[cplength..] - c.od = json.apply clone(c.od), [oc] + if ((common2 != null) && (otherCplength > cplength) && (c.p[common2] === otherC.p[common2])) { + // transform based on c + if (c.ld !== undefined) { + oc = clone(otherC); + oc.p = oc.p.slice(cplength); + c.ld = json.apply(clone(c.ld), [oc]); + } else if (c.od !== undefined) { + oc = clone(otherC); + oc.p = oc.p.slice(cplength); + c.od = json.apply(clone(c.od), [oc]); + } + } - if common? 
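json.append, defined a little earlier, is what keeps composed ops small: adjacent components on the same path merge where possible. A standalone check with a hypothetical op:

    const json = require('./json');
    const op = [];
    json.append(op, { p: ['n'], na: 2 });
    json.append(op, { p: ['n'], na: 3 }); // merges into the previous na
    console.log(op);                      // => [ { p: [ 'n' ], na: 5 } ]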
- commonOperand = cplength == otherCplength - # transform based on otherC - if otherC.na != undefined - # this case is handled above due to icky path hax - else if otherC.si != undefined || otherC.sd != undefined - # String op vs string op - pass through to text type - if c.si != undefined || c.sd != undefined - throw new Error("must be a string?") unless commonOperand + if (common != null) { + let from, p, to; + const commonOperand = cplength === otherCplength; + // transform based on otherC + if (otherC.na !== undefined) { + // this case is handled above due to icky path hax + } else if ((otherC.si !== undefined) || (otherC.sd !== undefined)) { + // String op vs string op - pass through to text type + if ((c.si !== undefined) || (c.sd !== undefined)) { + if (!commonOperand) { throw new Error("must be a string?"); } - # Convert an op component to a text op component - convert = (component) -> - newC = p:component.p[component.p.length - 1] - if component.si - newC.i = component.si - else - newC.d = component.sd - newC + // Convert an op component to a text op component + const convert = function(component) { + const newC = {p:component.p[component.p.length - 1]}; + if (component.si) { + newC.i = component.si; + } else { + newC.d = component.sd; + } + return newC; + }; - tc1 = convert c - tc2 = convert otherC + const tc1 = convert(c); + const tc2 = convert(otherC); - res = [] - text._tc res, tc1, tc2, type - for tc in res - jc = { p: c.p[...common] } - jc.p.push(tc.p) - jc.si = tc.i if tc.i? - jc.sd = tc.d if tc.d? - json.append dest, jc - return dest - else if otherC.li != undefined && otherC.ld != undefined - if otherC.p[common] == c.p[common] - # noop - if !commonOperand - # we're below the deleted element, so -> noop - return dest - else if c.ld != undefined - # we're trying to delete the same element, -> noop - if c.li != undefined and type == 'left' - # we're both replacing one element with another. only one can - # survive! - c.ld = clone otherC.li - else - return dest - else if otherC.li != undefined - if c.li != undefined and c.ld == undefined and commonOperand and c.p[common] == otherC.p[common] - # in li vs. li, left wins. - if type == 'right' - c.p[common]++ - else if otherC.p[common] <= c.p[common] - c.p[common]++ + const res = []; + text._tc(res, tc1, tc2, type); + for (let tc of Array.from(res)) { + const jc = { p: c.p.slice(0, common) }; + jc.p.push(tc.p); + if (tc.i != null) { jc.si = tc.i; } + if (tc.d != null) { jc.sd = tc.d; } + json.append(dest, jc); + } + return dest; + } + } else if ((otherC.li !== undefined) && (otherC.ld !== undefined)) { + if (otherC.p[common] === c.p[common]) { + // noop + if (!commonOperand) { + // we're below the deleted element, so -> noop + return dest; + } else if (c.ld !== undefined) { + // we're trying to delete the same element, -> noop + if ((c.li !== undefined) && (type === 'left')) { + // we're both replacing one element with another. only one can + // survive! + c.ld = clone(otherC.li); + } else { + return dest; + } + } + } + } else if (otherC.li !== undefined) { + if ((c.li !== undefined) && (c.ld === undefined) && commonOperand && (c.p[common] === otherC.p[common])) { + // in li vs. li, left wins. + if (type === 'right') { + c.p[common]++; + } + } else if (otherC.p[common] <= c.p[common]) { + c.p[common]++; + } - if c.lm != undefined - if commonOperand - # otherC edits the same list we edit - if otherC.p[common] <= c.lm - c.lm++ - # changing c.from is handled above. 
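The li-versus-li tie-break above can be exercised end to end once the type is bootstrapped (see the end of this file); a standalone sketch with two hypothetical concurrent inserts at index 0:

    const json = require('./json');
    const left  = [{ p: [0], li: 'L' }];
    const right = [{ p: [0], li: 'R' }];
    // Left wins the tie: it keeps index 0, right is shifted past it.
    json.transform(left, right, 'left');  // => [ { p: [0], li: 'L' } ]
    json.transform(right, left, 'right'); // => [ { p: [1], li: 'R' } ]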
- else if otherC.ld != undefined - if c.lm != undefined - if commonOperand - if otherC.p[common] == c.p[common] - # they deleted the thing we're trying to move - return dest - # otherC edits the same list we edit - p = otherC.p[common] - from = c.p[common] - to = c.lm - if p < to || (p == to && from < to) - c.lm-- + if (c.lm !== undefined) { + if (commonOperand) { + // otherC edits the same list we edit + if (otherC.p[common] <= c.lm) { + c.lm++; + } + } + } + // changing c.from is handled above. + } else if (otherC.ld !== undefined) { + if (c.lm !== undefined) { + if (commonOperand) { + if (otherC.p[common] === c.p[common]) { + // they deleted the thing we're trying to move + return dest; + } + // otherC edits the same list we edit + p = otherC.p[common]; + from = c.p[common]; + to = c.lm; + if ((p < to) || ((p === to) && (from < to))) { + c.lm--; + } + } + } - if otherC.p[common] < c.p[common] - c.p[common]-- - else if otherC.p[common] == c.p[common] - if otherCplength < cplength - # we're below the deleted element, so -> noop - return dest - else if c.ld != undefined - if c.li != undefined - # we're replacing, they're deleting. we become an insert. - delete c.ld - else - # we're trying to delete the same element, -> noop - return dest - else if otherC.lm != undefined - if c.lm != undefined and cplength == otherCplength - # lm vs lm, here we go! - from = c.p[common] - to = c.lm - otherFrom = otherC.p[common] - otherTo = otherC.lm - if otherFrom != otherTo - # if otherFrom == otherTo, we don't need to change our op. + if (otherC.p[common] < c.p[common]) { + c.p[common]--; + } else if (otherC.p[common] === c.p[common]) { + if (otherCplength < cplength) { + // we're below the deleted element, so -> noop + return dest; + } else if (c.ld !== undefined) { + if (c.li !== undefined) { + // we're replacing, they're deleting. we become an insert. + delete c.ld; + } else { + // we're trying to delete the same element, -> noop + return dest; + } + } + } + } else if (otherC.lm !== undefined) { + if ((c.lm !== undefined) && (cplength === otherCplength)) { + // lm vs lm, here we go! + from = c.p[common]; + to = c.lm; + const otherFrom = otherC.p[common]; + const otherTo = otherC.lm; + if (otherFrom !== otherTo) { + // if otherFrom == otherTo, we don't need to change our op. - # where did my thing go? - if from == otherFrom - # they moved it! tie break. - if type == 'left' - c.p[common] = otherTo - if from == to # ugh - c.lm = otherTo - else - return dest - else - # they moved around it - if from > otherFrom - c.p[common]-- - if from > otherTo - c.p[common]++ - else if from == otherTo - if otherFrom > otherTo - c.p[common]++ - if from == to # ugh, again - c.lm++ + // where did my thing go? + if (from === otherFrom) { + // they moved it! tie break. + if (type === 'left') { + c.p[common] = otherTo; + if (from === to) { // ugh + c.lm = otherTo; + } + } else { + return dest; + } + } else { + // they moved around it + if (from > otherFrom) { + c.p[common]--; + } + if (from > otherTo) { + c.p[common]++; + } else if (from === otherTo) { + if (otherFrom > otherTo) { + c.p[common]++; + if (from === to) { // ugh, again + c.lm++; + } + } + } - # step 2: where am i going to put it? 
- if to > otherFrom - c.lm-- - else if to == otherFrom - if to > from - c.lm-- - if to > otherTo - c.lm++ - else if to == otherTo - # if we're both moving in the same direction, tie break - if (otherTo > otherFrom and to > from) or - (otherTo < otherFrom and to < from) - if type == 'right' - c.lm++ - else - if to > from - c.lm++ - else if to == otherFrom - c.lm-- - else if c.li != undefined and c.ld == undefined and commonOperand - # li - from = otherC.p[common] - to = otherC.lm - p = c.p[common] - if p > from - c.p[common]-- - if p > to - c.p[common]++ - else - # ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath - # the lm - # - # i.e. things care about where their item is after the move. - from = otherC.p[common] - to = otherC.lm - p = c.p[common] - if p == from - c.p[common] = to - else - if p > from - c.p[common]-- - if p > to - c.p[common]++ - else if p == to - if from > to - c.p[common]++ - else if otherC.oi != undefined && otherC.od != undefined - if c.p[common] == otherC.p[common] - if c.oi != undefined and commonOperand - # we inserted where someone else replaced - if type == 'right' - # left wins - return dest - else - # we win, make our op replace what they inserted - c.od = otherC.oi - else - # -> noop if the other component is deleting the same object (or any - # parent) - return dest - else if otherC.oi != undefined - if c.oi != undefined and c.p[common] == otherC.p[common] - # left wins if we try to insert at the same place - if type == 'left' - json.append dest, {p:c.p, od:otherC.oi} - else - return dest - else if otherC.od != undefined - if c.p[common] == otherC.p[common] - return dest if !commonOperand - if c.oi != undefined - delete c.od - else - return dest + // step 2: where am i going to put it? + if (to > otherFrom) { + c.lm--; + } else if (to === otherFrom) { + if (to > from) { + c.lm--; + } + } + if (to > otherTo) { + c.lm++; + } else if (to === otherTo) { + // if we're both moving in the same direction, tie break + if (((otherTo > otherFrom) && (to > from)) || + ((otherTo < otherFrom) && (to < from))) { + if (type === 'right') { + c.lm++; + } + } else { + if (to > from) { + c.lm++; + } else if (to === otherFrom) { + c.lm--; + } + } + } + } + } + } else if ((c.li !== undefined) && (c.ld === undefined) && commonOperand) { + // li + from = otherC.p[common]; + to = otherC.lm; + p = c.p[common]; + if (p > from) { + c.p[common]--; + } + if (p > to) { + c.p[common]++; + } + } else { + // ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath + // the lm + // + // i.e. things care about where their item is after the move. 
+ from = otherC.p[common]; + to = otherC.lm; + p = c.p[common]; + if (p === from) { + c.p[common] = to; + } else { + if (p > from) { + c.p[common]--; + } + if (p > to) { + c.p[common]++; + } else if (p === to) { + if (from > to) { + c.p[common]++; + } + } + } + } + } else if ((otherC.oi !== undefined) && (otherC.od !== undefined)) { + if (c.p[common] === otherC.p[common]) { + if ((c.oi !== undefined) && commonOperand) { + // we inserted where someone else replaced + if (type === 'right') { + // left wins + return dest; + } else { + // we win, make our op replace what they inserted + c.od = otherC.oi; + } + } else { + // -> noop if the other component is deleting the same object (or any + // parent) + return dest; + } + } + } else if (otherC.oi !== undefined) { + if ((c.oi !== undefined) && (c.p[common] === otherC.p[common])) { + // left wins if we try to insert at the same place + if (type === 'left') { + json.append(dest, {p:c.p, od:otherC.oi}); + } else { + return dest; + } + } + } else if (otherC.od !== undefined) { + if (c.p[common] === otherC.p[common]) { + if (!commonOperand) { return dest; } + if (c.oi !== undefined) { + delete c.od; + } else { + return dest; + } + } + } + } - json.append dest, c - return dest + json.append(dest, c); + return dest; +}; -if WEB? - exports.types ||= {} +if (typeof WEB !== 'undefined' && WEB !== null) { + if (!exports.types) { exports.types = {}; } - # This is kind of awful - come up with a better way to hook this helper code up. - exports._bt(json, json.transformComponent, json.checkValidOp, json.append) + // This is kind of awful - come up with a better way to hook this helper code up. + exports._bt(json, json.transformComponent, json.checkValidOp, json.append); - # [] is used to prevent closure from renaming types.text - exports.types.json = json -else - module.exports = json + // [] is used to prevent closure from renaming types.text + exports.types.json = json; +} else { + module.exports = json; - require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append) + require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append); +} diff --git a/services/document-updater/app/coffee/sharejs/types/model.js b/services/document-updater/app/coffee/sharejs/types/model.js index 284d6fd770..9b6e65effd 100644 --- a/services/document-updater/app/coffee/sharejs/types/model.js +++ b/services/document-updater/app/coffee/sharejs/types/model.js @@ -1,603 +1,699 @@ -# The model of all the ops. Responsible for applying & transforming remote deltas -# and managing the storage layer. -# -# Actual storage is handled by the database wrappers in db/*, wrapped by DocCache +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS104: Avoid inline assignments + * DS204: Change includes calls to have a more natural evaluation order + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// The model of all the ops. Responsible for applying & transforming remote deltas +// and managing the storage layer. 
+// +// Actual storage is handled by the database wrappers in db/*, wrapped by DocCache -{EventEmitter} = require 'events' +let Model; +const {EventEmitter} = require('events'); -queue = require './syncqueue' -types = require '../types' +const queue = require('./syncqueue'); +const types = require('../types'); -isArray = (o) -> Object.prototype.toString.call(o) == '[object Array]' +const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; -# This constructor creates a new Model object. There will be one model object -# per server context. -# -# The model object is responsible for a lot of things: -# -# - It manages the interactions with the database -# - It maintains (in memory) a set of all active documents -# - It calls out to the OT functions when necessary -# -# The model is an event emitter. It emits the following events: -# -# create(docName, data): A document has been created with the specified name & data -module.exports = Model = (db, options) -> - # db can be null if the user doesn't want persistance. +// This constructor creates a new Model object. There will be one model object +// per server context. +// +// The model object is responsible for a lot of things: +// +// - It manages the interactions with the database +// - It maintains (in memory) a set of all active documents +// - It calls out to the OT functions when necessary +// +// The model is an event emitter. It emits the following events: +// +// create(docName, data): A document has been created with the specified name & data +module.exports = (Model = function(db, options) { + // db can be null if the user doesn't want persistance. - return new Model(db, options) if !(this instanceof Model) + let getOps; + if (!(this instanceof Model)) { return new Model(db, options); } - model = this + const model = this; - options ?= {} + if (options == null) { options = {}; } - # This is a cache of 'live' documents. - # - # The cache is a map from docName -> { - # ops:[{op, meta}] - # snapshot - # type - # v - # meta - # eventEmitter - # reapTimer - # committedVersion: v - # snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant - # dbMeta: database specific data - # opQueue: syncQueue for processing ops - # } - # - # The ops list contains the document's last options.numCachedOps ops. (Or all - # of them if we're using a memory store). - # - # Documents are stored in this set so long as the document has been accessed in - # the last few seconds (options.reapTime) OR at least one client has the document - # open. I don't know if I should keep open (but not being edited) documents live - - # maybe if a client has a document open but the document isn't being edited, I should - # flush it from the cache. - # - # In any case, the API to model is designed such that if we want to change that later - # it should be pretty easy to do so without any external-to-the-model code changes. - docs = {} + // This is a cache of 'live' documents. + // + // The cache is a map from docName -> { + // ops:[{op, meta}] + // snapshot + // type + // v + // meta + // eventEmitter + // reapTimer + // committedVersion: v + // snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant + // dbMeta: database specific data + // opQueue: syncQueue for processing ops + // } + // + // The ops list contains the document's last options.numCachedOps ops. (Or all + // of them if we're using a memory store). 
+ // + // Documents are stored in this set so long as the document has been accessed in + // the last few seconds (options.reapTime) OR at least one client has the document + // open. I don't know if I should keep open (but not being edited) documents live - + // maybe if a client has a document open but the document isn't being edited, I should + // flush it from the cache. + // + // In any case, the API to model is designed such that if we want to change that later + // it should be pretty easy to do so without any external-to-the-model code changes. + const docs = {}; - # This is a map from docName -> [callback]. It is used when a document hasn't been - # cached and multiple getSnapshot() / getVersion() requests come in. All requests - # are added to the callback list and called when db.getSnapshot() returns. - # - # callback(error, snapshot data) - awaitingGetSnapshot = {} + // This is a map from docName -> [callback]. It is used when a document hasn't been + // cached and multiple getSnapshot() / getVersion() requests come in. All requests + // are added to the callback list and called when db.getSnapshot() returns. + // + // callback(error, snapshot data) + const awaitingGetSnapshot = {}; - # The time that documents which no clients have open will stay in the cache. - # Should be > 0. - options.reapTime ?= 3000 + // The time that documents which no clients have open will stay in the cache. + // Should be > 0. + if (options.reapTime == null) { options.reapTime = 3000; } - # The number of operations the cache holds before reusing the space - options.numCachedOps ?= 10 + // The number of operations the cache holds before reusing the space + if (options.numCachedOps == null) { options.numCachedOps = 10; } - # This option forces documents to be reaped, even when there's no database backend. - # This is useful when you don't care about persistance and don't want to gradually - # fill memory. - # - # You might want to set reapTime to a day or something. - options.forceReaping ?= false + // This option forces documents to be reaped, even when there's no database backend. + // This is useful when you don't care about persistance and don't want to gradually + // fill memory. + // + // You might want to set reapTime to a day or something. + if (options.forceReaping == null) { options.forceReaping = false; } - # Until I come up with a better strategy, we'll save a copy of the document snapshot - # to the database every ~20 submitted ops. - options.opsBeforeCommit ?= 20 + // Until I come up with a better strategy, we'll save a copy of the document snapshot + // to the database every ~20 submitted ops. + if (options.opsBeforeCommit == null) { options.opsBeforeCommit = 20; } - # It takes some processing time to transform client ops. The server will punt ops back to the - # client to transform if they're too old. - options.maximumAge ?= 40 + // It takes some processing time to transform client ops. The server will punt ops back to the + // client to transform if they're too old. + if (options.maximumAge == null) { options.maximumAge = 40; } - # **** Cache API methods + // **** Cache API methods - # Its important that all ops are applied in order. This helper method creates the op submission queue - # for a single document. This contains the logic for transforming & applying ops. 
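For orientation, a hypothetical construction of the model using the options enumerated above (db may be null for a pure in-memory store):

    const Model = require('./model');
    // Every option is optional; these override the defaults shown above.
    const model = new Model(null, {
      reapTime: 10000,     // keep idle docs cached for 10 seconds
      numCachedOps: 50,    // ops retained per doc for client catch-up
      opsBeforeCommit: 20, // how often a snapshot is written back
      maximumAge: 40       // older ops are punted back to the client
    });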
- makeOpQueue = (docName, doc) -> queue (opData, callback) -> - return callback 'Version missing' unless opData.v >= 0 - return callback 'Op at future version' if opData.v > doc.v + // Its important that all ops are applied in order. This helper method creates the op submission queue + // for a single document. This contains the logic for transforming & applying ops. + const makeOpQueue = (docName, doc) => queue(function(opData, callback) { + if (!(opData.v >= 0)) { return callback('Version missing'); } + if (opData.v > doc.v) { return callback('Op at future version'); } - # Punt the transforming work back to the client if the op is too old. - return callback 'Op too old' if opData.v + options.maximumAge < doc.v + // Punt the transforming work back to the client if the op is too old. + if ((opData.v + options.maximumAge) < doc.v) { return callback('Op too old'); } - opData.meta ||= {} - opData.meta.ts = Date.now() + if (!opData.meta) { opData.meta = {}; } + opData.meta.ts = Date.now(); - # We'll need to transform the op to the current version of the document. This - # calls the callback immediately if opVersion == doc.v. - getOps docName, opData.v, doc.v, (error, ops) -> - return callback error if error + // We'll need to transform the op to the current version of the document. This + // calls the callback immediately if opVersion == doc.v. + return getOps(docName, opData.v, doc.v, function(error, ops) { + let snapshot; + if (error) { return callback(error); } - unless doc.v - opData.v == ops.length - # This should never happen. It indicates that we didn't get all the ops we - # asked for. Its important that the submitted op is correctly transformed. - console.error "Could not get old ops in model for document #{docName}" - console.error "Expected ops #{opData.v} to #{doc.v} and got #{ops.length} ops" - return callback 'Internal error' + if ((doc.v - opData.v) !== ops.length) { + // This should never happen. It indicates that we didn't get all the ops we + // asked for. Its important that the submitted op is correctly transformed. + console.error(`Could not get old ops in model for document ${docName}`); + console.error(`Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`); + return callback('Internal error'); + } - if ops.length > 0 - try - # If there's enough ops, it might be worth spinning this out into a webworker thread. - for oldOp in ops - # Dup detection works by sending the id(s) the op has been submitted with previously. - # If the id matches, we reject it. The client can also detect the op has been submitted - # already if it sees its own previous id in the ops it sees when it does catchup. - if oldOp.meta.source and opData.dupIfSource and oldOp.meta.source in opData.dupIfSource - return callback 'Op already submitted' + if (ops.length > 0) { + try { + // If there's enough ops, it might be worth spinning this out into a webworker thread. + for (let oldOp of Array.from(ops)) { + // Dup detection works by sending the id(s) the op has been submitted with previously. + // If the id matches, we reject it. The client can also detect the op has been submitted + // already if it sees its own previous id in the ops it sees when it does catchup. 
+ if (oldOp.meta.source && opData.dupIfSource && Array.from(opData.dupIfSource).includes(oldOp.meta.source)) { + return callback('Op already submitted'); + } - opData.op = doc.type.transform opData.op, oldOp.op, 'left' - opData.v++ - catch error - console.error error.stack - return callback error.message + opData.op = doc.type.transform(opData.op, oldOp.op, 'left'); + opData.v++; + } + } catch (error1) { + error = error1; + console.error(error.stack); + return callback(error.message); + } + } - try - snapshot = doc.type.apply doc.snapshot, opData.op - catch error - console.error error.stack - return callback error.message + try { + snapshot = doc.type.apply(doc.snapshot, opData.op); + } catch (error2) { + error = error2; + console.error(error.stack); + return callback(error.message); + } - # The op data should be at the current version, and the new document data should be at - # the next version. - # - # This should never happen in practice, but its a nice little check to make sure everything - # is hunky-dory. - unless opData.v == doc.v - # This should never happen. - console.error "Version mismatch detected in model. File a ticket - this is a bug." - console.error "Expecting #{opData.v} == #{doc.v}" - return callback 'Internal error' + // The op data should be at the current version, and the new document data should be at + // the next version. + // + // This should never happen in practice, but its a nice little check to make sure everything + // is hunky-dory. + if (opData.v !== doc.v) { + // This should never happen. + console.error("Version mismatch detected in model. File a ticket - this is a bug."); + console.error(`Expecting ${opData.v} == ${doc.v}`); + return callback('Internal error'); + } - #newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} - writeOp = db?.writeOp or (docName, newOpData, callback) -> callback() + //newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); - writeOp docName, opData, (error) -> - if error - # The user should probably know about this. - console.warn "Error writing ops to database: #{error}" - return callback error + return writeOp(docName, opData, function(error) { + if (error) { + // The user should probably know about this. + console.warn(`Error writing ops to database: ${error}`); + return callback(error); + } - options.stats?.writeOp?() + __guardMethod__(options.stats, 'writeOp', o => o.writeOp()); - # This is needed when we emit the 'change' event, below. - oldSnapshot = doc.snapshot + // This is needed when we emit the 'change' event, below. + const oldSnapshot = doc.snapshot; - # All the heavy lifting is now done. Finally, we'll update the cache with the new data - # and (maybe!) save a new document snapshot to the database. + // All the heavy lifting is now done. Finally, we'll update the cache with the new data + // and (maybe!) save a new document snapshot to the database. 
- doc.v = opData.v + 1 - doc.snapshot = snapshot + doc.v = opData.v + 1; + doc.snapshot = snapshot; - doc.ops.push opData - doc.ops.shift() if db and doc.ops.length > options.numCachedOps + doc.ops.push(opData); + if (db && (doc.ops.length > options.numCachedOps)) { doc.ops.shift(); } - model.emit 'applyOp', docName, opData, snapshot, oldSnapshot - doc.eventEmitter.emit 'op', opData, snapshot, oldSnapshot + model.emit('applyOp', docName, opData, snapshot, oldSnapshot); + doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot); - # The callback is called with the version of the document at which the op was applied. - # This is the op.v after transformation, and its doc.v - 1. - callback null, opData.v + // The callback is called with the version of the document at which the op was applied. + // This is the op.v after transformation, and its doc.v - 1. + callback(null, opData.v); - # I need a decent strategy here for deciding whether or not to save the snapshot. - # - # The 'right' strategy looks something like "Store the snapshot whenever the snapshot - # is smaller than the accumulated op data". For now, I'll just store it every 20 - # ops or something. (Configurable with doc.committedVersion) - if !doc.snapshotWriteLock and doc.committedVersion + options.opsBeforeCommit <= doc.v - tryWriteSnapshot docName, (error) -> - console.warn "Error writing snapshot #{error}. This is nonfatal" if error + // I need a decent strategy here for deciding whether or not to save the snapshot. + // + // The 'right' strategy looks something like "Store the snapshot whenever the snapshot + // is smaller than the accumulated op data". For now, I'll just store it every 20 + // ops or something. (Configurable with doc.committedVersion) + if (!doc.snapshotWriteLock && ((doc.committedVersion + options.opsBeforeCommit) <= doc.v)) { + return tryWriteSnapshot(docName, function(error) { + if (error) { return console.warn(`Error writing snapshot ${error}. This is nonfatal`); } + }); + } + }); + }); + }); - # Add the data for the given docName to the cache. The named document shouldn't already - # exist in the doc set. - # - # Returns the new doc. - add = (docName, error, data, committedVersion, ops, dbMeta) -> - callbacks = awaitingGetSnapshot[docName] - delete awaitingGetSnapshot[docName] + // Add the data for the given docName to the cache. The named document shouldn't already + // exist in the doc set. + // + // Returns the new doc. + const add = function(docName, error, data, committedVersion, ops, dbMeta) { + let callback, doc; + const callbacks = awaitingGetSnapshot[docName]; + delete awaitingGetSnapshot[docName]; - if error - callback error for callback in callbacks if callbacks - else - doc = docs[docName] = - snapshot: data.snapshot - v: data.v - type: data.type - meta: data.meta + if (error) { + if (callbacks) { for (callback of Array.from(callbacks)) { callback(error); } } + } else { + doc = (docs[docName] = { + snapshot: data.snapshot, + v: data.v, + type: data.type, + meta: data.meta, - # Cache of ops - ops: ops or [] + // Cache of ops + ops: ops || [], - eventEmitter: new EventEmitter + eventEmitter: new EventEmitter, - # Timer before the document will be invalidated from the cache (if the document has no - # listeners) - reapTimer: null + // Timer before the document will be invalidated from the cache (if the document has no + // listeners) + reapTimer: null, - # Version of the snapshot thats in the database - committedVersion: committedVersion ? 
data.v - snapshotWriteLock: false - dbMeta: dbMeta + // Version of the snapshot thats in the database + committedVersion: committedVersion != null ? committedVersion : data.v, + snapshotWriteLock: false, + dbMeta + }); - doc.opQueue = makeOpQueue docName, doc + doc.opQueue = makeOpQueue(docName, doc); - refreshReapingTimeout docName - model.emit 'add', docName, data - callback null, doc for callback in callbacks if callbacks + refreshReapingTimeout(docName); + model.emit('add', docName, data); + if (callbacks) { for (callback of Array.from(callbacks)) { callback(null, doc); } } + } - doc + return doc; + }; - # This is a little helper wrapper around db.getOps. It does two things: - # - # - If there's no database set, it returns an error to the callback - # - It adds version numbers to each op returned from the database - # (These can be inferred from context so the DB doesn't store them, but its useful to have them). - getOpsInternal = (docName, start, end, callback) -> - return callback? 'Document does not exist' unless db + // This is a little helper wrapper around db.getOps. It does two things: + // + // - If there's no database set, it returns an error to the callback + // - It adds version numbers to each op returned from the database + // (These can be inferred from context so the DB doesn't store them, but its useful to have them). + const getOpsInternal = function(docName, start, end, callback) { + if (!db) { return (typeof callback === 'function' ? callback('Document does not exist') : undefined); } - db.getOps docName, start, end, (error, ops) -> - return callback? error if error + return db.getOps(docName, start, end, function(error, ops) { + if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } - v = start - op.v = v++ for op in ops + let v = start; + for (let op of Array.from(ops)) { op.v = v++; } - callback? null, ops + return (typeof callback === 'function' ? callback(null, ops) : undefined); + }); + }; - # Load the named document into the cache. This function is re-entrant. - # - # The callback is called with (error, doc) - load = (docName, callback) -> - if docs[docName] - # The document is already loaded. Return immediately. - options.stats?.cacheHit? 'getSnapshot' - return callback null, docs[docName] + // Load the named document into the cache. This function is re-entrant. + // + // The callback is called with (error, doc) + const load = function(docName, callback) { + if (docs[docName]) { + // The document is already loaded. Return immediately. + __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')); + return callback(null, docs[docName]); + } - # We're a memory store. If we don't have it, nobody does. - return callback 'Document does not exist' unless db + // We're a memory store. If we don't have it, nobody does. + if (!db) { return callback('Document does not exist'); } - callbacks = awaitingGetSnapshot[docName] + const callbacks = awaitingGetSnapshot[docName]; - # The document is being loaded already. Add ourselves as a callback. - return callbacks.push callback if callbacks + // The document is being loaded already. Add ourselves as a callback. + if (callbacks) { return callbacks.push(callback); } - options.stats?.cacheMiss? 'getSnapshot' + __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot')); - # The document isn't loaded and isn't being loaded. Load it. 
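
The awaitingGetSnapshot bookkeeping that load() uses here is a standard request-coalescing pattern: the first caller kicks off the fetch, and later callers just queue their callbacks behind it. A minimal standalone sketch (hypothetical names, not part of the patch):

    // Coalesce concurrent loads of the same key so the backend is hit once.
    const pending = {};
    function loadOnce(key, fetch, callback) {
      if (pending[key]) { return pending[key].push(callback); }
      pending[key] = [callback];
      fetch(key, (error, value) => {
        const callbacks = pending[key];
        delete pending[key];
        for (const cb of callbacks) { cb(error, value); }
      });
    }
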
- awaitingGetSnapshot[docName] = [callback] - db.getSnapshot docName, (error, data, dbMeta) -> - return add docName, error if error + // The document isn't loaded and isn't being loaded. Load it. + awaitingGetSnapshot[docName] = [callback]; + return db.getSnapshot(docName, function(error, data, dbMeta) { + if (error) { return add(docName, error); } - type = types[data.type] - unless type - console.warn "Type '#{data.type}' missing" - return callback "Type not found" - data.type = type + const type = types[data.type]; + if (!type) { + console.warn(`Type '${data.type}' missing`); + return callback("Type not found"); + } + data.type = type; - committedVersion = data.v + const committedVersion = data.v; - # The server can close without saving the most recent document snapshot. - # In this case, there are extra ops which need to be applied before - # returning the snapshot. - getOpsInternal docName, data.v, null, (error, ops) -> - return callback error if error + // The server can close without saving the most recent document snapshot. + // In this case, there are extra ops which need to be applied before + // returning the snapshot. + return getOpsInternal(docName, data.v, null, function(error, ops) { + if (error) { return callback(error); } - if ops.length > 0 - console.log "Catchup #{docName} #{data.v} -> #{data.v + ops.length}" + if (ops.length > 0) { + console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); - try - for op in ops - data.snapshot = type.apply data.snapshot, op.op - data.v++ - catch e - # This should never happen - it indicates that whats in the - # database is invalid. - console.error "Op data invalid for #{docName}: #{e.stack}" - return callback 'Op data invalid' + try { + for (let op of Array.from(ops)) { + data.snapshot = type.apply(data.snapshot, op.op); + data.v++; + } + } catch (e) { + // This should never happen - it indicates that whats in the + // database is invalid. + console.error(`Op data invalid for ${docName}: ${e.stack}`); + return callback('Op data invalid'); + } + } - model.emit 'load', docName, data - add docName, error, data, committedVersion, ops, dbMeta + model.emit('load', docName, data); + return add(docName, error, data, committedVersion, ops, dbMeta); + }); + }); + }; - # This makes sure the cache contains a document. If the doc cache doesn't contain - # a document, it is loaded from the database and stored. - # - # Documents are stored so long as either: - # - They have been accessed within the past #{PERIOD} - # - At least one client has the document open - refreshReapingTimeout = (docName) -> - doc = docs[docName] - return unless doc + // This makes sure the cache contains a document. If the doc cache doesn't contain + // a document, it is loaded from the database and stored. + // + // Documents are stored so long as either: + // - They have been accessed within the past #{PERIOD} + // - At least one client has the document open + var refreshReapingTimeout = function(docName) { + const doc = docs[docName]; + if (!doc) { return; } - # I want to let the clients list be updated before this is called. - process.nextTick -> - # This is an awkward way to find out the number of clients on a document. If this - # causes performance issues, add a numClients field to the document. - # - # The first check is because its possible that between refreshReapingTimeout being called and this - # event being fired, someone called delete() on the document and hence the doc is something else now. 
- if doc == docs[docName] and - doc.eventEmitter.listeners('op').length == 0 and - (db or options.forceReaping) and - doc.opQueue.busy is false + // I want to let the clients list be updated before this is called. + return process.nextTick(function() { + // This is an awkward way to find out the number of clients on a document. If this + // causes performance issues, add a numClients field to the document. + // + // The first check is because its possible that between refreshReapingTimeout being called and this + // event being fired, someone called delete() on the document and hence the doc is something else now. + if ((doc === docs[docName]) && + (doc.eventEmitter.listeners('op').length === 0) && + (db || options.forceReaping) && + (doc.opQueue.busy === false)) { - clearTimeout doc.reapTimer - doc.reapTimer = reapTimer = setTimeout -> - tryWriteSnapshot docName, -> - # If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - # in the middle of applying an operation, don't reap. - delete docs[docName] if docs[docName].reapTimer is reapTimer and doc.opQueue.busy is false - , options.reapTime + let reapTimer; + clearTimeout(doc.reapTimer); + return doc.reapTimer = (reapTimer = setTimeout(() => tryWriteSnapshot(docName, function() { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ((docs[docName].reapTimer === reapTimer) && (doc.opQueue.busy === false)) { return delete docs[docName]; } + }) + , options.reapTime)); + } + }); + }; - tryWriteSnapshot = (docName, callback) -> - return callback?() unless db + var tryWriteSnapshot = function(docName, callback) { + if (!db) { return (typeof callback === 'function' ? callback() : undefined); } - doc = docs[docName] + const doc = docs[docName]; - # The doc is closed - return callback?() unless doc + // The doc is closed + if (!doc) { return (typeof callback === 'function' ? callback() : undefined); } - # The document is already saved. - return callback?() if doc.committedVersion is doc.v + // The document is already saved. + if (doc.committedVersion === doc.v) { return (typeof callback === 'function' ? callback() : undefined); } - return callback? 'Another snapshot write is in progress' if doc.snapshotWriteLock + if (doc.snapshotWriteLock) { return (typeof callback === 'function' ? callback('Another snapshot write is in progress') : undefined); } - doc.snapshotWriteLock = true + doc.snapshotWriteLock = true; - options.stats?.writeSnapshot?() + __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()); - writeSnapshot = db?.writeSnapshot or (docName, docData, dbMeta, callback) -> callback() + const writeSnapshot = (db != null ? db.writeSnapshot : undefined) || ((docName, docData, dbMeta, callback) => callback()); - data = - v: doc.v - meta: doc.meta - snapshot: doc.snapshot - # The database doesn't know about object types. + const data = { + v: doc.v, + meta: doc.meta, + snapshot: doc.snapshot, + // The database doesn't know about object types. type: doc.type.name + }; - # Commit snapshot. - writeSnapshot docName, data, doc.dbMeta, (error, dbMeta) -> - doc.snapshotWriteLock = false + // Commit snapshot. + return writeSnapshot(docName, data, doc.dbMeta, function(error, dbMeta) { + doc.snapshotWriteLock = false; - # We have to use data.v here because the version in the doc could - # have been updated between the call to writeSnapshot() and now. 
-      doc.committedVersion = data.v
-      doc.dbMeta = dbMeta
+      // We have to use data.v here because the version in the doc could
+      // have been updated between the call to writeSnapshot() and now.
+      doc.committedVersion = data.v;
+      doc.dbMeta = dbMeta;

-      callback? error
+      return (typeof callback === 'function' ? callback(error) : undefined);
+    });
+  };

-  # *** Model interface methods
+  // *** Model interface methods

-  # Create a new document.
-  #
-  # data should be {snapshot, type, [meta]}. The version of a new document is 0.
-  @create = (docName, type, meta, callback) ->
-    [meta, callback] = [{}, meta] if typeof meta is 'function'
+  // Create a new document.
+  //
+  // data should be {snapshot, type, [meta]}. The version of a new document is 0.
+  this.create = function(docName, type, meta, callback) {
+    if (typeof meta === 'function') { [meta, callback] = Array.from([{}, meta]); }

-    return callback? 'Invalid document name' if docName.match /\//
-    return callback? 'Document already exists' if docs[docName]
+    if (docName.match(/\//)) { return (typeof callback === 'function' ? callback('Invalid document name') : undefined); }
+    if (docs[docName]) { return (typeof callback === 'function' ? callback('Document already exists') : undefined); }

-    type = types[type] if typeof type == 'string'
-    return callback? 'Type not found' unless type
+    if (typeof type === 'string') { type = types[type]; }
+    if (!type) { return (typeof callback === 'function' ? callback('Type not found') : undefined); }

-    data =
-      snapshot:type.create()
-      type:type.name
-      meta:meta or {}
+    const data = {
+      snapshot:type.create(),
+      type:type.name,
+      meta:meta || {},
       v:0
+    };

-    done = (error, dbMeta) ->
-      # dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
-      return callback? error if error
+    const done = function(error, dbMeta) {
+      // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
+      if (error) { return (typeof callback === 'function' ? callback(error) : undefined); }

-      # From here on we'll store the object version of the type name.
-      data.type = type
-      add docName, null, data, 0, [], dbMeta
-      model.emit 'create', docName, data
-      callback?()
+      // From here on we'll store the object version of the type name.
+      data.type = type;
+      add(docName, null, data, 0, [], dbMeta);
+      model.emit('create', docName, data);
+      return (typeof callback === 'function' ? callback() : undefined);
+    };

-    if db
-      db.create docName, data, done
-    else
-      done()
+    if (db) {
+      return db.create(docName, data, done);
+    } else {
+      return done();
+    }
+  };
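
For orientation, a hedged usage sketch of the create/applyOp interface defined in this hunk (the Model constructor call, the null db value and the document name are assumptions for illustration, not part of the patch):

    // Create a 'simple'-typed document and submit one insert op.
    const model = new Model(null, {});   // null db: a purely in-memory store
    model.create('example-doc', 'simple', {}, (error) => {
      if (error) { return console.error(error); }
      model.applyOp('example-doc', { v: 0, op: { position: 0, text: 'hi' } },
        (error, appliedVersion) => console.log(error, appliedVersion));
    });
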
-  # Permanently deletes the specified document.
-  # If listeners are attached, they are removed.
-  #
-  # The callback is called with (error) if there was an error. If error is null / undefined, the
-  # document was deleted.
-  #
-  # WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
-  # deletion. Subsequent op submissions will fail).
+  // Permanently deletes the specified document.
+  // If listeners are attached, they are removed.
+  //
+  // The callback is called with (error) if there was an error. If error is null / undefined, the
+  // document was deleted.
+  //
+  // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
+  // deletion. Subsequent op submissions will fail).
+  this.delete = function(docName, callback) {
+    const doc = docs[docName];

-    if doc
-      clearTimeout doc.reapTimer
-      delete docs[docName]
+    if (doc) {
+      clearTimeout(doc.reapTimer);
+      delete docs[docName];
+    }

-    done = (error) ->
-      model.emit 'delete', docName unless error
-      callback? error
+    const done = function(error) {
+      if (!error) { model.emit('delete', docName); }
+      return (typeof callback === 'function' ? callback(error) : undefined);
+    };

-    if db
-      db.delete docName, doc?.dbMeta, done
-    else
-      done (if !doc then 'Document does not exist')
+    if (db) {
+      return db.delete(docName, doc != null ? doc.dbMeta : undefined, done);
+    } else {
+      return done((!doc ? 'Document does not exist' : undefined));
+    }
+  };

-  # This gets all operations from [start...end]. (That is, its not inclusive.)
-  #
-  # end can be null. This means 'get me all ops from start'.
-  #
-  # Each op returned is in the form {op:o, meta:m, v:version}.
-  #
-  # Callback is called with (error, [ops])
-  #
-  # If the document does not exist, getOps doesn't necessarily return an error. This is because
-  # its awkward to figure out whether or not the document exists for things
-  # like the redis database backend. I guess its a bit gross having this inconsistent
-  # with the other DB calls, but its certainly convenient.
-  #
-  # Use getVersion() to determine if a document actually exists, if thats what you're
-  # after.
-  @getOps = getOps = (docName, start, end, callback) ->
-    # getOps will only use the op cache if its there. It won't fill the op cache in.
-    throw new Error 'start must be 0+' unless start >= 0
+  // This gets all operations from [start...end]. (That is, its not inclusive.)
+  //
+  // end can be null. This means 'get me all ops from start'.
+  //
+  // Each op returned is in the form {op:o, meta:m, v:version}.
+  //
+  // Callback is called with (error, [ops])
+  //
+  // If the document does not exist, getOps doesn't necessarily return an error. This is because
+  // its awkward to figure out whether or not the document exists for things
+  // like the redis database backend. I guess its a bit gross having this inconsistent
+  // with the other DB calls, but its certainly convenient.
+  //
+  // Use getVersion() to determine if a document actually exists, if thats what you're
+  // after.
+  this.getOps = (getOps = function(docName, start, end, callback) {
+    // getOps will only use the op cache if its there. It won't fill the op cache in.
+    if (!(start >= 0)) { throw new Error('start must be 0+'); }

-    [end, callback] = [null, end] if typeof end is 'function'
+    if (typeof end === 'function') { [end, callback] = Array.from([null, end]); }

-    ops = docs[docName]?.ops
+    const ops = docs[docName] != null ? docs[docName].ops : undefined;

-    if ops
-      version = docs[docName].v
+    if (ops) {
+      const version = docs[docName].v;

-      # Ops contains an array of ops. The last op in the list is the last op applied
-      end ?= version
-      start = Math.min start, end
+      // Ops contains an array of ops. The last op in the list is the last op applied
+      if (end == null) { end = version; }
+      start = Math.min(start, end);

-      return callback null, [] if start == end
+      if (start === end) { return callback(null, []); }

-      # Base is the version number of the oldest op we have cached
-      base = version - ops.length
+      // Base is the version number of the oldest op we have cached
+      const base = version - ops.length;

-      # If the database is null, we'll trim to the ops we do have and hope thats enough.
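
A hedged usage sketch of the range semantics documented above (document name illustrative, not part of the patch; ops come back as {op, meta, v} for versions [start, end)):

    // Fetch ops 5..9 inclusive; pass end = null to read "from 5 to latest".
    model.getOps('example-doc', 5, 10, (error, ops) => {
      if (error) { return console.error(error); }
      for (const { v, op, meta } of ops) { console.log(v, op, meta); }
    });
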
- if start >= base or db is null - refreshReapingTimeout docName - options.stats?.cacheHit 'getOps' + // If the database is null, we'll trim to the ops we do have and hope thats enough. + if ((start >= base) || (db === null)) { + refreshReapingTimeout(docName); + if (options.stats != null) { + options.stats.cacheHit('getOps'); + } - return callback null, ops[(start - base)...(end - base)] + return callback(null, ops.slice((start - base), (end - base))); + } + } - options.stats?.cacheMiss 'getOps' + if (options.stats != null) { + options.stats.cacheMiss('getOps'); + } - getOpsInternal docName, start, end, callback + return getOpsInternal(docName, start, end, callback); + }); - # Gets the snapshot data for the specified document. - # getSnapshot(docName, callback) - # Callback is called with (error, {v: , type: , snapshot: , meta: }) - @getSnapshot = (docName, callback) -> - load docName, (error, doc) -> - callback error, if doc then {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} + // Gets the snapshot data for the specified document. + // getSnapshot(docName, callback) + // Callback is called with (error, {v: , type: , snapshot: , meta: }) + this.getSnapshot = (docName, callback) => load(docName, (error, doc) => callback(error, doc ? {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} : undefined)); - # Gets the latest version # of the document. - # getVersion(docName, callback) - # callback is called with (error, version). - @getVersion = (docName, callback) -> - load docName, (error, doc) -> callback error, doc?.v + // Gets the latest version # of the document. + // getVersion(docName, callback) + // callback is called with (error, version). + this.getVersion = (docName, callback) => load(docName, (error, doc) => callback(error, doc != null ? doc.v : undefined)); - # Apply an op to the specified document. - # The callback is passed (error, applied version #) - # opData = {op:op, v:v, meta:metadata} - # - # Ops are queued before being applied so that the following code applies op C before op B: - # model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB - # model.applyOp 'doc', OPC - @applyOp = (docName, opData, callback) -> - # All the logic for this is in makeOpQueue, above. - load docName, (error, doc) -> - return callback error if error + // Apply an op to the specified document. + // The callback is passed (error, applied version #) + // opData = {op:op, v:v, meta:metadata} + // + // Ops are queued before being applied so that the following code applies op C before op B: + // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB + // model.applyOp 'doc', OPC + this.applyOp = (docName, opData, callback) => // All the logic for this is in makeOpQueue, above. + load(docName, function(error, doc) { + if (error) { return callback(error); } - process.nextTick -> doc.opQueue opData, (error, newVersion) -> - refreshReapingTimeout docName - callback? error, newVersion + return process.nextTick(() => doc.opQueue(opData, function(error, newVersion) { + refreshReapingTimeout(docName); + return (typeof callback === 'function' ? 
callback(error, newVersion) : undefined);
+      }));
+    });

-  # TODO: store (some) metadata in DB
-  # TODO: op and meta should be combinable in the op that gets sent
-  @applyMetaOp = (docName, metaOpData, callback) ->
-    {path, value} = metaOpData.meta
+  // TODO: store (some) metadata in DB
+  // TODO: op and meta should be combinable in the op that gets sent
+  this.applyMetaOp = function(docName, metaOpData, callback) {
+    const {path, value} = metaOpData.meta;

-    return callback? "path should be an array" unless isArray path
+    if (!isArray(path)) { return (typeof callback === 'function' ? callback("path should be an array") : undefined); }

-    load docName, (error, doc) ->
-      if error?
-        callback? error
-      else
-        applied = false
-        switch path[0]
-          when 'shout'
-            doc.eventEmitter.emit 'op', metaOpData
-            applied = true
+    return load(docName, function(error, doc) {
+      if (error != null) {
+        return (typeof callback === 'function' ? callback(error) : undefined);
+      } else {
+        let applied = false;
+        switch (path[0]) {
+          case 'shout':
+            doc.eventEmitter.emit('op', metaOpData);
+            applied = true;
+            break;
+        }

-        model.emit 'applyMetaOp', docName, path, value if applied
-        callback? null, doc.v
+        if (applied) { model.emit('applyMetaOp', docName, path, value); }
+        return (typeof callback === 'function' ? callback(null, doc.v) : undefined);
+      }
+    });
+  };

-  # Listen to all ops from the specified version. If version is in the past, all
-  # ops since that version are sent immediately to the listener.
-  #
-  # The callback is called once the listener is attached, but before any ops have been passed
-  # to the listener.
-  #
-  # This will _not_ edit the document metadata.
-  #
-  # If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour
-  # might change in a future version.
-  #
-  # version is the document version at which the document is opened. It can be left out if you want to open
-  # the document at the most recent version.
-  #
-  # listener is called with (opData) each time an op is applied.
-  #
-  # callback(error, openedVersion)
-  @listen = (docName, version, listener, callback) ->
-    [version, listener, callback] = [null, version, listener] if typeof version is 'function'
+  // Listen to all ops from the specified version. If version is in the past, all
+  // ops since that version are sent immediately to the listener.
+  //
+  // The callback is called once the listener is attached, but before any ops have been passed
+  // to the listener.
+  //
+  // This will _not_ edit the document metadata.
+  //
+  // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour
+  // might change in a future version.
+  //
+  // version is the document version at which the document is opened. It can be left out if you want to open
+  // the document at the most recent version.
+  //
+  // listener is called with (opData) each time an op is applied.
+  //
+  // callback(error, openedVersion)
+  this.listen = function(docName, version, listener, callback) {
+    if (typeof version === 'function') { [version, listener, callback] = Array.from([null, version, listener]); }

-    load docName, (error, doc) ->
-      return callback? error if error
+    return load(docName, function(error, doc) {
+      if (error) { return (typeof callback === 'function' ? callback(error) : undefined); }

-      clearTimeout doc.reapTimer
+      clearTimeout(doc.reapTimer);

-      if version?
-        getOps docName, version, null, (error, data) ->
-          return callback?
error if error + if (version != null) { + return getOps(docName, version, null, function(error, data) { + if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } - doc.eventEmitter.on 'op', listener - callback? null, version - for op in data - listener op + doc.eventEmitter.on('op', listener); + if (typeof callback === 'function') { + callback(null, version); + } + return (() => { + const result = []; + for (let op of Array.from(data)) { + var needle; + listener(op); - # The listener may well remove itself during the catchup phase. If this happens, break early. - # This is done in a quite inefficient way. (O(n) where n = #listeners on doc) - break unless listener in doc.eventEmitter.listeners 'op' + // The listener may well remove itself during the catchup phase. If this happens, break early. + // This is done in a quite inefficient way. (O(n) where n = #listeners on doc) + if ((needle = listener, !Array.from(doc.eventEmitter.listeners('op')).includes(needle))) { break; } else { + result.push(undefined); + } + } + return result; + })(); + }); - else # Version is null / undefined. Just add the listener. - doc.eventEmitter.on 'op', listener - callback? null, doc.v + } else { // Version is null / undefined. Just add the listener. + doc.eventEmitter.on('op', listener); + return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + } + }); + }; - # Remove a listener for a particular document. - # - # removeListener(docName, listener) - # - # This is synchronous. - @removeListener = (docName, listener) -> - # The document should already be loaded. - doc = docs[docName] - throw new Error 'removeListener called but document not loaded' unless doc + // Remove a listener for a particular document. + // + // removeListener(docName, listener) + // + // This is synchronous. + this.removeListener = function(docName, listener) { + // The document should already be loaded. + const doc = docs[docName]; + if (!doc) { throw new Error('removeListener called but document not loaded'); } - doc.eventEmitter.removeListener 'op', listener - refreshReapingTimeout docName + doc.eventEmitter.removeListener('op', listener); + return refreshReapingTimeout(docName); + }; - # Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - - # sharejs will happily replay uncommitted ops when documents are re-opened anyway. - @flush = (callback) -> - return callback?() unless db + // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - + // sharejs will happily replay uncommitted ops when documents are re-opened anyway. + this.flush = function(callback) { + if (!db) { return (typeof callback === 'function' ? callback() : undefined); } - pendingWrites = 0 + let pendingWrites = 0; - for docName, doc of docs - if doc.committedVersion < doc.v - pendingWrites++ - # I'm hoping writeSnapshot will always happen in another thread. - tryWriteSnapshot docName, -> - process.nextTick -> - pendingWrites-- - callback?() if pendingWrites is 0 + for (let docName in docs) { + const doc = docs[docName]; + if (doc.committedVersion < doc.v) { + pendingWrites++; + // I'm hoping writeSnapshot will always happen in another thread. + tryWriteSnapshot(docName, () => process.nextTick(function() { + pendingWrites--; + if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } + })); + } + } - # If nothing was queued, terminate immediately. 
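
The pendingWrites countdown in flush() is the classic fan-in pattern: start N asynchronous writes, then fire the callback exactly once when the count returns to zero. A standalone sketch (hypothetical names, not part of the patch):

    // Run one task per key in parallel; call done() once, after all complete.
    function fanIn(keys, task, done) {
      let pending = 0;
      for (const key of keys) {
        pending++;
        task(key, () => process.nextTick(() => {
          if (--pending === 0) { done(); }
        }));
      }
      if (pending === 0) { done(); }  // nothing was queued
    }
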
- callback?() if pendingWrites is 0 + // If nothing was queued, terminate immediately. + if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } + }; - # Close the database connection. This is needed so nodejs can shut down cleanly. - @closeDb = -> - db?.close?() - db = null + // Close the database connection. This is needed so nodejs can shut down cleanly. + this.closeDb = function() { + __guardMethod__(db, 'close', o => o.close()); + return db = null; + }; - return +}); -# Model inherits from EventEmitter. -Model:: = new EventEmitter +// Model inherits from EventEmitter. +Model.prototype = new EventEmitter; + +function __guardMethod__(obj, methodName, transform) { + if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') { + return transform(obj, methodName); + } else { + return undefined; + } +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/sharejs/types/simple.js b/services/document-updater/app/coffee/sharejs/types/simple.js index 996b1a5ddc..57c4934f73 100644 --- a/services/document-updater/app/coffee/sharejs/types/simple.js +++ b/services/document-updater/app/coffee/sharejs/types/simple.js @@ -1,38 +1,48 @@ -# This is a really simple OT type. Its not compiled with the web client, but it could be. -# -# Its mostly included for demonstration purposes and its used in a lot of unit tests. -# -# This defines a really simple text OT type which only allows inserts. (No deletes). -# -# Ops look like: -# {position:#, text:"asdf"} -# -# Document snapshots look like: -# {str:string} +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This is a really simple OT type. Its not compiled with the web client, but it could be. +// +// Its mostly included for demonstration purposes and its used in a lot of unit tests. +// +// This defines a really simple text OT type which only allows inserts. (No deletes). +// +// Ops look like: +// {position:#, text:"asdf"} +// +// Document snapshots look like: +// {str:string} -module.exports = - # The name of the OT type. The type is stored in types[type.name]. The name can be - # used in place of the actual type in all the API methods. - name: 'simple' +module.exports = { + // The name of the OT type. The type is stored in types[type.name]. The name can be + // used in place of the actual type in all the API methods. + name: 'simple', - # Create a new document snapshot - create: -> {str:""} + // Create a new document snapshot + create() { return {str:""}; }, - # Apply the given op to the document snapshot. Returns the new snapshot. - # - # The original snapshot should not be modified. - apply: (snapshot, op) -> - throw new Error 'Invalid position' unless 0 <= op.position <= snapshot.str.length + // Apply the given op to the document snapshot. Returns the new snapshot. + // + // The original snapshot should not be modified. + apply(snapshot, op) { + if (!(0 <= op.position && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); } - str = snapshot.str - str = str.slice(0, op.position) + op.text + str.slice(op.position) - {str} + let { + str + } = snapshot; + str = str.slice(0, op.position) + op.text + str.slice(op.position); + return {str}; + }, - # transform op1 by op2. Return transformed version of op1. - # sym describes the symmetry of the op. 
Its 'left' or 'right' depending on whether the - # op being transformed comes from the client or the server. - transform: (op1, op2, sym) -> - pos = op1.position - pos += op2.text.length if op2.position < pos or (op2.position == pos and sym is 'left') + // transform op1 by op2. Return transformed version of op1. + // sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the + // op being transformed comes from the client or the server. + transform(op1, op2, sym) { + let pos = op1.position; + if ((op2.position < pos) || ((op2.position === pos) && (sym === 'left'))) { pos += op2.text.length; } - return {position:pos, text:op1.text} + return {position:pos, text:op1.text}; + } +}; diff --git a/services/document-updater/app/coffee/sharejs/types/syncqueue.js b/services/document-updater/app/coffee/sharejs/types/syncqueue.js index 746450b010..31b2235ee3 100644 --- a/services/document-updater/app/coffee/sharejs/types/syncqueue.js +++ b/services/document-updater/app/coffee/sharejs/types/syncqueue.js @@ -1,42 +1,52 @@ -# A synchronous processing queue. The queue calls process on the arguments, -# ensuring that process() is only executing once at a time. -# -# process(data, callback) _MUST_ eventually call its callback. -# -# Example: -# -# queue = require 'syncqueue' -# -# fn = queue (data, callback) -> -# asyncthing data, -> -# callback(321) -# -# fn(1) -# fn(2) -# fn(3, (result) -> console.log(result)) -# -# ^--- async thing will only be running once at any time. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A synchronous processing queue. The queue calls process on the arguments, +// ensuring that process() is only executing once at a time. +// +// process(data, callback) _MUST_ eventually call its callback. +// +// Example: +// +// queue = require 'syncqueue' +// +// fn = queue (data, callback) -> +// asyncthing data, -> +// callback(321) +// +// fn(1) +// fn(2) +// fn(3, (result) -> console.log(result)) +// +// ^--- async thing will only be running once at any time. -module.exports = (process) -> - throw new Error('process is not a function') unless typeof process == 'function' - queue = [] +module.exports = function(process) { + if (typeof process !== 'function') { throw new Error('process is not a function'); } + const queue = []; - enqueue = (data, callback) -> - queue.push [data, callback] - flush() + const enqueue = function(data, callback) { + queue.push([data, callback]); + return flush(); + }; - enqueue.busy = false + enqueue.busy = false; - flush = -> - return if enqueue.busy or queue.length == 0 + var flush = function() { + if (enqueue.busy || (queue.length === 0)) { return; } - enqueue.busy = true - [data, callback] = queue.shift() - process data, (result...) -> # TODO: Make this not use varargs - varargs are really slow. - enqueue.busy = false - # This is called after busy = false so a user can check if enqueue.busy is set in the callback. - callback.apply null, result if callback - flush() + enqueue.busy = true; + const [data, callback] = Array.from(queue.shift()); + return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false; + // This is called after busy = false so a user can check if enqueue.busy is set in the callback. 
+ if (callback) { callback.apply(null, result); } + return flush(); + }); + }; - enqueue + return enqueue; +}; diff --git a/services/document-updater/app/coffee/sharejs/types/text-api.js b/services/document-updater/app/coffee/sharejs/types/text-api.js index 96243ceffb..295261ff90 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-api.js +++ b/services/document-updater/app/coffee/sharejs/types/text-api.js @@ -1,32 +1,44 @@ -# Text document API for text +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text -text = require './text' if typeof WEB is 'undefined' +let text; +if (typeof WEB === 'undefined') { text = require('./text'); } -text.api = - provides: {text:true} +text.api = { + provides: {text:true}, - # The number of characters in the string - getLength: -> @snapshot.length + // The number of characters in the string + getLength() { return this.snapshot.length; }, - # Get the text contents of a document - getText: -> @snapshot + // Get the text contents of a document + getText() { return this.snapshot; }, - insert: (pos, text, callback) -> - op = [{p:pos, i:text}] + insert(pos, text, callback) { + const op = [{p:pos, i:text}]; - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - del: (pos, length, callback) -> - op = [{p:pos, d:@snapshot[pos...(pos + length)]}] + del(pos, length, callback) { + const op = [{p:pos, d:this.snapshot.slice(pos, (pos + length))}]; - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - _register: -> - @on 'remoteop', (op) -> - for component in op - if component.i != undefined - @emit 'insert', component.p, component.i - else - @emit 'delete', component.p, component.d + _register() { + return this.on('remoteop', function(op) { + return Array.from(op).map((component) => + component.i !== undefined ? + this.emit('insert', component.p, component.i) + : + this.emit('delete', component.p, component.d)); + }); + } +}; diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable-api.js b/services/document-updater/app/coffee/sharejs/types/text-composable-api.js index 7b27ac163a..160ab1c46e 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-composable-api.js +++ b/services/document-updater/app/coffee/sharejs/types/text-composable-api.js @@ -1,43 +1,64 @@ -# Text document API for text +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text -if WEB? 
- type = exports.types['text-composable'] -else - type = require './text-composable' +let type; +if (typeof WEB !== 'undefined' && WEB !== null) { + type = exports.types['text-composable']; +} else { + type = require('./text-composable'); +} -type.api = - provides: {'text':true} +type.api = { + provides: {'text':true}, - # The number of characters in the string - 'getLength': -> @snapshot.length + // The number of characters in the string + 'getLength'() { return this.snapshot.length; }, - # Get the text contents of a document - 'getText': -> @snapshot + // Get the text contents of a document + 'getText'() { return this.snapshot; }, - 'insert': (pos, text, callback) -> - op = type.normalize [pos, 'i':text, (@snapshot.length - pos)] + 'insert'(pos, text, callback) { + const op = type.normalize([pos, {'i':text}, (this.snapshot.length - pos)]); - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - 'del': (pos, length, callback) -> - op = type.normalize [pos, 'd':@snapshot[pos...(pos + length)], (@snapshot.length - pos - length)] + 'del'(pos, length, callback) { + const op = type.normalize([pos, {'d':this.snapshot.slice(pos, (pos + length))}, (this.snapshot.length - pos - length)]); - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - _register: -> - @on 'remoteop', (op) -> - pos = 0 - for component in op - if typeof component is 'number' - pos += component - else if component.i != undefined - @emit 'insert', pos, component.i - pos += component.i.length - else - # delete - @emit 'delete', pos, component.d - # We don't increment pos, because the position - # specified is after the delete has happened. + _register() { + return this.on('remoteop', function(op) { + let pos = 0; + return (() => { + const result = []; + for (let component of Array.from(op)) { + if (typeof component === 'number') { + result.push(pos += component); + } else if (component.i !== undefined) { + this.emit('insert', pos, component.i); + result.push(pos += component.i.length); + } else { + // delete + result.push(this.emit('delete', pos, component.d)); + } + } + return result; + })(); + }); + } +}; + // We don't increment pos, because the position + // specified is after the delete has happened. diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable.js b/services/document-updater/app/coffee/sharejs/types/text-composable.js index 992b567bf0..4f43f769cd 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-composable.js +++ b/services/document-updater/app/coffee/sharejs/types/text-composable.js @@ -1,261 +1,315 @@ -# An alternate composable implementation for text. This is much closer -# to the implementation used by google wave. -# -# Ops are lists of components which iterate over the whole document. -# Components are either: -# A number N: Skip N characters in the original document -# {i:'str'}: Insert 'str' at the current position in the document -# {d:'str'}: Delete 'str', which appears at the current position in the document -# -# Eg: [3, {i:'hi'}, 5, {d:'internet'}] -# -# Snapshots are strings. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// An alternate composable implementation for text. 
This is much closer +// to the implementation used by google wave. +// +// Ops are lists of components which iterate over the whole document. +// Components are either: +// A number N: Skip N characters in the original document +// {i:'str'}: Insert 'str' at the current position in the document +// {d:'str'}: Delete 'str', which appears at the current position in the document +// +// Eg: [3, {i:'hi'}, 5, {d:'internet'}] +// +// Snapshots are strings. -p = -> #require('util').debug -i = -> #require('util').inspect +let makeAppend; +const p = function() {}; //require('util').debug +const i = function() {}; //require('util').inspect -exports = if WEB? then {} else module.exports +const exports = (typeof WEB !== 'undefined' && WEB !== null) ? {} : module.exports; -exports.name = 'text-composable' +exports.name = 'text-composable'; -exports.create = -> '' +exports.create = () => ''; -# -------- Utility methods +// -------- Utility methods -checkOp = (op) -> - throw new Error('Op must be an array of components') unless Array.isArray(op) - last = null - for c in op - if typeof(c) == 'object' - throw new Error("Invalid op component: #{i c}") unless (c.i? && c.i.length > 0) or (c.d? && c.d.length > 0) - else - throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number' - throw new Error('Skip components must be a positive number') unless c > 0 - throw new Error('Adjacent skip components should be added') if typeof(last) == 'number' +const checkOp = function(op) { + if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); } + let last = null; + return (() => { + const result = []; + for (let c of Array.from(op)) { + if (typeof(c) === 'object') { + if (((c.i == null) || !(c.i.length > 0)) && ((c.d == null) || !(c.d.length > 0))) { throw new Error(`Invalid op component: ${i(c)}`); } + } else { + if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); } + if (!(c > 0)) { throw new Error('Skip components must be a positive number'); } + if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be added'); } + } - last = c + result.push(last = c); + } + return result; + })(); +}; -# Makes a function for appending components to a given op. -# Exported for the randomOpGenerator. -exports._makeAppend = makeAppend = (op) -> (component) -> - if component == 0 || component.i == '' || component.d == '' - return - else if op.length == 0 - op.push component - else if typeof(component) == 'number' && typeof(op[op.length - 1]) == 'number' - op[op.length - 1] += component - else if component.i? && op[op.length - 1].i? - op[op.length - 1].i += component.i - else if component.d? && op[op.length - 1].d? - op[op.length - 1].d += component.d - else - op.push component +// Makes a function for appending components to a given op. +// Exported for the randomOpGenerator. 
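
As a quick, hedged sanity check of the component format described at the top of this file (module path assumed; not part of the patch), applying the example op from the comment:

    const textComposable = require('./text-composable');
    // Skip 3 chars, insert 'hi', skip 5, then delete the text 'internet'.
    const op = [3, { i: 'hi' }, 5, { d: 'internet' }];
    console.log(textComposable.apply('abcdefghinternet', op));
    // -> 'abchidefgh'
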
+exports._makeAppend = (makeAppend = op => (function(component) { + if ((component === 0) || (component.i === '') || (component.d === '')) { + return; + } else if (op.length === 0) { + return op.push(component); + } else if ((typeof(component) === 'number') && (typeof(op[op.length - 1]) === 'number')) { + return op[op.length - 1] += component; + } else if ((component.i != null) && (op[op.length - 1].i != null)) { + return op[op.length - 1].i += component.i; + } else if ((component.d != null) && (op[op.length - 1].d != null)) { + return op[op.length - 1].d += component.d; + } else { + return op.push(component); + } +})); -# checkOp op +// checkOp op -# Makes 2 functions for taking components from the start of an op, and for peeking -# at the next op that could be taken. -makeTake = (op) -> - # The index of the next component to take - idx = 0 - # The offset into the component - offset = 0 +// Makes 2 functions for taking components from the start of an op, and for peeking +// at the next op that could be taken. +const makeTake = function(op) { + // The index of the next component to take + let idx = 0; + // The offset into the component + let offset = 0; - # Take up to length n from the front of op. If n is null, take the next - # op component. If indivisableField == 'd', delete components won't be separated. - # If indivisableField == 'i', insert components won't be separated. - take = (n, indivisableField) -> - return null if idx == op.length - #assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' + // Take up to length n from the front of op. If n is null, take the next + // op component. If indivisableField == 'd', delete components won't be separated. + // If indivisableField == 'i', insert components won't be separated. + const take = function(n, indivisableField) { + let c; + if (idx === op.length) { return null; } + //assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' - if typeof(op[idx]) == 'number' - if !n? or op[idx] - offset <= n - c = op[idx] - offset - ++idx; offset = 0 - c - else - offset += n - n - else - # Take from the string - field = if op[idx].i then 'i' else 'd' - c = {} - if !n? or op[idx][field].length - offset <= n or field == indivisableField - c[field] = op[idx][field][offset..] - ++idx; offset = 0 - else - c[field] = op[idx][field][offset...(offset + n)] - offset += n - c + if (typeof(op[idx]) === 'number') { + if ((n == null) || ((op[idx] - offset) <= n)) { + c = op[idx] - offset; + ++idx; offset = 0; + return c; + } else { + offset += n; + return n; + } + } else { + // Take from the string + const field = op[idx].i ? 'i' : 'd'; + c = {}; + if ((n == null) || ((op[idx][field].length - offset) <= n) || (field === indivisableField)) { + c[field] = op[idx][field].slice(offset); + ++idx; offset = 0; + } else { + c[field] = op[idx][field].slice(offset, (offset + n)); + offset += n; + } + return c; + } + }; - peekType = () -> - op[idx] + const peekType = () => op[idx]; - [take, peekType] + return [take, peekType]; +}; -# Find and return the length of an op component -componentLength = (component) -> - if typeof(component) == 'number' - component - else if component.i? 
- component.i.length - else - component.d.length +// Find and return the length of an op component +const componentLength = function(component) { + if (typeof(component) === 'number') { + return component; + } else if (component.i != null) { + return component.i.length; + } else { + return component.d.length; + } +}; -# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate -# adjacent inserts and deletes. -exports.normalize = (op) -> - newOp = [] - append = makeAppend newOp - append component for component in op - newOp +// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +// adjacent inserts and deletes. +exports.normalize = function(op) { + const newOp = []; + const append = makeAppend(newOp); + for (let component of Array.from(op)) { append(component); } + return newOp; +}; -# Apply the op to the string. Returns the new string. -exports.apply = (str, op) -> - p "Applying #{i op} to '#{str}'" - throw new Error('Snapshot should be a string') unless typeof(str) == 'string' - checkOp op +// Apply the op to the string. Returns the new string. +exports.apply = function(str, op) { + p(`Applying ${i(op)} to '${str}'`); + if (typeof(str) !== 'string') { throw new Error('Snapshot should be a string'); } + checkOp(op); - pos = 0 - newDoc = [] + const pos = 0; + const newDoc = []; - for component in op - if typeof(component) == 'number' - throw new Error('The op is too long for this document') if component > str.length - newDoc.push str[...component] - str = str[component..] - else if component.i? - newDoc.push component.i - else - throw new Error("The deleted text '#{component.d}' doesn't match the next characters in the document '#{str[...component.d.length]}'") unless component.d == str[...component.d.length] - str = str[component.d.length..] + for (let component of Array.from(op)) { + if (typeof(component) === 'number') { + if (component > str.length) { throw new Error('The op is too long for this document'); } + newDoc.push(str.slice(0, component)); + str = str.slice(component); + } else if (component.i != null) { + newDoc.push(component.i); + } else { + if (component.d !== str.slice(0, component.d.length)) { throw new Error(`The deleted text '${component.d}' doesn't match the next characters in the document '${str.slice(0, component.d.length)}'`); } + str = str.slice(component.d.length); + } + } - throw new Error("The applied op doesn't traverse the entire document") unless '' == str + if ('' !== str) { throw new Error("The applied op doesn't traverse the entire document"); } - newDoc.join '' + return newDoc.join(''); +}; -# transform op1 by op2. Return transformed version of op1. -# op1 and op2 are unchanged by transform. -exports.transform = (op, otherOp, side) -> - throw new Error "side (#{side} must be 'left' or 'right'" unless side == 'left' or side == 'right' +// transform op1 by op2. Return transformed version of op1. +// op1 and op2 are unchanged by transform. 
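
Before the converted implementation below, a hedged illustration of the side argument (values chosen for illustration; module path assumed, not part of the patch):

    const textComposable = require('./text-composable');
    // Two concurrent inserts at position 0: 'left' keeps its insert first,
    // while 'right' skips over the other client's insert before inserting.
    textComposable.transform([{ i: 'a' }], [{ i: 'b' }], 'left');
    // -> [ { i: 'a' }, 1 ]
    textComposable.transform([{ i: 'b' }], [{ i: 'a' }], 'right');
    // -> [ 1, { i: 'b' } ]
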
+exports.transform = function(op, otherOp, side) { + let component; + if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side} must be 'left' or 'right'`); } - checkOp op - checkOp otherOp - newOp = [] + checkOp(op); + checkOp(otherOp); + const newOp = []; - append = makeAppend newOp - [take, peek] = makeTake op + const append = makeAppend(newOp); + const [take, peek] = Array.from(makeTake(op)); - for component in otherOp - if typeof(component) == 'number' # Skip - length = component - while length > 0 - chunk = take(length, 'i') - throw new Error('The op traverses more elements than the document has') unless chunk != null + for (component of Array.from(otherOp)) { + var chunk, length; + if (typeof(component) === 'number') { // Skip + length = component; + while (length > 0) { + chunk = take(length, 'i'); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - append chunk - length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.i? - else if component.i? # Insert - if side == 'left' - # The left insert should go first. - o = peek() - append take() if o?.i + append(chunk); + if ((typeof(chunk) !== 'object') || (chunk.i == null)) { length -= componentLength(chunk); } + } + } else if (component.i != null) { // Insert + if (side === 'left') { + // The left insert should go first. + const o = peek(); + if (o != null ? o.i : undefined) { append(take()); } + } - # Otherwise, skip the inserted text. - append(component.i.length) - else # Delete. - #assert.ok component.d - length = component.d.length - while length > 0 - chunk = take(length, 'i') - throw new Error('The op traverses more elements than the document has') unless chunk != null + // Otherwise, skip the inserted text. + append(component.i.length); + } else { // Delete. + //assert.ok component.d + ({ + length + } = component.d); + while (length > 0) { + chunk = take(length, 'i'); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - if typeof(chunk) == 'number' - length -= chunk - else if chunk.i? - append(chunk) - else - #assert.ok chunk.d - # The delete is unnecessary now. - length -= chunk.d.length + if (typeof(chunk) === 'number') { + length -= chunk; + } else if (chunk.i != null) { + append(chunk); + } else { + //assert.ok chunk.d + // The delete is unnecessary now. + length -= chunk.d.length; + } + } + } + } - # Append extras from op1 - while (component = take()) - throw new Error "Remaining fragments in the op: #{i component}" unless component?.i? - append component + // Append extras from op1 + while (component = take()) { + if ((component != null ? component.i : undefined) == null) { throw new Error(`Remaining fragments in the op: ${i(component)}`); } + append(component); + } - newOp + return newOp; +}; -# Compose 2 ops into 1 op. -exports.compose = (op1, op2) -> - p "COMPOSE #{i op1} + #{i op2}" - checkOp op1 - checkOp op2 +// Compose 2 ops into 1 op. 
+exports.compose = function(op1, op2) { + let component; + p(`COMPOSE ${i(op1)} + ${i(op2)}`); + checkOp(op1); + checkOp(op2); - result = [] + const result = []; - append = makeAppend result - [take, _] = makeTake op1 + const append = makeAppend(result); + const [take, _] = Array.from(makeTake(op1)); - for component in op2 - if typeof(component) == 'number' # Skip - length = component - while length > 0 - chunk = take(length, 'd') - throw new Error('The op traverses more elements than the document has') unless chunk != null + for (component of Array.from(op2)) { + var chunk, length; + if (typeof(component) === 'number') { // Skip + length = component; + while (length > 0) { + chunk = take(length, 'd'); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - append chunk - length -= componentLength chunk unless typeof(chunk) == 'object' && chunk.d? + append(chunk); + if ((typeof(chunk) !== 'object') || (chunk.d == null)) { length -= componentLength(chunk); } + } - else if component.i? # Insert - append {i:component.i} + } else if (component.i != null) { // Insert + append({i:component.i}); - else # Delete - offset = 0 - while offset < component.d.length - chunk = take(component.d.length - offset, 'd') - throw new Error('The op traverses more elements than the document has') unless chunk != null + } else { // Delete + let offset = 0; + while (offset < component.d.length) { + chunk = take(component.d.length - offset, 'd'); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - # If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. - if typeof(chunk) == 'number' - append {d:component.d[offset...(offset + chunk)]} - offset += chunk - else if chunk.i? - throw new Error("The deleted text doesn't match the inserted text") unless component.d[offset...(offset + chunk.i.length)] == chunk.i - offset += chunk.i.length - # The ops cancel each other out. - else - # Delete - append chunk + // If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. + if (typeof(chunk) === 'number') { + append({d:component.d.slice(offset, (offset + chunk))}); + offset += chunk; + } else if (chunk.i != null) { + if (component.d.slice(offset, (offset + chunk.i.length)) !== chunk.i) { throw new Error("The deleted text doesn't match the inserted text"); } + offset += chunk.i.length; + // The ops cancel each other out. + } else { + // Delete + append(chunk); + } + } + } + } - # Append extras from op1 - while (component = take()) - throw new Error "Trailing stuff in op1 #{i component}" unless component?.d? - append component + // Append extras from op1 + while (component = take()) { + if ((component != null ? component.d : undefined) == null) { throw new Error(`Trailing stuff in op1 ${i(component)}`); } + append(component); + } - result + return result; +}; -invertComponent = (c) -> - if typeof(c) == 'number' - c - else if c.i? 
- {d:c.i} - else - {i:c.d} +const invertComponent = function(c) { + if (typeof(c) === 'number') { + return c; + } else if (c.i != null) { + return {d:c.i}; + } else { + return {i:c.d}; + } +}; -# Invert an op -exports.invert = (op) -> - result = [] - append = makeAppend result +// Invert an op +exports.invert = function(op) { + const result = []; + const append = makeAppend(result); - append(invertComponent component) for component in op + for (let component of Array.from(op)) { append(invertComponent(component)); } - result + return result; +}; -if window? - window.ot ||= {} - window.ot.types ||= {} - window.ot.types.text = exports +if (typeof window !== 'undefined' && window !== null) { + if (!window.ot) { window.ot = {}; } + if (!window.ot.types) { window.ot.types = {}; } + window.ot.types.text = exports; +} diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js b/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js index d661b5ae37..e3f4f95ea6 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js +++ b/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js @@ -1,89 +1,118 @@ -# Text document API for text-tp2 +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text-tp2 -if WEB? - type = exports.types['text-tp2'] -else - type = require './text-tp2' +let type; +if (typeof WEB !== 'undefined' && WEB !== null) { + type = exports.types['text-tp2']; +} else { + type = require('./text-tp2'); +} -{_takeDoc:takeDoc, _append:append} = type +const {_takeDoc:takeDoc, _append:append} = type; -appendSkipChars = (op, doc, pos, maxlength) -> - while (maxlength == undefined || maxlength > 0) and pos.index < doc.data.length - part = takeDoc doc, pos, maxlength, true - maxlength -= part.length if maxlength != undefined and typeof part is 'string' - append op, (part.length || part) +const appendSkipChars = (op, doc, pos, maxlength) => (() => { + const result = []; + while (((maxlength === undefined) || (maxlength > 0)) && (pos.index < doc.data.length)) { + const part = takeDoc(doc, pos, maxlength, true); + if ((maxlength !== undefined) && (typeof part === 'string')) { maxlength -= part.length; } + result.push(append(op, (part.length || part))); + } + return result; +})(); -type['api'] = - 'provides': {'text':true} +type['api'] = { + 'provides': {'text':true}, - # The number of characters in the string - 'getLength': -> @snapshot.charLength + // The number of characters in the string + 'getLength'() { return this.snapshot.charLength; }, - # Flatten a document into a string - 'getText': -> - strings = (elem for elem in @snapshot.data when typeof elem is 'string') - strings.join '' + // Flatten a document into a string + 'getText'() { + const strings = (Array.from(this.snapshot.data).filter((elem) => typeof elem === 'string')); + return strings.join(''); + }, - 'insert': (pos, text, callback) -> - pos = 0 if pos == undefined + 'insert'(pos, text, callback) { + if (pos === undefined) { pos = 0; } - op = [] - docPos = {index:0, offset:0} + const op = []; + const docPos = {index:0, offset:0}; - appendSkipChars op, @snapshot, docPos, pos - append op, {'i':text} - appendSkipChars op, @snapshot, docPos + 
appendSkipChars(op, this.snapshot, docPos, pos); + append(op, {'i':text}); + appendSkipChars(op, this.snapshot, docPos); - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - 'del': (pos, length, callback) -> - op = [] - docPos = {index:0, offset:0} + 'del'(pos, length, callback) { + const op = []; + const docPos = {index:0, offset:0}; - appendSkipChars op, @snapshot, docPos, pos + appendSkipChars(op, this.snapshot, docPos, pos); - while length > 0 - part = takeDoc @snapshot, docPos, length, true - if typeof part is 'string' - append op, {'d':part.length} - length -= part.length - else - append op, part + while (length > 0) { + const part = takeDoc(this.snapshot, docPos, length, true); + if (typeof part === 'string') { + append(op, {'d':part.length}); + length -= part.length; + } else { + append(op, part); + } + } - appendSkipChars op, @snapshot, docPos + appendSkipChars(op, this.snapshot, docPos); - @submitOp op, callback - op + this.submitOp(op, callback); + return op; + }, - '_register': -> - # Interpret recieved ops + generate more detailed events for them - @on 'remoteop', (op, snapshot) -> - textPos = 0 - docPos = {index:0, offset:0} + '_register'() { + // Interpret received ops + generate more detailed events for them + return this.on('remoteop', function(op, snapshot) { + let textPos = 0; + const docPos = {index:0, offset:0}; - for component in op - if typeof component is 'number' - # Skip - remainder = component - while remainder > 0 - part = takeDoc snapshot, docPos, remainder - if typeof part is 'string' - textPos += part.length - remainder -= part.length || part - else if component.i != undefined - # Insert - if typeof component.i is 'string' - @emit 'insert', textPos, component.i - textPos += component.i.length - else - # Delete - remainder = component.d - while remainder > 0 - part = takeDoc snapshot, docPos, remainder - if typeof part is 'string' - @emit 'delete', textPos, part - remainder -= part.length || part + for (let component of Array.from(op)) { + var part, remainder; + if (typeof component === 'number') { + // Skip + remainder = component; + while (remainder > 0) { + part = takeDoc(snapshot, docPos, remainder); + if (typeof part === 'string') { + textPos += part.length; + } + remainder -= part.length || part; + } + } else if (component.i !== undefined) { + // Insert + if (typeof component.i === 'string') { + this.emit('insert', textPos, component.i); + textPos += component.i.length; + } + } else { + // Delete + remainder = component.d; + while (remainder > 0) { + part = takeDoc(snapshot, docPos, remainder); + if (typeof part === 'string') { + this.emit('delete', textPos, part); + } + remainder -= part.length || part; + } + } + } - return + }); + } +}; diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2.js b/services/document-updater/app/coffee/sharejs/types/text-tp2.js index d19cbdcef4..ab123d6ff7 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-tp2.js +++ b/services/document-updater/app/coffee/sharejs/types/text-tp2.js @@ -1,322 +1,398 @@ -# A TP2 implementation of text, following this spec: -# http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README -# -# A document is made up of a string and a set of tombstones inserted throughout -# the string. For example, 'some ', (2 tombstones), 'string'. -# -# This is encoded in a document as: {s:'some string', t:[5, -2, 6]} -# -# Ops are lists of components which iterate over the whole document.
-# Components are either: -# N: Skip N characters in the original document -# {i:'str'}: Insert 'str' at the current position in the document -# {i:N}: Insert N tombstones at the current position in the document -# {d:N}: Delete (tombstone) N characters at the current position in the document -# -# Eg: [3, {i:'hi'}, 5, {d:8}] -# -# Snapshots are lists with characters and tombstones. Characters are stored in strings -# and adjacent tombstones are flattened into numbers. -# -# Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters) -# would be represented by a document snapshot of ['Hello ', 5, 'world'] +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A TP2 implementation of text, following this spec: +// http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README +// +// A document is made up of a string and a set of tombstones inserted throughout +// the string. For example, 'some ', (2 tombstones), 'string'. +// +// This is encoded in a document as: {s:'some string', t:[5, -2, 6]} +// +// Ops are lists of components which iterate over the whole document. +// Components are either: +// N: Skip N characters in the original document +// {i:'str'}: Insert 'str' at the current position in the document +// {i:N}: Insert N tombstones at the current position in the document +// {d:N}: Delete (tombstone) N characters at the current position in the document +// +// Eg: [3, {i:'hi'}, 5, {d:8}] +// +// Snapshots are lists with characters and tombstones. Characters are stored in strings +// and adjacent tombstones are flattened into numbers. +// +// Eg, the document: 'Hello .....world' ('.' 
denotes tombstoned (deleted) characters) +// would be represented by a document snapshot of ['Hello ', 5, 'world'] -type = - name: 'text-tp2' - tp2: true - create: -> {charLength:0, totalLength:0, positionCache:[], data:[]} - serialize: (doc) -> - throw new Error 'invalid doc snapshot' unless doc.data - doc.data - deserialize: (data) -> - doc = type.create() - doc.data = data +let append, appendDoc, takeDoc; +var type = { + name: 'text-tp2', + tp2: true, + create() { return {charLength:0, totalLength:0, positionCache:[], data:[]}; }, + serialize(doc) { + if (!doc.data) { throw new Error('invalid doc snapshot'); } + return doc.data; + }, + deserialize(data) { + const doc = type.create(); + doc.data = data; - for component in data - if typeof component is 'string' - doc.charLength += component.length - doc.totalLength += component.length - else - doc.totalLength += component + for (let component of Array.from(data)) { + if (typeof component === 'string') { + doc.charLength += component.length; + doc.totalLength += component.length; + } else { + doc.totalLength += component; + } + } - doc + return doc; + } +}; -checkOp = (op) -> - throw new Error('Op must be an array of components') unless Array.isArray(op) - last = null - for c in op - if typeof(c) == 'object' - if c.i != undefined - throw new Error('Inserts must insert a string or a +ive number') unless (typeof(c.i) == 'string' and c.i.length > 0) or (typeof(c.i) == 'number' and c.i > 0) - else if c.d != undefined - throw new Error('Deletes must be a +ive number') unless typeof(c.d) == 'number' and c.d > 0 - else - throw new Error('Operation component must define .i or .d') - else - throw new Error('Op components must be objects or numbers') unless typeof(c) == 'number' - throw new Error('Skip components must be a positive number') unless c > 0 - throw new Error('Adjacent skip components should be combined') if typeof(last) == 'number' +const checkOp = function(op) { + if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); } + let last = null; + return (() => { + const result = []; + for (let c of Array.from(op)) { + if (typeof(c) === 'object') { + if (c.i !== undefined) { + if (((typeof(c.i) !== 'string') || !(c.i.length > 0)) && ((typeof(c.i) !== 'number') || !(c.i > 0))) { throw new Error('Inserts must insert a string or a +ive number'); } + } else if (c.d !== undefined) { + if ((typeof(c.d) !== 'number') || !(c.d > 0)) { throw new Error('Deletes must be a +ive number'); } + } else { + throw new Error('Operation component must define .i or .d'); + } + } else { + if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); } + if (!(c > 0)) { throw new Error('Skip components must be a positive number'); } + if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be combined'); } + } - last = c + result.push(last = c); + } + return result; + })(); +}; -# Take the next part from the specified position in a document snapshot. -# position = {index, offset}. It will be updated. -type._takeDoc = takeDoc = (doc, position, maxlength, tombsIndivisible) -> - throw new Error 'Operation goes past the end of the document' if position.index >= doc.data.length +// Take the next part from the specified position in a document snapshot. +// position = {index, offset}. It will be updated. 
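To make the cursor semantics concrete, here is a minimal sketch of walking a snapshot with the `_takeDoc` helper defined just below (illustrative only; it assumes this converted file is loadable as `./text-tp2`):

```js
// Illustrative sketch; assumes require('./text-tp2') resolves to this file.
// 'Hello .....world' is represented as the snapshot ['Hello ', 5, 'world'].
const type = require('./text-tp2');

const doc = type.deserialize(['Hello ', 5, 'world']);
const position = { index: 0, offset: 0 };

console.log(type._takeDoc(doc, position, 3)); // 'Hel'
console.log(position);                        // { index: 0, offset: 3 }
console.log(type._takeDoc(doc, position));    // 'lo ' (rest of the string part)
console.log(type._takeDoc(doc, position, 2)); // 2 (two of the five tombstones)
```

Each call peels off at most one part, so a string part and the tombstone run after it are always returned as separate chunks.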
+type._takeDoc = (takeDoc = function(doc, position, maxlength, tombsIndivisible) { + if (position.index >= doc.data.length) { throw new Error('Operation goes past the end of the document'); } - part = doc.data[position.index] - # peel off data[0] - result = if typeof(part) == 'string' - if maxlength != undefined - part[position.offset...(position.offset + maxlength)] - else - part[position.offset...] - else - if maxlength == undefined or tombsIndivisible + const part = doc.data[position.index]; + // peel off data[0] + const result = typeof(part) === 'string' ? + maxlength !== undefined ? + part.slice(position.offset, (position.offset + maxlength)) + : + part.slice(position.offset) + : + (maxlength === undefined) || tombsIndivisible ? part - position.offset - else - Math.min(maxlength, part - position.offset) + : + Math.min(maxlength, part - position.offset); - resultLen = result.length || result + const resultLen = result.length || result; - if (part.length || part) - position.offset > resultLen - position.offset += resultLen - else - position.index++ - position.offset = 0 + if (((part.length || part) - position.offset) > resultLen) { + position.offset += resultLen; + } else { + position.index++; + position.offset = 0; + } - result + return result; +}); -# Append a part to the end of a document -type._appendDoc = appendDoc = (doc, p) -> - return if p == 0 or p == '' +// Append a part to the end of a document +type._appendDoc = (appendDoc = function(doc, p) { + if ((p === 0) || (p === '')) { return; } - if typeof p is 'string' - doc.charLength += p.length - doc.totalLength += p.length - else - doc.totalLength += p + if (typeof p === 'string') { + doc.charLength += p.length; + doc.totalLength += p.length; + } else { + doc.totalLength += p; + } - data = doc.data - if data.length == 0 - data.push p - else if typeof(data[data.length - 1]) == typeof(p) - data[data.length - 1] += p - else - data.push p - return + const { + data + } = doc; + if (data.length === 0) { + data.push(p); + } else if (typeof(data[data.length - 1]) === typeof(p)) { + data[data.length - 1] += p; + } else { + data.push(p); + } +}); -# Apply the op to the document. The document is not modified in the process. -type.apply = (doc, op) -> - unless doc.totalLength != undefined and doc.charLength != undefined and doc.data.length != undefined - throw new Error('Snapshot is invalid') +// Apply the op to the document. The document is not modified in the process. 
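A minimal usage sketch of the `apply` defined just below (illustrative only, assuming the file is loadable as `./text-tp2`); note that an op must traverse the whole document, and tombstones count toward the traversal:

```js
// Illustrative sketch; assumes require('./text-tp2') resolves to this file.
const type = require('./text-tp2');

const doc = type.deserialize(['Hello ', 5, 'world']); // totalLength 16, charLength 11
// Skip 'Hello ' (6), insert 'there ', then traverse the remaining 10 (5 tombstones + 'world').
const op = [6, { i: 'there ' }, 10];

const newDoc = type.apply(doc, op);
console.log(newDoc.data); // ['Hello there ', 5, 'world']
console.log(doc.data);    // unchanged: ['Hello ', 5, 'world']
```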
+type.apply = function(doc, op) { + if ((doc.totalLength === undefined) || (doc.charLength === undefined) || (doc.data.length === undefined)) { + throw new Error('Snapshot is invalid'); + } - checkOp op + checkOp(op); - newDoc = type.create() - position = {index:0, offset:0} + const newDoc = type.create(); + const position = {index:0, offset:0}; - for component in op - if typeof(component) is 'number' - remainder = component - while remainder > 0 - part = takeDoc doc, position, remainder + for (let component of Array.from(op)) { + var part, remainder; + if (typeof(component) === 'number') { + remainder = component; + while (remainder > 0) { + part = takeDoc(doc, position, remainder); - appendDoc newDoc, part - remainder -= part.length || part + appendDoc(newDoc, part); + remainder -= part.length || part; + } - else if component.i != undefined - appendDoc newDoc, component.i - else if component.d != undefined - remainder = component.d - while remainder > 0 - part = takeDoc doc, position, remainder - remainder -= part.length || part - appendDoc newDoc, component.d + } else if (component.i !== undefined) { + appendDoc(newDoc, component.i); + } else if (component.d !== undefined) { + remainder = component.d; + while (remainder > 0) { + part = takeDoc(doc, position, remainder); + remainder -= part.length || part; + } + appendDoc(newDoc, component.d); + } + } - newDoc + return newDoc; +}; -# Append an op component to the end of the specified op. -# Exported for the randomOpGenerator. -type._append = append = (op, component) -> - if component == 0 || component.i == '' || component.i == 0 || component.d == 0 - return - else if op.length == 0 - op.push component - else - last = op[op.length - 1] - if typeof(component) == 'number' && typeof(last) == 'number' - op[op.length - 1] += component - else if component.i != undefined && last.i? && typeof(last.i) == typeof(component.i) - last.i += component.i - else if component.d != undefined && last.d? - last.d += component.d - else - op.push component +// Append an op component to the end of the specified op. +// Exported for the randomOpGenerator. +type._append = (append = function(op, component) { + if ((component === 0) || (component.i === '') || (component.i === 0) || (component.d === 0)) { + return; + } else if (op.length === 0) { + return op.push(component); + } else { + const last = op[op.length - 1]; + if ((typeof(component) === 'number') && (typeof(last) === 'number')) { + return op[op.length - 1] += component; + } else if ((component.i !== undefined) && (last.i != null) && (typeof(last.i) === typeof(component.i))) { + return last.i += component.i; + } else if ((component.d !== undefined) && (last.d != null)) { + return last.d += component.d; + } else { + return op.push(component); + } + } +}); -# Makes 2 functions for taking components from the start of an op, and for peeking -# at the next op that could be taken. -makeTake = (op) -> - # The index of the next component to take - index = 0 - # The offset into the component - offset = 0 +// Makes 2 functions for taking components from the start of an op, and for peeking +// at the next op that could be taken. +const makeTake = function(op) { + // The index of the next component to take + let index = 0; + // The offset into the component + let offset = 0; - # Take up to length maxlength from the op. If maxlength is not defined, there is no max. - # If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. - # - # Returns null when op is fully consumed. 
- take = (maxlength, insertsIndivisible) -> - return null if index == op.length + // Take up to length maxlength from the op. If maxlength is not defined, there is no max. + // If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. + // + // Returns null when op is fully consumed. + const take = function(maxlength, insertsIndivisible) { + let current; + if (index === op.length) { return null; } - e = op[index] - if typeof((current = e)) == 'number' or typeof((current = e.i)) == 'number' or (current = e.d) != undefined - if !maxlength? or current - offset <= maxlength or (insertsIndivisible and e.i != undefined) - # Return the rest of the current element. - c = current - offset - ++index; offset = 0 - else - offset += maxlength - c = maxlength - if e.i != undefined then {i:c} else if e.d != undefined then {d:c} else c - else - # Take from the inserted string - if !maxlength? or e.i.length - offset <= maxlength or insertsIndivisible - result = {i:e.i[offset..]} - ++index; offset = 0 - else - result = {i:e.i[offset...offset + maxlength]} - offset += maxlength - result + const e = op[index]; + if ((typeof((current = e)) === 'number') || (typeof((current = e.i)) === 'number') || ((current = e.d) !== undefined)) { + let c; + if ((maxlength == null) || ((current - offset) <= maxlength) || (insertsIndivisible && (e.i !== undefined))) { + // Return the rest of the current element. + c = current - offset; + ++index; offset = 0; + } else { + offset += maxlength; + c = maxlength; + } + if (e.i !== undefined) { return {i:c}; } else if (e.d !== undefined) { return {d:c}; } else { return c; } + } else { + // Take from the inserted string + let result; + if ((maxlength == null) || ((e.i.length - offset) <= maxlength) || insertsIndivisible) { + result = {i:e.i.slice(offset)}; + ++index; offset = 0; + } else { + result = {i:e.i.slice(offset, offset + maxlength)}; + offset += maxlength; + } + return result; + } + }; - peekType = -> op[index] + const peekType = () => op[index]; - [take, peekType] + return [take, peekType]; +}; -# Find and return the length of an op component -componentLength = (component) -> - if typeof(component) == 'number' - component - else if typeof(component.i) == 'string' - component.i.length - else - # This should work because c.d and c.i must be +ive. - component.d or component.i +// Find and return the length of an op component +const componentLength = function(component) { + if (typeof(component) === 'number') { + return component; + } else if (typeof(component.i) === 'string') { + return component.i.length; + } else { + // This should work because c.d and c.i must be +ive. + return component.d || component.i; + } +}; -# Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate -# adjacent inserts and deletes. -type.normalize = (op) -> - newOp = [] - append newOp, component for component in op - newOp +// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +// adjacent inserts and deletes. +type.normalize = function(op) { + const newOp = []; + for (let component of Array.from(op)) { append(newOp, component); } + return newOp; +}; -# This is a helper method to transform and prune. goForwards is true for transform, false for prune. -transformer = (op, otherOp, goForwards, side) -> - checkOp op - checkOp otherOp - newOp = [] +// This is a helper method to transform and prune. goForwards is true for transform, false for prune. 
+const transformer = function(op, otherOp, goForwards, side) { + let component; + checkOp(op); + checkOp(otherOp); + const newOp = []; - [take, peek] = makeTake op + const [take, peek] = Array.from(makeTake(op)); - for component in otherOp - length = componentLength component + for (component of Array.from(otherOp)) { + var chunk; + let length = componentLength(component); - if component.i != undefined # Insert text or tombs - if goForwards # transform - insert skips over inserted parts - if side == 'left' - # The left insert should go first. - append newOp, take() while peek()?.i != undefined + if (component.i !== undefined) { // Insert text or tombs + if (goForwards) { // transform - insert skips over inserted parts + if (side === 'left') { + // The left insert should go first. + while (__guard__(peek(), x => x.i) !== undefined) { append(newOp, take()); } + } - # In any case, skip the inserted text. - append newOp, length + // In any case, skip the inserted text. + append(newOp, length); - else # Prune. Remove skips for inserts. - while length > 0 - chunk = take length, true + } else { // Prune. Remove skips for inserts. + while (length > 0) { + chunk = take(length, true); - throw new Error 'The transformed op is invalid' unless chunk != null - throw new Error 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' if chunk.d != undefined + if (chunk === null) { throw new Error('The transformed op is invalid'); } + if (chunk.d !== undefined) { throw new Error('The transformed op deletes locally inserted characters - it cannot be purged of the insert.'); } - if typeof chunk is 'number' - length -= chunk - else - append newOp, chunk + if (typeof chunk === 'number') { + length -= chunk; + } else { + append(newOp, chunk); + } + } + } - else # Skip or delete - while length > 0 - chunk = take length, true - throw new Error('The op traverses more elements than the document has') unless chunk != null + } else { // Skip or delete + while (length > 0) { + chunk = take(length, true); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - append newOp, chunk - length -= componentLength chunk unless chunk.i + append(newOp, chunk); + if (!chunk.i) { length -= componentLength(chunk); } + } + } + } - # Append extras from op1 - while (component = take()) - throw new Error "Remaining fragments in the op: #{component}" unless component.i != undefined - append newOp, component + // Append extras from op1 + while (component = take()) { + if (component.i === undefined) { throw new Error(`Remaining fragments in the op: ${component}`); } + append(newOp, component); + } - newOp + return newOp; +}; -# transform op1 by op2. Return transformed version of op1. -# op1 and op2 are unchanged by transform. -# side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. -type.transform = (op, otherOp, side) -> - throw new Error "side (#{side}) should be 'left' or 'right'" unless side == 'left' or side == 'right' - transformer op, otherOp, true, side +// transform op1 by op2. Return transformed version of op1. +// op1 and op2 are unchanged by transform. +// side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. +type.transform = function(op, otherOp, side) { + if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side}) should be 'left' or 'right'`); } + return transformer(op, otherOp, true, side); +}; -# Prune is the inverse of transform. 
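As a quick illustration of the `transform` converted just above (a sketch only, assuming the file is loadable as `./text-tp2`), here are two concurrent inserts at different positions; `side` only breaks ties when the inserts collide at the same position:

```js
// Illustrative sketch; assumes require('./text-tp2') resolves to this file.
const type = require('./text-tp2');

// Both ops were generated against a 3-character document ('abc').
const opA = [1, { i: 'X' }, 2]; // insert 'X' after the first character
const opB = [2, { i: 'Y' }, 1]; // insert 'Y' after the second character

// Rewrite opA so it applies after opB: the trailing skip grows by opB's insert.
console.log(type.transform(opA, opB, 'left')); // [1, {i:'X'}, 3]
```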
-type.prune = (op, otherOp) -> transformer op, otherOp, false +// Prune is the inverse of transform. +type.prune = (op, otherOp) => transformer(op, otherOp, false); -# Compose 2 ops into 1 op. -type.compose = (op1, op2) -> - return op2 if op1 == null or op1 == undefined +// Compose 2 ops into 1 op. +type.compose = function(op1, op2) { + let component; + if ((op1 === null) || (op1 === undefined)) { return op2; } - checkOp op1 - checkOp op2 + checkOp(op1); + checkOp(op2); - result = [] + const result = []; - [take, _] = makeTake op1 + const [take, _] = Array.from(makeTake(op1)); - for component in op2 + for (component of Array.from(op2)) { - if typeof(component) == 'number' # Skip - # Just copy from op1. - length = component - while length > 0 - chunk = take length - throw new Error('The op traverses more elements than the document has') unless chunk != null + var chunk, length; + if (typeof(component) === 'number') { // Skip + // Just copy from op1. + length = component; + while (length > 0) { + chunk = take(length); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - append result, chunk - length -= componentLength chunk + append(result, chunk); + length -= componentLength(chunk); + } - else if component.i != undefined # Insert - append result, {i:component.i} + } else if (component.i !== undefined) { // Insert + append(result, {i:component.i}); - else # Delete - length = component.d - while length > 0 - chunk = take length - throw new Error('The op traverses more elements than the document has') unless chunk != null + } else { // Delete + length = component.d; + while (length > 0) { + chunk = take(length); + if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - chunkLength = componentLength chunk - if chunk.i != undefined - append result, {i:chunkLength} - else - append result, {d:chunkLength} + const chunkLength = componentLength(chunk); + if (chunk.i !== undefined) { + append(result, {i:chunkLength}); + } else { + append(result, {d:chunkLength}); + } - length -= chunkLength + length -= chunkLength; + } + } + } - # Append extras from op1 - while (component = take()) - throw new Error "Remaining fragments in op1: #{component}" unless component.i != undefined - append result, component + // Append extras from op1 + while (component = take()) { + if (component.i === undefined) { throw new Error(`Remaining fragments in op1: ${component}`); } + append(result, component); + } - result + return result; +}; -if WEB? - exports.types['text-tp2'] = type -else - module.exports = type +if (typeof WEB !== 'undefined' && WEB !== null) { + exports.types['text-tp2'] = type; +} else { + module.exports = type; +} + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/services/document-updater/app/coffee/sharejs/types/text.js b/services/document-updater/app/coffee/sharejs/types/text.js index 2a3b79997d..fed546d10f 100644 --- a/services/document-updater/app/coffee/sharejs/types/text.js +++ b/services/document-updater/app/coffee/sharejs/types/text.js @@ -1,263 +1,305 @@ -# A simple text implementation -# -# Operations are lists of components. -# Each component either inserts or deletes at a specified position in the document. 
-# -# Components are either: -# {i:'str', p:100}: Insert 'str' at position 100 in the document -# {d:'str', p:100}: Delete 'str' at position 100 in the document -# -# Components in an operation are executed sequentially, so the position of components -# assumes previous components have already executed. -# -# Eg: This op: -# [{i:'abc', p:0}] -# is equivalent to this op: -# [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}] +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A simple text implementation +// +// Operations are lists of components. +// Each component either inserts or deletes at a specified position in the document. +// +// Components are either: +// {i:'str', p:100}: Insert 'str' at position 100 in the document +// {d:'str', p:100}: Delete 'str' at position 100 in the document +// +// Components in an operation are executed sequentially, so the position of components +// assumes previous components have already executed. +// +// Eg: This op: +// [{i:'abc', p:0}] +// is equivalent to this op: +// [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}] -# NOTE: The global scope here is shared with other sharejs files when built with closure. -# Be careful what ends up in your namespace. +// NOTE: The global scope here is shared with other sharejs files when built with closure. +// Be careful what ends up in your namespace. -text = {} +let append, transformComponent; +const text = {}; -text.name = 'text' +text.name = 'text'; -text.create = -> '' +text.create = () => ''; -strInject = (s1, pos, s2) -> s1[...pos] + s2 + s1[pos..] +const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos); -checkValidComponent = (c) -> - throw new Error 'component missing position field' if typeof c.p != 'number' +const checkValidComponent = function(c) { + if (typeof c.p !== 'number') { throw new Error('component missing position field'); } - i_type = typeof c.i - d_type = typeof c.d - c_type = typeof c.c - throw new Error 'component needs an i, d or c field' unless (i_type == 'string') ^ (d_type == 'string') ^ (c_type == 'string') + const i_type = typeof c.i; + const d_type = typeof c.d; + const c_type = typeof c.c; + if (!((i_type === 'string') ^ (d_type === 'string') ^ (c_type === 'string'))) { throw new Error('component needs an i, d or c field'); } - throw new Error 'position cannot be negative' unless c.p >= 0 + if (!(c.p >= 0)) { throw new Error('position cannot be negative'); } +}; -checkValidOp = (op) -> - checkValidComponent(c) for c in op - true +const checkValidOp = function(op) { + for (let c of Array.from(op)) { checkValidComponent(c); } + return true; +}; -text.apply = (snapshot, op) -> - checkValidOp op - for component in op - if component.i? - snapshot = strInject snapshot, component.p, component.i - else if component.d? - deleted = snapshot[component.p...(component.p + component.d.length)] - throw new Error "Delete component '#{component.d}' does not match deleted text '#{deleted}'" unless component.d == deleted - snapshot = snapshot[...component.p] + snapshot[(component.p + component.d.length)..] - else if component.c? 
- comment = snapshot[component.p...(component.p + component.c.length)] - throw new Error "Comment component '#{component.c}' does not match commented text '#{comment}'" unless component.c == comment - else - throw new Error "Unknown op type" - snapshot +text.apply = function(snapshot, op) { + checkValidOp(op); + for (let component of Array.from(op)) { + if (component.i != null) { + snapshot = strInject(snapshot, component.p, component.i); + } else if (component.d != null) { + const deleted = snapshot.slice(component.p, (component.p + component.d.length)); + if (component.d !== deleted) { throw new Error(`Delete component '${component.d}' does not match deleted text '${deleted}'`); } + snapshot = snapshot.slice(0, component.p) + snapshot.slice((component.p + component.d.length)); + } else if (component.c != null) { + const comment = snapshot.slice(component.p, (component.p + component.c.length)); + if (component.c !== comment) { throw new Error(`Comment component '${component.c}' does not match commented text '${comment}'`); } + } else { + throw new Error("Unknown op type"); + } + } + return snapshot; +}; -# Exported for use by the random op generator. -# -# For simplicity, this version of append does not compress adjacent inserts and deletes of -# the same text. It would be nice to change that at some stage. -text._append = append = (newOp, c) -> - return if c.i == '' or c.d == '' - if newOp.length == 0 - newOp.push c - else - last = newOp[newOp.length - 1] +// Exported for use by the random op generator. +// +// For simplicity, this version of append does not compress adjacent inserts and deletes of +// the same text. It would be nice to change that at some stage. +text._append = (append = function(newOp, c) { + if ((c.i === '') || (c.d === '')) { return; } + if (newOp.length === 0) { + return newOp.push(c); + } else { + const last = newOp[newOp.length - 1]; - # Compose the insert into the previous insert if possible - if last.i? && c.i? and last.p <= c.p <= (last.p + last.i.length) - newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p} - else if last.d? && c.d? and c.p <= last.p <= (c.p + c.d.length) - newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p} - else - newOp.push c + // Compose the insert into the previous insert if possible + if ((last.i != null) && (c.i != null) && (last.p <= c.p && c.p <= (last.p + last.i.length))) { + return newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p}; + } else if ((last.d != null) && (c.d != null) && (c.p <= last.p && last.p <= (c.p + c.d.length))) { + return newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p}; + } else { + return newOp.push(c); + } + } +}); -text.compose = (op1, op2) -> - checkValidOp op1 - checkValidOp op2 +text.compose = function(op1, op2) { + checkValidOp(op1); + checkValidOp(op2); - newOp = op1.slice() - append newOp, c for c in op2 + const newOp = op1.slice(); + for (let c of Array.from(op2)) { append(newOp, c); } - newOp + return newOp; +}; -# Attempt to compress the op components together 'as much as possible'. -# This implementation preserves order and preserves create/delete pairs. -text.compress = (op) -> text.compose [], op +// Attempt to compress the op components together 'as much as possible'. +// This implementation preserves order and preserves create/delete pairs. 
+text.compress = op => text.compose([], op); -text.normalize = (op) -> - newOp = [] +text.normalize = function(op) { + const newOp = []; - # Normalize should allow ops which are a single (unwrapped) component: - # {i:'asdf', p:23}. - # There's no good way to test if something is an array: - # http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/ - # so this is probably the least bad solution. - op = [op] if op.i? or op.p? + // Normalize should allow ops which are a single (unwrapped) component: + // {i:'asdf', p:23}. + // There's no good way to test if something is an array: + // http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/ + // so this is probably the least bad solution. + if ((op.i != null) || (op.p != null)) { op = [op]; } - for c in op - c.p ?= 0 - append newOp, c + for (let c of Array.from(op)) { + if (c.p == null) { c.p = 0; } + append(newOp, c); + } - newOp + return newOp; +}; -# This helper method transforms a position by an op component. -# -# If c is an insert, insertAfter specifies whether the transform -# is pushed after the insert (true) or before it (false). -# -# insertAfter is optional for deletes. -transformPosition = (pos, c, insertAfter) -> - if c.i? - if c.p < pos || (c.p == pos && insertAfter) - pos + c.i.length - else - pos - else if c.d? - # I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) - # but I think its harder to read that way, and it compiles using ternary operators anyway - # so its no slower written like this. - if pos <= c.p - pos - else if pos <= c.p + c.d.length - c.p - else - pos - c.d.length - else if c.c? - pos - else - throw new Error("unknown op type") +// This helper method transforms a position by an op component. +// +// If c is an insert, insertAfter specifies whether the transform +// is pushed after the insert (true) or before it (false). +// +// insertAfter is optional for deletes. +const transformPosition = function(pos, c, insertAfter) { + if (c.i != null) { + if ((c.p < pos) || ((c.p === pos) && insertAfter)) { + return pos + c.i.length; + } else { + return pos; + } + } else if (c.d != null) { + // I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) + // but I think it's harder to read that way, and it compiles using ternary operators anyway + // so it's no slower written like this. + if (pos <= c.p) { + return pos; + } else if (pos <= (c.p + c.d.length)) { + return c.p; + } else { + return pos - c.d.length; + } + } else if (c.c != null) { + return pos; + } else { + throw new Error("unknown op type"); + } +}; -# Helper method to transform a cursor position as a result of an op. -# -# Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position -# is pushed after an insert (true) or before it (false). +// Helper method to transform a cursor position as a result of an op. +// +// Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position +// is pushed after an insert (true) or before it (false).
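A minimal sketch of this simple text type in use (illustrative only, assuming the file is loadable as `./text`; `transformCursor` is defined just below):

```js
// Illustrative sketch; assumes require('./text') resolves to this file.
const text = require('./text');

// Components apply sequentially, each against the already-updated snapshot.
let snapshot = text.apply('hello world', [{ i: 'big ', p: 6 }]); // 'hello big world'
snapshot = text.apply(snapshot, [{ d: 'big ', p: 6 }]);          // 'hello world'

// A concurrent insert at position 2 pushes a cursor at position 5 to 8...
console.log(text.transformCursor(5, [{ i: 'abc', p: 2 }], 'right')); // 8
// ...and a concurrent delete of 'ell' at position 1 pulls it back to 2.
console.log(text.transformCursor(5, [{ d: 'ell', p: 1 }], 'right')); // 2
```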
+text.transformCursor = function(position, op, side) { + const insertAfter = side === 'right'; + for (let c of Array.from(op)) { position = transformPosition(position, c, insertAfter); } + return position; +}; -# Transform an op component by another op component. Asymmetric. -# The result will be appended to destination. -# -# exported for use in JSON type -text._tc = transformComponent = (dest, c, otherC, side) -> - checkValidOp [c] - checkValidOp [otherC] +// Transform an op component by another op component. Asymmetric. +// The result will be appended to destination. +// +// exported for use in JSON type +text._tc = (transformComponent = function(dest, c, otherC, side) { + let cIntersect, intersectEnd, intersectStart, newC, otherIntersect; + checkValidOp([c]); + checkValidOp([otherC]); - if c.i? - append dest, {i:c.i, p:transformPosition(c.p, otherC, side == 'right')} + if (c.i != null) { + append(dest, {i:c.i, p:transformPosition(c.p, otherC, side === 'right')}); - else if c.d? # Delete - if otherC.i? # delete vs insert - s = c.d - if c.p < otherC.p - append dest, {d:s[...otherC.p - c.p], p:c.p} - s = s[(otherC.p - c.p)..] - if s != '' - append dest, {d:s, p:c.p + otherC.i.length} + } else if (c.d != null) { // Delete + if (otherC.i != null) { // delete vs insert + let s = c.d; + if (c.p < otherC.p) { + append(dest, {d:s.slice(0, otherC.p - c.p), p:c.p}); + s = s.slice((otherC.p - c.p)); + } + if (s !== '') { + append(dest, {d:s, p:c.p + otherC.i.length}); + } - else if otherC.d? # Delete vs delete - if c.p >= otherC.p + otherC.d.length - append dest, {d:c.d, p:c.p - otherC.d.length} - else if c.p + c.d.length <= otherC.p - append dest, c - else - # They overlap somewhere. - newC = {d:'', p:c.p} - if c.p < otherC.p - newC.d = c.d[...(otherC.p - c.p)] - if c.p + c.d.length > otherC.p + otherC.d.length - newC.d += c.d[(otherC.p + otherC.d.length - c.p)..] + } else if (otherC.d != null) { // Delete vs delete + if (c.p >= (otherC.p + otherC.d.length)) { + append(dest, {d:c.d, p:c.p - otherC.d.length}); + } else if ((c.p + c.d.length) <= otherC.p) { + append(dest, c); + } else { + // They overlap somewhere. + newC = {d:'', p:c.p}; + if (c.p < otherC.p) { + newC.d = c.d.slice(0, (otherC.p - c.p)); + } + if ((c.p + c.d.length) > (otherC.p + otherC.d.length)) { + newC.d += c.d.slice(((otherC.p + otherC.d.length) - c.p)); + } - # This is entirely optional - just for a check that the deleted - # text in the two ops matches - intersectStart = Math.max c.p, otherC.p - intersectEnd = Math.min c.p + c.d.length, otherC.p + otherC.d.length - cIntersect = c.d[intersectStart - c.p...intersectEnd - c.p] - otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p] - throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect + // This is entirely optional - just for a check that the deleted + // text in the two ops matches + intersectStart = Math.max(c.p, otherC.p); + intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length); + cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p); + otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p); + if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); } - if newC.d != '' - # This could be rewritten similarly to insert v delete, above. 
- newC.p = transformPosition newC.p, otherC - append dest, newC + if (newC.d !== '') { + // This could be rewritten similarly to insert v delete, above. + newC.p = transformPosition(newC.p, otherC); + append(dest, newC); + } + } - else if otherC.c? - append dest, c + } else if (otherC.c != null) { + append(dest, c); - else - throw new Error("unknown op type") + } else { + throw new Error("unknown op type"); + } - else if c.c? # Comment - if otherC.i? - if c.p < otherC.p < c.p + c.c.length - offset = otherC.p - c.p - new_c = (c.c[0..(offset-1)] + otherC.i + c.c[offset...]) - append dest, {c:new_c, p:c.p, t: c.t} - else - append dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t} + } else if (c.c != null) { // Comment + if (otherC.i != null) { + if (c.p < otherC.p && otherC.p < c.p + c.c.length) { + const offset = otherC.p - c.p; + const new_c = (c.c.slice(0, +(offset-1) + 1 || undefined) + otherC.i + c.c.slice(offset)); + append(dest, {c:new_c, p:c.p, t: c.t}); + } else { + append(dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t}); + } - else if otherC.d? - if c.p >= otherC.p + otherC.d.length - append dest, {c:c.c, p:c.p - otherC.d.length, t: c.t} - else if c.p + c.c.length <= otherC.p - append dest, c - else # Delete overlaps comment - # They overlap somewhere. - newC = {c:'', p:c.p, t: c.t} - if c.p < otherC.p - newC.c = c.c[...(otherC.p - c.p)] - if c.p + c.c.length > otherC.p + otherC.d.length - newC.c += c.c[(otherC.p + otherC.d.length - c.p)..] + } else if (otherC.d != null) { + if (c.p >= (otherC.p + otherC.d.length)) { + append(dest, {c:c.c, p:c.p - otherC.d.length, t: c.t}); + } else if ((c.p + c.c.length) <= otherC.p) { + append(dest, c); + } else { // Delete overlaps comment + // They overlap somewhere. + newC = {c:'', p:c.p, t: c.t}; + if (c.p < otherC.p) { + newC.c = c.c.slice(0, (otherC.p - c.p)); + } + if ((c.p + c.c.length) > (otherC.p + otherC.d.length)) { + newC.c += c.c.slice(((otherC.p + otherC.d.length) - c.p)); + } - # This is entirely optional - just for a check that the deleted - # text in the two ops matches - intersectStart = Math.max c.p, otherC.p - intersectEnd = Math.min c.p + c.c.length, otherC.p + otherC.d.length - cIntersect = c.c[intersectStart - c.p...intersectEnd - c.p] - otherIntersect = otherC.d[intersectStart - otherC.p...intersectEnd - otherC.p] - throw new Error 'Delete ops delete different text in the same region of the document' unless cIntersect == otherIntersect + // This is entirely optional - just for a check that the deleted + // text in the two ops matches + intersectStart = Math.max(c.p, otherC.p); + intersectEnd = Math.min(c.p + c.c.length, otherC.p + otherC.d.length); + cIntersect = c.c.slice(intersectStart - c.p, intersectEnd - c.p); + otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p); + if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); } - newC.p = transformPosition newC.p, otherC - append dest, newC + newC.p = transformPosition(newC.p, otherC); + append(dest, newC); + } - else if otherC.c? - append dest, c + } else if (otherC.c != null) { + append(dest, c); - else - throw new Error("unknown op type") + } else { + throw new Error("unknown op type"); + } + } - dest + return dest; +}); -invertComponent = (c) -> - if c.i? 
- {d:c.i, p:c.p} - else - {i:c.d, p:c.p} +const invertComponent = function(c) { + if (c.i != null) { + return {d:c.i, p:c.p}; + } else { + return {i:c.d, p:c.p}; + } +}; -# No need to use append for invert, because the components won't be able to -# cancel with one another. -text.invert = (op) -> (invertComponent c for c in op.slice().reverse()) +// No need to use append for invert, because the components won't be able to +// cancel with one another. +text.invert = op => Array.from(op.slice().reverse()).map((c) => invertComponent(c)); -if WEB? - exports.types ||= {} +if (typeof WEB !== 'undefined' && WEB !== null) { + if (!exports.types) { exports.types = {}; } - # This is kind of awful - come up with a better way to hook this helper code up. - bootstrapTransform(text, transformComponent, checkValidOp, append) + // This is kind of awful - come up with a better way to hook this helper code up. + bootstrapTransform(text, transformComponent, checkValidOp, append); - # [] is used to prevent closure from renaming types.text - exports.types.text = text -else - module.exports = text + // [] is used to prevent closure from renaming types.text + exports.types.text = text; +} else { + module.exports = text; - # The text type really shouldn't need this - it should be possible to define - # an efficient transform function by making a sort of transform map and passing each - # op component through it. - require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append) + // The text type really shouldn't need this - it should be possible to define + // an efficient transform function by making a sort of transform map and passing each + // op component through it. + require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append); +} diff --git a/services/document-updater/app/coffee/sharejs/types/web-prelude.js b/services/document-updater/app/coffee/sharejs/types/web-prelude.js index 3c045532dc..b7252728e9 100644 --- a/services/document-updater/app/coffee/sharejs/types/web-prelude.js +++ b/services/document-updater/app/coffee/sharejs/types/web-prelude.js @@ -1,11 +1,11 @@ -# This is included at the top of each compiled type file for the web. +// This is included at the top of each compiled type file for the web. -`/** +/** @const @type {boolean} */ -var WEB = true; -` +const WEB = true; -exports = window['sharejs'] + +const exports = window['sharejs']; diff --git a/services/document-updater/app/coffee/sharejs/web-prelude.js b/services/document-updater/app/coffee/sharejs/web-prelude.js index 3c045532dc..b7252728e9 100644 --- a/services/document-updater/app/coffee/sharejs/web-prelude.js +++ b/services/document-updater/app/coffee/sharejs/web-prelude.js @@ -1,11 +1,11 @@ -# This is included at the top of each compiled type file for the web. +// This is included at the top of each compiled type file for the web. 
-`/** +/** @const @type {boolean} */ -var WEB = true; -` +const WEB = true; -exports = window['sharejs'] + +const exports = window['sharejs']; From dad1d1212f42fe9d70dc6edd3bc2712f6a585266 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:09:15 +0200 Subject: [PATCH 611/769] decaffeinate: Run post-processing cleanups on DeleteQueueManager.coffee and 58 other files --- .../app/coffee/DeleteQueueManager.js | 9 ++++- .../document-updater/app/coffee/DiffCodec.js | 11 +++++- .../app/coffee/DispatchManager.js | 9 ++++- .../app/coffee/DocumentManager.js | 9 ++++- .../document-updater/app/coffee/Errors.js | 6 +++ .../app/coffee/HistoryManager.js | 6 +++ .../app/coffee/HistoryRedisManager.js | 7 ++++ .../app/coffee/HttpController.js | 10 ++++- .../app/coffee/LockManager.js | 7 ++++ .../app/coffee/LoggerSerializers.js | 5 +++ .../document-updater/app/coffee/Metrics.js | 2 + .../app/coffee/PersistenceManager.js | 8 ++++ .../document-updater/app/coffee/Profiler.js | 7 +++- .../app/coffee/ProjectFlusher.js | 10 ++++- .../app/coffee/ProjectHistoryRedisManager.js | 10 ++++- .../app/coffee/ProjectManager.js | 13 +++++-- .../app/coffee/RangesManager.js | 16 +++++--- .../app/coffee/RangesTracker.js | 38 +++++++++++-------- .../app/coffee/RateLimitManager.js | 5 +++ .../app/coffee/RealTimeRedisManager.js | 6 +++ .../app/coffee/RedisManager.js | 8 +++- .../document-updater/app/coffee/ShareJsDB.js | 6 +++ .../app/coffee/ShareJsUpdateManager.js | 7 ++++ .../app/coffee/SnapshotManager.js | 10 ++++- .../document-updater/app/coffee/UpdateKeys.js | 5 +++ .../app/coffee/UpdateManager.js | 13 +++++-- .../document-updater/app/coffee/mongojs.js | 2 + .../app/coffee/sharejs/count.js | 2 + .../app/coffee/sharejs/helpers.js | 14 +++++-- .../app/coffee/sharejs/index.js | 2 + .../app/coffee/sharejs/json-api.js | 13 +++++-- .../app/coffee/sharejs/json.js | 15 ++++++-- .../app/coffee/sharejs/model.js | 19 +++++++--- .../app/coffee/sharejs/server/model.js | 19 +++++++--- .../app/coffee/sharejs/server/syncqueue.js | 2 + .../app/coffee/sharejs/simple.js | 4 +- .../app/coffee/sharejs/syncqueue.js | 2 + .../app/coffee/sharejs/text-api.js | 2 + .../app/coffee/sharejs/text-composable-api.js | 7 +++- .../app/coffee/sharejs/text-composable.js | 30 +++++++++------ .../app/coffee/sharejs/text-tp2-api.js | 9 ++++- .../app/coffee/sharejs/text-tp2.js | 18 ++++++--- .../app/coffee/sharejs/text.js | 17 ++++++--- .../app/coffee/sharejs/types/count.js | 2 + .../app/coffee/sharejs/types/helpers.js | 14 +++++-- .../app/coffee/sharejs/types/index.js | 2 + .../app/coffee/sharejs/types/json-api.js | 13 +++++-- .../app/coffee/sharejs/types/json.js | 15 ++++++-- .../app/coffee/sharejs/types/model.js | 19 +++++++--- .../app/coffee/sharejs/types/simple.js | 4 +- .../app/coffee/sharejs/types/syncqueue.js | 2 + .../app/coffee/sharejs/types/text-api.js | 2 + .../sharejs/types/text-composable-api.js | 7 +++- .../coffee/sharejs/types/text-composable.js | 30 +++++++++------ .../app/coffee/sharejs/types/text-tp2-api.js | 9 ++++- .../app/coffee/sharejs/types/text-tp2.js | 18 ++++++--- .../app/coffee/sharejs/types/text.js | 17 ++++++--- .../app/coffee/sharejs/types/web-prelude.js | 7 +++- .../app/coffee/sharejs/web-prelude.js | 7 +++- 59 files changed, 456 insertions(+), 132 deletions(-) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.js b/services/document-updater/app/coffee/DeleteQueueManager.js index 2b6230100a..a970f5825f 100644 --- a/services/document-updater/app/coffee/DeleteQueueManager.js +++ 
b/services/document-updater/app/coffee/DeleteQueueManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -41,7 +48,7 @@ module.exports = (DeleteQueueManager = { return cb(); } // are any of the timestamps newer than the time the project was flushed? - for (let timestamp of Array.from(timestamps)) { + for (const timestamp of Array.from(timestamps)) { if (timestamp > flushTimestamp) { metrics.inc("queued-delete-skipped"); logger.debug({project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete"); diff --git a/services/document-updater/app/coffee/DiffCodec.js b/services/document-updater/app/coffee/DiffCodec.js index c5c99b7acc..8b87cee1d3 100644 --- a/services/document-updater/app/coffee/DiffCodec.js +++ b/services/document-updater/app/coffee/DiffCodec.js @@ -1,3 +1,12 @@ +/* eslint-disable + camelcase, + handle-callback-err, + new-cap, + no-throw-literal, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -23,7 +32,7 @@ module.exports = (DiffCodec = { const ops = []; let position = 0; - for (let diff of Array.from(diffs)) { + for (const diff of Array.from(diffs)) { const type = diff[0]; const content = diff[1]; if (type === this.ADDED) { diff --git a/services/document-updater/app/coffee/DispatchManager.js b/services/document-updater/app/coffee/DispatchManager.js index 3bf343dd2e..8f11378a9c 100644 --- a/services/document-updater/app/coffee/DispatchManager.js +++ b/services/document-updater/app/coffee/DispatchManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -71,7 +78,7 @@ module.exports = (DispatchManager = { const RateLimiter = new RateLimitManager(number); return (() => { const result = []; - for (let i = 1, end = number, asc = 1 <= end; asc ? i <= end : i >= end; asc ? i++ : i--) { + for (let i = 1, end = number, asc = end >= 1; asc ? i <= end : i >= end; asc ? i++ : i--) { const worker = DispatchManager.createDispatcher(RateLimiter); result.push(worker.run()); } diff --git a/services/document-updater/app/coffee/DocumentManager.js b/services/document-updater/app/coffee/DocumentManager.js index c5a9ebb3d1..d1e3e3ca07 100644 --- a/services/document-updater/app/coffee/DocumentManager.js +++ b/services/document-updater/app/coffee/DocumentManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -96,7 +103,7 @@ module.exports = (DocumentManager = { return DiffCodec.diffAsShareJsOp(oldLines, newLines, function(error, op) { if (error != null) { return callback(error); } if (undoing) { - for (let o of Array.from(op || [])) { + for (const o of Array.from(op || [])) { o.u = true; } // Turn on undo flag for each op for track changes } diff --git a/services/document-updater/app/coffee/Errors.js b/services/document-updater/app/coffee/Errors.js index a8cb2efb1d..d4bbfb7acc 100644 --- a/services/document-updater/app/coffee/Errors.js +++ b/services/document-updater/app/coffee/Errors.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-proto, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. let Errors; var NotFoundError = function(message) { const error = new Error(message); diff --git a/services/document-updater/app/coffee/HistoryManager.js b/services/document-updater/app/coffee/HistoryManager.js index ac9ba9a706..457e8247fb 100644 --- a/services/document-updater/app/coffee/HistoryManager.js +++ b/services/document-updater/app/coffee/HistoryManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/app/coffee/HistoryRedisManager.js b/services/document-updater/app/coffee/HistoryRedisManager.js index 6e2aba403c..a63d80b11c 100644 --- a/services/document-updater/app/coffee/HistoryRedisManager.js +++ b/services/document-updater/app/coffee/HistoryRedisManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/app/coffee/HttpController.js b/services/document-updater/app/coffee/HttpController.js index dfc749eeb9..4dc1622b43 100644 --- a/services/document-updater/app/coffee/HttpController.js +++ b/services/document-updater/app/coffee/HttpController.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -58,7 +64,7 @@ module.exports = (HttpController = { _getTotalSizeOfLines(lines) { let size = 0; - for (let line of Array.from(lines)) { + for (const line of Array.from(lines)) { size += (line.length + 1); } return size; @@ -75,7 +81,7 @@ module.exports = (HttpController = { logger.log({project_id, exclude: excludeItems}, "getting docs via http"); const timer = new Metrics.Timer("http.getAllDocs"); const excludeVersions = {}; - for (let item of Array.from(excludeItems)) { + for (const item of Array.from(excludeItems)) { const [id,version] = Array.from(item != null ? 
item.split(':') : undefined); excludeVersions[id] = version; } diff --git a/services/document-updater/app/coffee/LockManager.js b/services/document-updater/app/coffee/LockManager.js index 2b278c31e4..a861ed8607 100644 --- a/services/document-updater/app/coffee/LockManager.js +++ b/services/document-updater/app/coffee/LockManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/app/coffee/LoggerSerializers.js b/services/document-updater/app/coffee/LoggerSerializers.js index 87696abf3a..bd55383fd3 100644 --- a/services/document-updater/app/coffee/LoggerSerializers.js +++ b/services/document-updater/app/coffee/LoggerSerializers.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/app/coffee/Metrics.js b/services/document-updater/app/coffee/Metrics.js index 8a46f7aa83..d5bfb88492 100644 --- a/services/document-updater/app/coffee/Metrics.js +++ b/services/document-updater/app/coffee/Metrics.js @@ -1 +1,3 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. module.exports = require("metrics-sharelatex"); \ No newline at end of file diff --git a/services/document-updater/app/coffee/PersistenceManager.js b/services/document-updater/app/coffee/PersistenceManager.js index f981f6bf90..2a9e5e9d04 100644 --- a/services/document-updater/app/coffee/PersistenceManager.js +++ b/services/document-updater/app/coffee/PersistenceManager.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unsafe-negation, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/Profiler.js b/services/document-updater/app/coffee/Profiler.js index 2ca3484496..23e480bea8 100644 --- a/services/document-updater/app/coffee/Profiler.js +++ b/services/document-updater/app/coffee/Profiler.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS206: Consider reworking classes to avoid initClass @@ -39,7 +44,7 @@ module.exports = (Profiler = (function() { const totalTime = deltaMs(this.t, this.t0); if (totalTime > this.LOG_CUTOFF_TIME) { // log anything greater than cutoff const args = {}; - for (let k in this.args) { + for (const k in this.args) { const v = this.args[k]; args[k] = v; } diff --git a/services/document-updater/app/coffee/ProjectFlusher.js b/services/document-updater/app/coffee/ProjectFlusher.js index d42eb59531..ef7bb834c2 100644 --- a/services/document-updater/app/coffee/ProjectFlusher.js +++ b/services/document-updater/app/coffee/ProjectFlusher.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
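The Profiler hunk above shows the "log anything greater than cutoff" pattern: a shallow copy of the recorded args is logged only when the total elapsed time passes LOG_CUTOFF_TIME. A sketch under the assumption that timestamps are plain milliseconds and the logger is console; deltaMs and the cutoff value are stand-ins for the real class members:

const LOG_CUTOFF_TIME = 1000; // assumed cutoff in ms
const deltaMs = (t, t0) => t - t0;

function maybeLogProfile(t0, t, args) {
  const totalTime = deltaMs(t, t0);
  if (totalTime > LOG_CUTOFF_TIME) { // log anything greater than cutoff
    const copy = {};
    for (const k in args) {
      copy[k] = args[k]; // shallow-copy the profiling args, as in the hunk
    }
    console.log({ totalTime, ...copy }, "slow operation");
  }
}

maybeLogProfile(0, 1500, { project_id: "abc" }); // logs: 1500 > cutoff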
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -39,7 +45,7 @@ var ProjectFlusher = { let keys; if (error != null) { return callback(error); } [cursor, keys] = Array.from(reply); - for (let key of Array.from(keys)) { + for (const key of Array.from(keys)) { keySet[key] = true; } keys = Object.keys(keySet); @@ -59,7 +65,7 @@ var ProjectFlusher = { _extractIds(keyList) { const ids = (() => { const result = []; - for (let key of Array.from(keyList)) { + for (const key of Array.from(keyList)) { const m = key.match(/:\{?([0-9a-f]{24})\}?/); // extract object id result.push(m[1]); } diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.js b/services/document-updater/app/coffee/ProjectHistoryRedisManager.js index cccacba2d2..de19542227 100644 --- a/services/document-updater/app/coffee/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/coffee/ProjectHistoryRedisManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -17,8 +23,8 @@ const metrics = require('./Metrics'); module.exports = (ProjectHistoryRedisManager = { queueOps(project_id, ...rest) { // Record metric for ops pushed onto queue - const adjustedLength = Math.max(rest.length, 1), ops = rest.slice(0, adjustedLength - 1), val = rest[adjustedLength - 1], callback = val != null ? val : function(error, projectUpdateCount) {}; - for (let op of Array.from(ops)) { + const adjustedLength = Math.max(rest.length, 1); const ops = rest.slice(0, adjustedLength - 1); const val = rest[adjustedLength - 1]; const callback = val != null ? val : function(error, projectUpdateCount) {}; + for (const op of Array.from(ops)) { metrics.summary("redis.projectHistoryOps", op.length, {status: "push"}); } const multi = rclient.multi(); diff --git a/services/document-updater/app/coffee/ProjectManager.js b/services/document-updater/app/coffee/ProjectManager.js index 8b45b7d32d..deac1e451a 100644 --- a/services/document-updater/app/coffee/ProjectManager.js +++ b/services/document-updater/app/coffee/ProjectManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
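The ProjectFlusher hunks above first dedupe scanned keys through a keySet object (Redis SCAN may return the same key more than once) and then pull a 24-hex-character object id out of each key, allowing for optional {braces} from cluster hash tags. A sketch combining both steps; the key names below are made up for illustration:

function extractIds(keyList) {
  const keySet = {};
  for (const key of keyList) {
    keySet[key] = true; // SCAN can return duplicates, so dedupe first
  }
  const ids = [];
  for (const key of Object.keys(keySet)) {
    const m = key.match(/:\{?([0-9a-f]{24})\}?/); // extract object id
    if (m) ids.push(m[1]);
  }
  return ids;
}

console.log(extractIds([
  "someKey:{5f0c1ffe0000000000000001}",
  "someKey:{5f0c1ffe0000000000000001}", // duplicate from SCAN
]));
// => ["5f0c1ffe0000000000000001"]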
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -28,7 +35,7 @@ module.exports = (ProjectManager = { if (error != null) { return callback(error); } const jobs = []; const errors = []; - for (let doc_id of Array.from((doc_ids || []))) { + for (const doc_id of Array.from((doc_ids || []))) { ((doc_id => jobs.push(callback => DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function(error) { if ((error != null) && error instanceof Errors.NotFoundError) { logger.warn({err: error, project_id, doc_id}, "found deleted doc when flushing"); @@ -66,7 +73,7 @@ module.exports = (ProjectManager = { if (error != null) { return callback(error); } const jobs = []; const errors = []; - for (let doc_id of Array.from((doc_ids || []))) { + for (const doc_id of Array.from((doc_ids || []))) { ((doc_id => jobs.push(callback => DocumentManager.flushAndDeleteDocWithLock(project_id, doc_id, {}, function(error) { if (error != null) { logger.error({err: error, project_id, doc_id}, "error deleting doc"); @@ -142,7 +149,7 @@ module.exports = (ProjectManager = { return callback(error); } const jobs = []; - for (let doc_id of Array.from(doc_ids || [])) { + for (const doc_id of Array.from(doc_ids || [])) { ((doc_id => jobs.push(cb => // get the doc lines from redis DocumentManager.getDocAndFlushIfOldWithLock(project_id, doc_id, function(err, lines, version) { if (err != null) { diff --git a/services/document-updater/app/coffee/RangesManager.js b/services/document-updater/app/coffee/RangesManager.js index 83523f33b5..f5890f5ad3 100644 --- a/services/document-updater/app/coffee/RangesManager.js +++ b/services/document-updater/app/coffee/RangesManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -22,12 +28,12 @@ module.exports = (RangesManager = { const {changes, comments} = _.cloneDeep(entries); const rangesTracker = new RangesTracker(changes, comments); const emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker); - for (let update of Array.from(updates)) { + for (const update of Array.from(updates)) { rangesTracker.track_changes = !!update.meta.tc; - if (!!update.meta.tc) { + if (update.meta.tc) { rangesTracker.setIdSeed(update.meta.tc); } - for (let op of Array.from(update.op)) { + for (const op of Array.from(update.op)) { try { rangesTracker.applyOp(op, { user_id: (update.meta != null ? update.meta.user_id : undefined) }); } catch (error1) { @@ -95,12 +101,12 @@ module.exports = (RangesManager = { _emptyRangesCount(ranges) { let count = 0; - for (let comment of Array.from((ranges.comments || []))) { + for (const comment of Array.from((ranges.comments || []))) { if (comment.op.c === "") { count++; } } - for (let change of Array.from((ranges.changes || []))) { + for (const change of Array.from((ranges.changes || []))) { if (change.op.i != null) { if (change.op.i === "") { count++; diff --git a/services/document-updater/app/coffee/RangesTracker.js b/services/document-updater/app/coffee/RangesTracker.js index de7e885c5c..80422ec54f 100644 --- a/services/document-updater/app/coffee/RangesTracker.js +++ b/services/document-updater/app/coffee/RangesTracker.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
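Most hunks in this patch only turn `let` into `const` on loop variables, but the ProjectManager hunks above also show why the per-doc IIFE wrapper exists: it dates from the var-based CoffeeScript compilation, where every queued callback would otherwise close over the same binding. With a block-scoped loop variable each iteration gets its own binding, as in this sketch (doc ids hypothetical):

const jobs = [];
for (const doc_id of ["doc-1", "doc-2", "doc-3"]) {
  // each iteration gets a fresh doc_id binding, so no IIFE is needed
  jobs.push(() => console.log("flushing", doc_id));
}
for (const job of jobs) job(); // flushing doc-1, doc-2, doc-3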
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -88,7 +96,7 @@ const load = function() { getComment(comment_id) { let comment = null; - for (let c of Array.from(this.comments)) { + for (const c of Array.from(this.comments)) { if (c.id === comment_id) { comment = c; break; @@ -107,7 +115,7 @@ const load = function() { moveCommentId(comment_id, position, text) { return (() => { const result = []; - for (let comment of Array.from(this.comments)) { + for (const comment of Array.from(this.comments)) { if (comment.id === comment_id) { comment.op.p = position; comment.op.c = text; @@ -122,7 +130,7 @@ const load = function() { getChange(change_id) { let change = null; - for (let c of Array.from(this.changes)) { + for (const c of Array.from(this.changes)) { if (c.id === change_id) { change = c; break; @@ -135,11 +143,11 @@ const load = function() { const changes_response = []; const ids_map = {}; - for (let change_id of Array.from(change_ids)) { + for (const change_id of Array.from(change_ids)) { ids_map[change_id] = true; } - for (let change of Array.from(this.changes)) { + for (const change of Array.from(this.changes)) { if (ids_map[change.id]) { delete ids_map[change.id]; changes_response.push(change); @@ -159,13 +167,13 @@ const load = function() { if (!(change_to_remove_ids != null ? change_to_remove_ids.length : undefined) > 0) { return; } const i = this.changes.length; const remove_change_id = {}; - for (let change_id of Array.from(change_to_remove_ids)) { + for (const change_id of Array.from(change_to_remove_ids)) { remove_change_id[change_id] = true; } const remaining_changes = []; - for (let change of Array.from(this.changes)) { + for (const change of Array.from(this.changes)) { if (remove_change_id[change.id]) { delete remove_change_id[change.id]; this._markAsDirty(change, "change", "removed"); @@ -179,7 +187,7 @@ const load = function() { validate(text) { let content; - for (let change of Array.from(this.changes)) { + for (const change of Array.from(this.changes)) { if (change.op.i != null) { content = text.slice(change.op.p, change.op.p + change.op.i.length); if (content !== change.op.i) { @@ -187,7 +195,7 @@ const load = function() { } } } - for (let comment of Array.from(this.comments)) { + for (const comment of Array.from(this.comments)) { content = text.slice(comment.op.p, comment.op.p + comment.op.c.length); if (content !== comment.op.c) { throw new Error(`Comment (${JSON.stringify(comment)}) doesn't match text (${JSON.stringify(content)})`); @@ -243,7 +251,7 @@ const load = function() { applyInsertToComments(op) { return (() => { const result = []; - for (let comment of Array.from(this.comments)) { + for (const comment of Array.from(this.comments)) { if (op.p <= comment.op.p) { comment.op.p += op.i.length; result.push(this._markAsDirty(comment, "comment", "moved")); @@ -265,7 +273,7 @@ const load = function() { const op_end = op.p + op_length; return (() => { const result = []; - for (let comment of Array.from(this.comments)) { + for (const comment of Array.from(this.comments)) { const comment_start = comment.op.p; const comment_end = comment.op.p + comment.op.c.length; const comment_length = comment_end - comment_start; @@ -419,7 +427,7 @@ const load = function() { }, metadata: {} }; - for (let key in change.metadata) { const value = change.metadata[key]; after_change.metadata[key] = value; } + for (const key in change.metadata) { const value = change.metadata[key]; after_change.metadata[key] = value; } new_changes.push(after_change); } } @@ 
-636,7 +644,7 @@ const load = function() { } }); - for (let modification of Array.from(op_modifications)) { + for (const modification of Array.from(op_modifications)) { if (modification.i != null) { content = content.slice(0, modification.p) + modification.i + content.slice(modification.p); } else if (modification.d != null) { @@ -656,7 +664,7 @@ const load = function() { let previous_change = null; const remove_changes = []; const moved_changes = []; - for (let change of Array.from(this.changes)) { + for (const change of Array.from(this.changes)) { if (((previous_change != null ? previous_change.op.i : undefined) != null) && (change.op.i != null)) { const previous_change_end = previous_change.op.p + previous_change.op.i.length; const previous_change_user_id = previous_change.metadata.user_id; @@ -704,7 +712,7 @@ const load = function() { _clone(object) { const clone = {}; - for (let k in object) { const v = object[k]; clone[k] = v; } + for (const k in object) { const v = object[k]; clone[k] = v; } return clone; } }; diff --git a/services/document-updater/app/coffee/RateLimitManager.js b/services/document-updater/app/coffee/RateLimitManager.js index 534fdade92..17803f1316 100644 --- a/services/document-updater/app/coffee/RateLimitManager.js +++ b/services/document-updater/app/coffee/RateLimitManager.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.js b/services/document-updater/app/coffee/RealTimeRedisManager.js index b3d7a65680..e2aa12e8d3 100644 --- a/services/document-updater/app/coffee/RealTimeRedisManager.js +++ b/services/document-updater/app/coffee/RealTimeRedisManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/RedisManager.js b/services/document-updater/app/coffee/RedisManager.js index f434dfc9d4..80944e10be 100644 --- a/services/document-updater/app/coffee/RedisManager.js +++ b/services/document-updater/app/coffee/RedisManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -299,7 +305,7 @@ module.exports = (RedisManager = { } const jsonOps = appliedOps.map(op => JSON.stringify(op)); - for (let op of Array.from(jsonOps)) { + for (const op of Array.from(jsonOps)) { if (op.indexOf("\u0000") !== -1) { error = new Error("null bytes found in jsonOps"); // this check was added to catch memory corruption in JSON.stringify diff --git a/services/document-updater/app/coffee/ShareJsDB.js b/services/document-updater/app/coffee/ShareJsDB.js index 5b313cee96..20bf42919f 100644 --- a/services/document-updater/app/coffee/ShareJsDB.js +++ b/services/document-updater/app/coffee/ShareJsDB.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
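The RedisManager hunk above keeps a defensive scan over the JSON-serialized ops: if the serialized text contains a null byte the write is rejected, a check originally added to catch memory corruption in JSON.stringify. A minimal sketch of the guard:

function checkJsonOps(appliedOps) {
  const jsonOps = appliedOps.map((op) => JSON.stringify(op));
  for (const op of jsonOps) {
    if (op.indexOf("\u0000") !== -1) {
      // this check was added to catch memory corruption in JSON.stringify
      return new Error("null bytes found in jsonOps");
    }
  }
  return null; // ops are clean
}

console.log(checkJsonOps([{ p: 0, i: "hello" }])); // => null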
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.js b/services/document-updater/app/coffee/ShareJsUpdateManager.js index 82eb6923b0..574a5127fa 100644 --- a/services/document-updater/app/coffee/ShareJsUpdateManager.js +++ b/services/document-updater/app/coffee/ShareJsUpdateManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/SnapshotManager.js b/services/document-updater/app/coffee/SnapshotManager.js index 5f998096af..ca03be85d6 100644 --- a/services/document-updater/app/coffee/SnapshotManager.js +++ b/services/document-updater/app/coffee/SnapshotManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -38,11 +44,11 @@ module.exports = (SnapshotManager = { } }; - for (let change of Array.from(ranges.changes || [])) { + for (const change of Array.from(ranges.changes || [])) { change.id = SnapshotManager._safeObjectId(change.id); updateMetadata(change.metadata); } - for (let comment of Array.from(ranges.comments || [])) { + for (const comment of Array.from(ranges.comments || [])) { comment.id = SnapshotManager._safeObjectId(comment.id); if ((comment.op != null ? comment.op.t : undefined) != null) { comment.op.t = SnapshotManager._safeObjectId(comment.op.t); diff --git a/services/document-updater/app/coffee/UpdateKeys.js b/services/document-updater/app/coffee/UpdateKeys.js index 470be0ce4a..bcafb807dc 100644 --- a/services/document-updater/app/coffee/UpdateKeys.js +++ b/services/document-updater/app/coffee/UpdateKeys.js @@ -1,3 +1,8 @@ +/* eslint-disable + camelcase, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. module.exports = { combineProjectIdAndDocId(project_id, doc_id) { return `${project_id}:${doc_id}`; }, splitProjectIdAndDocId(project_and_doc_id) { return project_and_doc_id.split(":"); } diff --git a/services/document-updater/app/coffee/UpdateManager.js b/services/document-updater/app/coffee/UpdateManager.js index 5151dfb4e7..de1656a336 100644 --- a/services/document-updater/app/coffee/UpdateManager.js +++ b/services/document-updater/app/coffee/UpdateManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
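UpdateKeys, shown above, is the whole module: two one-liners that join and split a composite "project:doc" key. A usage note with hypothetical ids:

const UpdateKeys = {
  combineProjectIdAndDocId(project_id, doc_id) { return `${project_id}:${doc_id}`; },
  splitProjectIdAndDocId(project_and_doc_id) { return project_and_doc_id.split(":"); }
};

const key = UpdateKeys.combineProjectIdAndDocId("project-1", "doc-9");
console.log(key);                                    // "project-1:doc-9"
console.log(UpdateKeys.splitProjectIdAndDocId(key)); // ["project-1", "doc-9"]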
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -145,7 +152,7 @@ module.exports = (UpdateManager = { }, lockUpdatesAndDo(method, project_id, doc_id, ...rest) { - const adjustedLength = Math.max(rest.length, 1), args = rest.slice(0, adjustedLength - 1), callback = rest[adjustedLength - 1]; + const adjustedLength = Math.max(rest.length, 1); const args = rest.slice(0, adjustedLength - 1); const callback = rest[adjustedLength - 1]; const profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id}); return LockManager.getLock(doc_id, function(error, lockValue) { profile.log("getLock"); @@ -185,7 +192,7 @@ module.exports = (UpdateManager = { // 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). // Something must be going on client side that is screwing up the encoding and splitting the // two 16-bit characters so that \uD835 is standalone. - for (let op of Array.from(update.op || [])) { + for (const op of Array.from(update.op || [])) { if (op.i != null) { // Replace high and low surrogate characters with 'replacement character' (\uFFFD) op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD"); @@ -215,7 +222,7 @@ module.exports = (UpdateManager = { // changes to it for the next update. return (() => { const result = []; - for (let op of Array.from(update.op)) { + for (const op of Array.from(update.op)) { if (op.i != null) { doc_length += op.i.length; } diff --git a/services/document-updater/app/coffee/mongojs.js b/services/document-updater/app/coffee/mongojs.js index daf6fbed6d..61092e1009 100644 --- a/services/document-updater/app/coffee/mongojs.js +++ b/services/document-updater/app/coffee/mongojs.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/app/coffee/sharejs/count.js b/services/document-updater/app/coffee/sharejs/count.js index ffc3337ac7..c77b76b098 100644 --- a/services/document-updater/app/coffee/sharejs/count.js +++ b/services/document-updater/app/coffee/sharejs/count.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/sharejs/helpers.js b/services/document-updater/app/coffee/sharejs/helpers.js index 81a561de03..b4500a3214 100644 --- a/services/document-updater/app/coffee/sharejs/helpers.js +++ b/services/document-updater/app/coffee/sharejs/helpers.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -12,7 +18,7 @@ // Add transform and transformX functions for an OT type which has transformComponent defined. 
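The UpdateManager hunk above documents a client-side encoding bug that can split a surrogate pair (such as the \uD835 half of a blackboard bold character) across updates, and scrubs inserts by replacing every UTF-16 code unit in the surrogate range with the replacement character. A sketch of that scrub; note the regex matches each code unit individually, so it also rewrites well-formed pairs:

function sanitizeOps(ops) {
  for (const op of ops) {
    if (op.i != null) {
      // Replace high and low surrogate characters with 'replacement character' (\uFFFD)
      op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD");
    }
  }
  return ops;
}

console.log(sanitizeOps([{ p: 0, i: "broken \uD835 half-pair" }]));
// => [ { p: 0, i: 'broken \uFFFD half-pair' } ]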
// transformComponent(destination array, component, other component, side) let bootstrapTransform; -exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) { +exports._bt = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) { let transformX; const transformComponentX = function(left, right, destLeft, destRight) { transformComponent(destLeft, left, right, 'left'); @@ -20,7 +26,7 @@ exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkV }; // Transforms rightOp by leftOp. Returns ['rightOp', clientOp'] - type.transformX = (type['transformX'] = (transformX = function(leftOp, rightOp) { + type.transformX = (type.transformX = (transformX = function(leftOp, rightOp) { checkValidOp(leftOp); checkValidOp(rightOp); @@ -47,7 +53,7 @@ exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkV // Recurse. const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)); for (l of Array.from(l_)) { append(newLeftOp, l); } - for (let r of Array.from(r_)) { append(newRightOp, r); } + for (const r of Array.from(r_)) { append(newRightOp, r); } rightComponent = null; break; } @@ -61,7 +67,7 @@ exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkV })); // Transforms op with specified type ('left' or 'right') by otherOp. - return type.transform = (type['transform'] = function(op, otherOp, type) { + return type.transform = (type.transform = function(op, otherOp, type) { let _; if ((type !== 'left') && (type !== 'right')) { throw new Error("type must be 'left' or 'right'"); } diff --git a/services/document-updater/app/coffee/sharejs/index.js b/services/document-updater/app/coffee/sharejs/index.js index bf681de7cd..a322063e83 100644 --- a/services/document-updater/app/coffee/sharejs/index.js +++ b/services/document-updater/app/coffee/sharejs/index.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/app/coffee/sharejs/json-api.js b/services/document-updater/app/coffee/sharejs/json-api.js index 1c7c2633ba..67e54f5334 100644 --- a/services/document-updater/app/coffee/sharejs/json-api.js +++ b/services/document-updater/app/coffee/sharejs/json-api.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -32,6 +38,7 @@ class SubDoc { this.doc = doc; this.path = path; } + at(...path) { return this.doc.at(this.path.concat(depath(path))); } get() { return this.doc.getAt(this.path); } // for objects and lists @@ -57,7 +64,7 @@ const traverse = function(snapshot, path) { const container = {data:snapshot}; let key = 'data'; let elem = container; - for (let p of Array.from(path)) { + for (const p of Array.from(path)) { elem = elem[key]; key = p; if (typeof elem === 'undefined') { throw new Error('bad path'); } @@ -155,7 +162,7 @@ json.api = { this.on('change', function(op) { return (() => { const result = []; - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { var i; if ((c.na !== undefined) || (c.si !== undefined) || (c.sd !== undefined)) { // no change to structure @@ -197,7 +204,7 @@ json.api = { var match_path = c.na === undefined ? 
c.p.slice(0, c.p.length-1) : c.p; result.push((() => { const result1 = []; - for (let {path, event, cb} of Array.from(this._listeners)) { + for (const {path, event, cb} of Array.from(this._listeners)) { var common; if (pathEquals(path, match_path)) { switch (event) { diff --git a/services/document-updater/app/coffee/sharejs/json.js b/services/document-updater/app/coffee/sharejs/json.js index 3e3bee79d9..5619c09be1 100644 --- a/services/document-updater/app/coffee/sharejs/json.js +++ b/services/document-updater/app/coffee/sharejs/json.js @@ -1,3 +1,10 @@ +/* eslint-disable + no-return-assign, + no-undef, + no-useless-catch, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -67,7 +74,7 @@ json.apply = function(snapshot, op) { let elem = container; let key = 'data'; - for (let p of Array.from(c.p)) { + for (const p of Array.from(c.p)) { parent = elem; parentkey = key; elem = elem[key]; @@ -187,7 +194,7 @@ json.compose = function(op1, op2) { json.checkValidOp(op2); const newOp = clone(op1); - for (let c of Array.from(op2)) { json.append(newOp, c); } + for (const c of Array.from(op2)) { json.append(newOp, c); } return newOp; }; @@ -197,7 +204,7 @@ json.normalize = function(op) { if (!isArray(op)) { op = [op]; } - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { if (c.p == null) { c.p = []; } json.append(newOp, c); } @@ -300,7 +307,7 @@ json.transformComponent = function(dest, c, otherC, type) { const res = []; text._tc(res, tc1, tc2, type); - for (let tc of Array.from(res)) { + for (const tc of Array.from(res)) { const jc = { p: c.p.slice(0, common) }; jc.p.push(tc.p); if (tc.i != null) { jc.si = tc.i; } diff --git a/services/document-updater/app/coffee/sharejs/model.js b/services/document-updater/app/coffee/sharejs/model.js index 9b6e65effd..68f68f2e7d 100644 --- a/services/document-updater/app/coffee/sharejs/model.js +++ b/services/document-updater/app/coffee/sharejs/model.js @@ -1,3 +1,10 @@ +/* eslint-disable + no-console, + no-return-assign, + standard/no-callback-literal, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -133,7 +140,7 @@ module.exports = (Model = function(db, options) { if (ops.length > 0) { try { // If there's enough ops, it might be worth spinning this out into a webworker thread. - for (let oldOp of Array.from(ops)) { + for (const oldOp of Array.from(ops)) { // Dup detection works by sending the id(s) the op has been submitted with previously. // If the id matches, we reject it. The client can also detect the op has been submitted // already if it sees its own previous id in the ops it sees when it does catchup. @@ -171,7 +178,7 @@ module.exports = (Model = function(db, options) { return callback('Internal error'); } - //newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); return writeOp(docName, opData, function(error) { @@ -271,7 +278,7 @@ module.exports = (Model = function(db, options) { if (error) { return (typeof callback === 'function' ? 
callback(error) : undefined); } let v = start; - for (let op of Array.from(ops)) { op.v = v++; } + for (const op of Array.from(ops)) { op.v = v++; } return (typeof callback === 'function' ? callback(null, ops) : undefined); }); @@ -321,7 +328,7 @@ module.exports = (Model = function(db, options) { console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); try { - for (let op of Array.from(ops)) { + for (const op of Array.from(ops)) { data.snapshot = type.apply(data.snapshot, op.op); data.v++; } @@ -620,7 +627,7 @@ module.exports = (Model = function(db, options) { } return (() => { const result = []; - for (let op of Array.from(data)) { + for (const op of Array.from(data)) { var needle; listener(op); @@ -662,7 +669,7 @@ module.exports = (Model = function(db, options) { let pendingWrites = 0; - for (let docName in docs) { + for (const docName in docs) { const doc = docs[docName]; if (doc.committedVersion < doc.v) { pendingWrites++; diff --git a/services/document-updater/app/coffee/sharejs/server/model.js b/services/document-updater/app/coffee/sharejs/server/model.js index 42dd7acc64..485420c040 100644 --- a/services/document-updater/app/coffee/sharejs/server/model.js +++ b/services/document-updater/app/coffee/sharejs/server/model.js @@ -1,3 +1,10 @@ +/* eslint-disable + no-console, + no-return-assign, + standard/no-callback-literal, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -133,7 +140,7 @@ module.exports = (Model = function(db, options) { if (ops.length > 0) { try { // If there's enough ops, it might be worth spinning this out into a webworker thread. - for (let oldOp of Array.from(ops)) { + for (const oldOp of Array.from(ops)) { // Dup detection works by sending the id(s) the op has been submitted with previously. // If the id matches, we reject it. The client can also detect the op has been submitted // already if it sees its own previous id in the ops it sees when it does catchup. @@ -175,7 +182,7 @@ module.exports = (Model = function(db, options) { return callback('Internal error'); } - //newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); return writeOp(docName, opData, function(error) { @@ -275,7 +282,7 @@ module.exports = (Model = function(db, options) { if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } let v = start; - for (let op of Array.from(ops)) { op.v = v++; } + for (const op of Array.from(ops)) { op.v = v++; } return (typeof callback === 'function' ? 
callback(null, ops) : undefined); }); @@ -325,7 +332,7 @@ module.exports = (Model = function(db, options) { console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); try { - for (let op of Array.from(ops)) { + for (const op of Array.from(ops)) { data.snapshot = type.apply(data.snapshot, op.op); data.v++; } @@ -624,7 +631,7 @@ module.exports = (Model = function(db, options) { } return (() => { const result = []; - for (let op of Array.from(data)) { + for (const op of Array.from(data)) { var needle; listener(op); @@ -666,7 +673,7 @@ module.exports = (Model = function(db, options) { let pendingWrites = 0; - for (let docName in docs) { + for (const docName in docs) { const doc = docs[docName]; if (doc.committedVersion < doc.v) { pendingWrites++; diff --git a/services/document-updater/app/coffee/sharejs/server/syncqueue.js b/services/document-updater/app/coffee/sharejs/server/syncqueue.js index 31b2235ee3..2eecb615e6 100644 --- a/services/document-updater/app/coffee/sharejs/server/syncqueue.js +++ b/services/document-updater/app/coffee/sharejs/server/syncqueue.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/sharejs/simple.js b/services/document-updater/app/coffee/sharejs/simple.js index 57c4934f73..c0e8e85394 100644 --- a/services/document-updater/app/coffee/sharejs/simple.js +++ b/services/document-updater/app/coffee/sharejs/simple.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -27,7 +29,7 @@ module.exports = { // // The original snapshot should not be modified. apply(snapshot, op) { - if (!(0 <= op.position && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); } + if (!(op.position >= 0 && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); } let { str diff --git a/services/document-updater/app/coffee/sharejs/syncqueue.js b/services/document-updater/app/coffee/sharejs/syncqueue.js index 31b2235ee3..2eecb615e6 100644 --- a/services/document-updater/app/coffee/sharejs/syncqueue.js +++ b/services/document-updater/app/coffee/sharejs/syncqueue.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/sharejs/text-api.js b/services/document-updater/app/coffee/sharejs/text-api.js index 295261ff90..7c39b25899 100644 --- a/services/document-updater/app/coffee/sharejs/text-api.js +++ b/services/document-updater/app/coffee/sharejs/text-api.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. 
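The simple.js hunks above flip a Yoda condition without changing behaviour: an op may only address a position inside (or at the end of) the snapshot string. A sketch of the equivalent bounds check, using the {str} snapshot shape from the hunk:

function checkPosition(snapshot, op) {
  if (!(op.position >= 0 && op.position <= snapshot.str.length)) {
    throw new Error('Invalid position');
  }
}

checkPosition({ str: "hello" }, { position: 5 }); // ok: appending at the end
// checkPosition({ str: "hello" }, { position: 6 }); // would throw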
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/sharejs/text-composable-api.js b/services/document-updater/app/coffee/sharejs/text-composable-api.js index 160ab1c46e..ba6e5f0242 100644 --- a/services/document-updater/app/coffee/sharejs/text-composable-api.js +++ b/services/document-updater/app/coffee/sharejs/text-composable-api.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -43,7 +48,7 @@ type.api = { let pos = 0; return (() => { const result = []; - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { if (typeof component === 'number') { result.push(pos += component); } else if (component.i !== undefined) { diff --git a/services/document-updater/app/coffee/sharejs/text-composable.js b/services/document-updater/app/coffee/sharejs/text-composable.js index 4f43f769cd..79dfb63308 100644 --- a/services/document-updater/app/coffee/sharejs/text-composable.js +++ b/services/document-updater/app/coffee/sharejs/text-composable.js @@ -1,3 +1,11 @@ +/* eslint-disable + no-cond-assign, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -20,8 +28,8 @@ // Snapshots are strings. let makeAppend; -const p = function() {}; //require('util').debug -const i = function() {}; //require('util').inspect +const p = function() {}; // require('util').debug +const i = function() {}; // require('util').inspect const exports = (typeof WEB !== 'undefined' && WEB !== null) ? {} : module.exports; @@ -36,7 +44,7 @@ const checkOp = function(op) { let last = null; return (() => { const result = []; - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { if (typeof(c) === 'object') { if (((c.i == null) || !(c.i.length > 0)) && ((c.d == null) || !(c.d.length > 0))) { throw new Error(`Invalid op component: ${i(c)}`); } } else { @@ -55,7 +63,7 @@ const checkOp = function(op) { // Exported for the randomOpGenerator. 
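The exports._makeAppend helper that follows normalizes as it appends: no-op components are dropped, and adjacent skip counts (plain numbers) are merged into one. A reduced sketch of those branches; the real helper also merges adjacent inserts and adjacent deletes:

const makeAppend = (op) => (component) => {
  if (component === 0 || component.i === '' || component.d === '') {
    // drop no-op components
  } else if (op.length === 0) {
    op.push(component);
  } else if (typeof component === 'number' && typeof op[op.length - 1] === 'number') {
    op[op.length - 1] += component; // merge adjacent skips
  } else {
    op.push(component);
  }
};

const op = [];
const append = makeAppend(op);
append(3); append(2); append({ i: 'hi' });
console.log(op); // => [ 5, { i: 'hi' } ]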
exports._makeAppend = (makeAppend = op => (function(component) { if ((component === 0) || (component.i === '') || (component.d === '')) { - return; + } else if (op.length === 0) { return op.push(component); } else if ((typeof(component) === 'number') && (typeof(op[op.length - 1]) === 'number')) { @@ -85,7 +93,7 @@ const makeTake = function(op) { const take = function(n, indivisableField) { let c; if (idx === op.length) { return null; } - //assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' + // assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' if (typeof(op[idx]) === 'number') { if ((n == null) || ((op[idx] - offset) <= n)) { @@ -132,7 +140,7 @@ const componentLength = function(component) { exports.normalize = function(op) { const newOp = []; const append = makeAppend(newOp); - for (let component of Array.from(op)) { append(component); } + for (const component of Array.from(op)) { append(component); } return newOp; }; @@ -145,7 +153,7 @@ exports.apply = function(str, op) { const pos = 0; const newDoc = []; - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { if (typeof(component) === 'number') { if (component > str.length) { throw new Error('The op is too long for this document'); } newDoc.push(str.slice(0, component)); @@ -158,7 +166,7 @@ exports.apply = function(str, op) { } } - if ('' !== str) { throw new Error("The applied op doesn't traverse the entire document"); } + if (str !== '') { throw new Error("The applied op doesn't traverse the entire document"); } return newDoc.join(''); }; @@ -197,7 +205,7 @@ exports.transform = function(op, otherOp, side) { // Otherwise, skip the inserted text. append(component.i.length); } else { // Delete. - //assert.ok component.d + // assert.ok component.d ({ length } = component.d); @@ -210,7 +218,7 @@ exports.transform = function(op, otherOp, side) { } else if (chunk.i != null) { append(chunk); } else { - //assert.ok chunk.d + // assert.ok chunk.d // The delete is unnecessary now. length -= chunk.d.length; } @@ -302,7 +310,7 @@ exports.invert = function(op) { const result = []; const append = makeAppend(result); - for (let component of Array.from(op)) { append(invertComponent(component)); } + for (const component of Array.from(op)) { append(invertComponent(component)); } return result; }; diff --git a/services/document-updater/app/coffee/sharejs/text-tp2-api.js b/services/document-updater/app/coffee/sharejs/text-tp2-api.js index e3f4f95ea6..97bf606267 100644 --- a/services/document-updater/app/coffee/sharejs/text-tp2-api.js +++ b/services/document-updater/app/coffee/sharejs/text-tp2-api.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
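text-composable's invert, just above, builds the inverse op by pushing each inverted component through the same append path. invertComponent itself is outside the hunk; the sketch below uses the standard definition, where skips pass through and inserts and deletes swap:

function invertComponent(c) {
  if (typeof c === 'number') return c;  // skips are their own inverse
  if (c.i != null) return { d: c.i };   // undo an insert by deleting it
  return { i: c.d };                    // undo a delete by re-inserting it
}

function invert(op) {
  return op.map(invertComponent); // the real code re-appends via makeAppend
}

console.log(invert([5, { i: 'abc' }, 2, { d: 'xy' }]));
// => [ 5, { d: 'abc' }, 2, { i: 'xy' } ]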
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -27,7 +32,7 @@ const appendSkipChars = (op, doc, pos, maxlength) => (() => { return result; })(); -type['api'] = { +type.api = { 'provides': {'text':true}, // The number of characters in the string @@ -81,7 +86,7 @@ type['api'] = { let textPos = 0; const docPos = {index:0, offset:0}; - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { var part, remainder; if (typeof component === 'number') { // Skip diff --git a/services/document-updater/app/coffee/sharejs/text-tp2.js b/services/document-updater/app/coffee/sharejs/text-tp2.js index ab123d6ff7..4efcb05871 100644 --- a/services/document-updater/app/coffee/sharejs/text-tp2.js +++ b/services/document-updater/app/coffee/sharejs/text-tp2.js @@ -1,3 +1,11 @@ +/* eslint-disable + no-cond-assign, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -43,7 +51,7 @@ var type = { const doc = type.create(); doc.data = data; - for (let component of Array.from(data)) { + for (const component of Array.from(data)) { if (typeof component === 'string') { doc.charLength += component.length; doc.totalLength += component.length; @@ -62,7 +70,7 @@ const checkOp = function(op) { let last = null; return (() => { const result = []; - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { if (typeof(c) === 'object') { if (c.i !== undefined) { if (((typeof(c.i) !== 'string') || !(c.i.length > 0)) && ((typeof(c.i) !== 'number') || !(c.i > 0))) { throw new Error('Inserts must insert a string or a +ive number'); } @@ -147,7 +155,7 @@ type.apply = function(doc, op) { const newDoc = type.create(); const position = {index:0, offset:0}; - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { var part, remainder; if (typeof(component) === 'number') { remainder = component; @@ -177,7 +185,7 @@ type.apply = function(doc, op) { // Exported for the randomOpGenerator. type._append = (append = function(op, component) { if ((component === 0) || (component.i === '') || (component.i === 0) || (component.d === 0)) { - return; + } else if (op.length === 0) { return op.push(component); } else { @@ -257,7 +265,7 @@ const componentLength = function(component) { // adjacent inserts and deletes. type.normalize = function(op) { const newOp = []; - for (let component of Array.from(op)) { append(newOp, component); } + for (const component of Array.from(op)) { append(newOp, component); } return newOp; }; diff --git a/services/document-updater/app/coffee/sharejs/text.js b/services/document-updater/app/coffee/sharejs/text.js index 3ecb026c77..3e28b898b6 100644 --- a/services/document-updater/app/coffee/sharejs/text.js +++ b/services/document-updater/app/coffee/sharejs/text.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
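The text-tp2 hunks above belong to the transform-property-2 text type, where a document is an array mixing strings (live characters) and numbers (runs of deleted, tombstoned characters), so the type tracks charLength and totalLength separately. A sketch of that bookkeeping; the tombstone branch is assumed from the type's data model rather than shown in the hunk:

function measure(data) {
  let charLength = 0;
  let totalLength = 0;
  for (const component of data) {
    if (typeof component === 'string') {
      charLength += component.length;
      totalLength += component.length;
    } else {
      totalLength += component; // tombstones count only toward totalLength
    }
  }
  return { charLength, totalLength };
}

console.log(measure(['ab', 3, 'c'])); // => { charLength: 3, totalLength: 6 }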
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -45,13 +52,13 @@ const checkValidComponent = function(c) { }; const checkValidOp = function(op) { - for (let c of Array.from(op)) { checkValidComponent(c); } + for (const c of Array.from(op)) { checkValidComponent(c); } return true; }; text.apply = function(snapshot, op) { checkValidOp(op); - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { if (component.i != null) { snapshot = strInject(snapshot, component.p, component.i); } else { @@ -92,7 +99,7 @@ text.compose = function(op1, op2) { checkValidOp(op2); const newOp = op1.slice(); - for (let c of Array.from(op2)) { append(newOp, c); } + for (const c of Array.from(op2)) { append(newOp, c); } return newOp; }; @@ -111,7 +118,7 @@ text.normalize = function(op) { // so this is probably the least bad solution. if ((op.i != null) || (op.p != null)) { op = [op]; } - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { if (c.p == null) { c.p = 0; } append(newOp, c); } @@ -152,7 +159,7 @@ const transformPosition = function(pos, c, insertAfter) { // is pushed after an insert (true) or before it (false). text.transformCursor = function(position, op, side) { const insertAfter = side === 'right'; - for (let c of Array.from(op)) { position = transformPosition(position, c, insertAfter); } + for (const c of Array.from(op)) { position = transformPosition(position, c, insertAfter); } return position; }; diff --git a/services/document-updater/app/coffee/sharejs/types/count.js b/services/document-updater/app/coffee/sharejs/types/count.js index ffc3337ac7..c77b76b098 100644 --- a/services/document-updater/app/coffee/sharejs/types/count.js +++ b/services/document-updater/app/coffee/sharejs/types/count.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/sharejs/types/helpers.js b/services/document-updater/app/coffee/sharejs/types/helpers.js index 81a561de03..b4500a3214 100644 --- a/services/document-updater/app/coffee/sharejs/types/helpers.js +++ b/services/document-updater/app/coffee/sharejs/types/helpers.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -12,7 +18,7 @@ // Add transform and transformX functions for an OT type which has transformComponent defined. // transformComponent(destination array, component, other component, side) let bootstrapTransform; -exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) { +exports._bt = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) { let transformX; const transformComponentX = function(left, right, destLeft, destRight) { transformComponent(destLeft, left, right, 'left'); @@ -20,7 +26,7 @@ exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkV }; // Transforms rightOp by leftOp. 
Returns ['rightOp', clientOp'] - type.transformX = (type['transformX'] = (transformX = function(leftOp, rightOp) { + type.transformX = (type.transformX = (transformX = function(leftOp, rightOp) { checkValidOp(leftOp); checkValidOp(rightOp); @@ -47,7 +53,7 @@ exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkV // Recurse. const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)); for (l of Array.from(l_)) { append(newLeftOp, l); } - for (let r of Array.from(r_)) { append(newRightOp, r); } + for (const r of Array.from(r_)) { append(newRightOp, r); } rightComponent = null; break; } @@ -61,7 +67,7 @@ exports['_bt'] = (bootstrapTransform = function(type, transformComponent, checkV })); // Transforms op with specified type ('left' or 'right') by otherOp. - return type.transform = (type['transform'] = function(op, otherOp, type) { + return type.transform = (type.transform = function(op, otherOp, type) { let _; if ((type !== 'left') && (type !== 'right')) { throw new Error("type must be 'left' or 'right'"); } diff --git a/services/document-updater/app/coffee/sharejs/types/index.js b/services/document-updater/app/coffee/sharejs/types/index.js index bf681de7cd..a322063e83 100644 --- a/services/document-updater/app/coffee/sharejs/types/index.js +++ b/services/document-updater/app/coffee/sharejs/types/index.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/app/coffee/sharejs/types/json-api.js b/services/document-updater/app/coffee/sharejs/types/json-api.js index 1c7c2633ba..67e54f5334 100644 --- a/services/document-updater/app/coffee/sharejs/types/json-api.js +++ b/services/document-updater/app/coffee/sharejs/types/json-api.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -32,6 +38,7 @@ class SubDoc { this.doc = doc; this.path = path; } + at(...path) { return this.doc.at(this.path.concat(depath(path))); } get() { return this.doc.getAt(this.path); } // for objects and lists @@ -57,7 +64,7 @@ const traverse = function(snapshot, path) { const container = {data:snapshot}; let key = 'data'; let elem = container; - for (let p of Array.from(path)) { + for (const p of Array.from(path)) { elem = elem[key]; key = p; if (typeof elem === 'undefined') { throw new Error('bad path'); } @@ -155,7 +162,7 @@ json.api = { this.on('change', function(op) { return (() => { const result = []; - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { var i; if ((c.na !== undefined) || (c.si !== undefined) || (c.sd !== undefined)) { // no change to structure @@ -197,7 +204,7 @@ json.api = { var match_path = c.na === undefined ? 
c.p.slice(0, c.p.length-1) : c.p; result.push((() => { const result1 = []; - for (let {path, event, cb} of Array.from(this._listeners)) { + for (const {path, event, cb} of Array.from(this._listeners)) { var common; if (pathEquals(path, match_path)) { switch (event) { diff --git a/services/document-updater/app/coffee/sharejs/types/json.js b/services/document-updater/app/coffee/sharejs/types/json.js index 3e3bee79d9..5619c09be1 100644 --- a/services/document-updater/app/coffee/sharejs/types/json.js +++ b/services/document-updater/app/coffee/sharejs/types/json.js @@ -1,3 +1,10 @@ +/* eslint-disable + no-return-assign, + no-undef, + no-useless-catch, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -67,7 +74,7 @@ json.apply = function(snapshot, op) { let elem = container; let key = 'data'; - for (let p of Array.from(c.p)) { + for (const p of Array.from(c.p)) { parent = elem; parentkey = key; elem = elem[key]; @@ -187,7 +194,7 @@ json.compose = function(op1, op2) { json.checkValidOp(op2); const newOp = clone(op1); - for (let c of Array.from(op2)) { json.append(newOp, c); } + for (const c of Array.from(op2)) { json.append(newOp, c); } return newOp; }; @@ -197,7 +204,7 @@ json.normalize = function(op) { if (!isArray(op)) { op = [op]; } - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { if (c.p == null) { c.p = []; } json.append(newOp, c); } @@ -300,7 +307,7 @@ json.transformComponent = function(dest, c, otherC, type) { const res = []; text._tc(res, tc1, tc2, type); - for (let tc of Array.from(res)) { + for (const tc of Array.from(res)) { const jc = { p: c.p.slice(0, common) }; jc.p.push(tc.p); if (tc.i != null) { jc.si = tc.i; } diff --git a/services/document-updater/app/coffee/sharejs/types/model.js b/services/document-updater/app/coffee/sharejs/types/model.js index 9b6e65effd..68f68f2e7d 100644 --- a/services/document-updater/app/coffee/sharejs/types/model.js +++ b/services/document-updater/app/coffee/sharejs/types/model.js @@ -1,3 +1,10 @@ +/* eslint-disable + no-console, + no-return-assign, + standard/no-callback-literal, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -133,7 +140,7 @@ module.exports = (Model = function(db, options) { if (ops.length > 0) { try { // If there's enough ops, it might be worth spinning this out into a webworker thread. - for (let oldOp of Array.from(ops)) { + for (const oldOp of Array.from(ops)) { // Dup detection works by sending the id(s) the op has been submitted with previously. // If the id matches, we reject it. The client can also detect the op has been submitted // already if it sees its own previous id in the ops it sees when it does catchup. @@ -171,7 +178,7 @@ module.exports = (Model = function(db, options) { return callback('Internal error'); } - //newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); return writeOp(docName, opData, function(error) { @@ -271,7 +278,7 @@ module.exports = (Model = function(db, options) { if (error) { return (typeof callback === 'function' ? 
callback(error) : undefined); } let v = start; - for (let op of Array.from(ops)) { op.v = v++; } + for (const op of Array.from(ops)) { op.v = v++; } return (typeof callback === 'function' ? callback(null, ops) : undefined); }); @@ -321,7 +328,7 @@ module.exports = (Model = function(db, options) { console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); try { - for (let op of Array.from(ops)) { + for (const op of Array.from(ops)) { data.snapshot = type.apply(data.snapshot, op.op); data.v++; } @@ -620,7 +627,7 @@ module.exports = (Model = function(db, options) { } return (() => { const result = []; - for (let op of Array.from(data)) { + for (const op of Array.from(data)) { var needle; listener(op); @@ -662,7 +669,7 @@ module.exports = (Model = function(db, options) { let pendingWrites = 0; - for (let docName in docs) { + for (const docName in docs) { const doc = docs[docName]; if (doc.committedVersion < doc.v) { pendingWrites++; diff --git a/services/document-updater/app/coffee/sharejs/types/simple.js b/services/document-updater/app/coffee/sharejs/types/simple.js index 57c4934f73..c0e8e85394 100644 --- a/services/document-updater/app/coffee/sharejs/types/simple.js +++ b/services/document-updater/app/coffee/sharejs/types/simple.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -27,7 +29,7 @@ module.exports = { // // The original snapshot should not be modified. apply(snapshot, op) { - if (!(0 <= op.position && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); } + if (!(op.position >= 0 && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); } let { str diff --git a/services/document-updater/app/coffee/sharejs/types/syncqueue.js b/services/document-updater/app/coffee/sharejs/types/syncqueue.js index 31b2235ee3..2eecb615e6 100644 --- a/services/document-updater/app/coffee/sharejs/types/syncqueue.js +++ b/services/document-updater/app/coffee/sharejs/types/syncqueue.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/sharejs/types/text-api.js b/services/document-updater/app/coffee/sharejs/types/text-api.js index 295261ff90..7c39b25899 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-api.js +++ b/services/document-updater/app/coffee/sharejs/types/text-api.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable-api.js b/services/document-updater/app/coffee/sharejs/types/text-composable-api.js index 160ab1c46e..ba6e5f0242 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-composable-api.js +++ b/services/document-updater/app/coffee/sharejs/types/text-composable-api.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
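The model.js hunks (and their types/ duplicates above) stamp consecutive version numbers onto ops as they come back from the store, counting up from the requested start version. A minimal sketch:

function stampVersions(ops, start) {
  let v = start;
  for (const op of ops) {
    op.v = v++;
  }
  return ops;
}

console.log(stampVersions([{ op: [] }, { op: [] }], 7).map((o) => o.v)); // => [ 7, 8 ]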
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -43,7 +48,7 @@ type.api = { let pos = 0; return (() => { const result = []; - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { if (typeof component === 'number') { result.push(pos += component); } else if (component.i !== undefined) { diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable.js b/services/document-updater/app/coffee/sharejs/types/text-composable.js index 4f43f769cd..79dfb63308 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-composable.js +++ b/services/document-updater/app/coffee/sharejs/types/text-composable.js @@ -1,3 +1,11 @@ +/* eslint-disable + no-cond-assign, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -20,8 +28,8 @@ // Snapshots are strings. let makeAppend; -const p = function() {}; //require('util').debug -const i = function() {}; //require('util').inspect +const p = function() {}; // require('util').debug +const i = function() {}; // require('util').inspect const exports = (typeof WEB !== 'undefined' && WEB !== null) ? {} : module.exports; @@ -36,7 +44,7 @@ const checkOp = function(op) { let last = null; return (() => { const result = []; - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { if (typeof(c) === 'object') { if (((c.i == null) || !(c.i.length > 0)) && ((c.d == null) || !(c.d.length > 0))) { throw new Error(`Invalid op component: ${i(c)}`); } } else { @@ -55,7 +63,7 @@ const checkOp = function(op) { // Exported for the randomOpGenerator. exports._makeAppend = (makeAppend = op => (function(component) { if ((component === 0) || (component.i === '') || (component.d === '')) { - return; + } else if (op.length === 0) { return op.push(component); } else if ((typeof(component) === 'number') && (typeof(op[op.length - 1]) === 'number')) { @@ -85,7 +93,7 @@ const makeTake = function(op) { const take = function(n, indivisableField) { let c; if (idx === op.length) { return null; } - //assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' + // assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' if (typeof(op[idx]) === 'number') { if ((n == null) || ((op[idx] - offset) <= n)) { @@ -132,7 +140,7 @@ const componentLength = function(component) { exports.normalize = function(op) { const newOp = []; const append = makeAppend(newOp); - for (let component of Array.from(op)) { append(component); } + for (const component of Array.from(op)) { append(component); } return newOp; }; @@ -145,7 +153,7 @@ exports.apply = function(str, op) { const pos = 0; const newDoc = []; - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { if (typeof(component) === 'number') { if (component > str.length) { throw new Error('The op is too long for this document'); } newDoc.push(str.slice(0, component)); @@ -158,7 +166,7 @@ exports.apply = function(str, op) { } } - if ('' !== str) { throw new Error("The applied op doesn't traverse the entire document"); } + if (str !== '') { throw new Error("The applied op doesn't traverse the entire document"); } return newDoc.join(''); }; @@ -197,7 +205,7 @@ exports.transform = function(op, otherOp, side) { // Otherwise, skip the inserted text. append(component.i.length); } else { // Delete. 
- //assert.ok component.d + // assert.ok component.d ({ length } = component.d); @@ -210,7 +218,7 @@ exports.transform = function(op, otherOp, side) { } else if (chunk.i != null) { append(chunk); } else { - //assert.ok chunk.d + // assert.ok chunk.d // The delete is unnecessary now. length -= chunk.d.length; } @@ -302,7 +310,7 @@ exports.invert = function(op) { const result = []; const append = makeAppend(result); - for (let component of Array.from(op)) { append(invertComponent(component)); } + for (const component of Array.from(op)) { append(invertComponent(component)); } return result; }; diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js b/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js index e3f4f95ea6..97bf606267 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js +++ b/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -27,7 +32,7 @@ const appendSkipChars = (op, doc, pos, maxlength) => (() => { return result; })(); -type['api'] = { +type.api = { 'provides': {'text':true}, // The number of characters in the string @@ -81,7 +86,7 @@ type['api'] = { let textPos = 0; const docPos = {index:0, offset:0}; - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { var part, remainder; if (typeof component === 'number') { // Skip diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2.js b/services/document-updater/app/coffee/sharejs/types/text-tp2.js index ab123d6ff7..4efcb05871 100644 --- a/services/document-updater/app/coffee/sharejs/types/text-tp2.js +++ b/services/document-updater/app/coffee/sharejs/types/text-tp2.js @@ -1,3 +1,11 @@ +/* eslint-disable + no-cond-assign, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -43,7 +51,7 @@ var type = { const doc = type.create(); doc.data = data; - for (let component of Array.from(data)) { + for (const component of Array.from(data)) { if (typeof component === 'string') { doc.charLength += component.length; doc.totalLength += component.length; @@ -62,7 +70,7 @@ const checkOp = function(op) { let last = null; return (() => { const result = []; - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { if (typeof(c) === 'object') { if (c.i !== undefined) { if (((typeof(c.i) !== 'string') || !(c.i.length > 0)) && ((typeof(c.i) !== 'number') || !(c.i > 0))) { throw new Error('Inserts must insert a string or a +ive number'); } @@ -147,7 +155,7 @@ type.apply = function(doc, op) { const newDoc = type.create(); const position = {index:0, offset:0}; - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { var part, remainder; if (typeof(component) === 'number') { remainder = component; @@ -177,7 +185,7 @@ type.apply = function(doc, op) { // Exported for the randomOpGenerator. 
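The text-composable apply shown a little earlier walks an op as a list of components: a plain number retains (copies) that many characters, { i: ... } inserts text, and { d: ... } deletes exactly the text it names; the retains plus deletes must traverse the whole document. A compressed restatement of that logic (the helper name applyComposable is ours) with a worked input:

// number = retain, { i } = insert, { d } = delete (must match the text).
function applyComposable(str, op) {
  let doc = ''
  for (const c of op) {
    if (typeof c === 'number') {
      doc += str.slice(0, c) // retain: copy c characters through
      str = str.slice(c)
    } else if (c.i !== undefined) {
      doc += c.i // insert new text
    } else {
      if (!str.startsWith(c.d)) throw new Error('Delete mismatch')
      str = str.slice(c.d.length) // delete: drop the named text
    }
  }
  if (str !== '') throw new Error("The applied op doesn't traverse the entire document")
  return doc
}

// applyComposable('abcdef', [2, { i: 'XY' }, { d: 'cd' }, 2]) === 'abXYef'
// (retain 2 + delete 'cd' + retain 2 covers all six input characters,
// satisfying the final traversal check)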
type._append = (append = function(op, component) { if ((component === 0) || (component.i === '') || (component.i === 0) || (component.d === 0)) { - return; + } else if (op.length === 0) { return op.push(component); } else { @@ -257,7 +265,7 @@ const componentLength = function(component) { // adjacent inserts and deletes. type.normalize = function(op) { const newOp = []; - for (let component of Array.from(op)) { append(newOp, component); } + for (const component of Array.from(op)) { append(newOp, component); } return newOp; }; diff --git a/services/document-updater/app/coffee/sharejs/types/text.js b/services/document-updater/app/coffee/sharejs/types/text.js index fed546d10f..66aee0f7d7 100644 --- a/services/document-updater/app/coffee/sharejs/types/text.js +++ b/services/document-updater/app/coffee/sharejs/types/text.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -46,13 +53,13 @@ const checkValidComponent = function(c) { }; const checkValidOp = function(op) { - for (let c of Array.from(op)) { checkValidComponent(c); } + for (const c of Array.from(op)) { checkValidComponent(c); } return true; }; text.apply = function(snapshot, op) { checkValidOp(op); - for (let component of Array.from(op)) { + for (const component of Array.from(op)) { if (component.i != null) { snapshot = strInject(snapshot, component.p, component.i); } else if (component.d != null) { @@ -97,7 +104,7 @@ text.compose = function(op1, op2) { checkValidOp(op2); const newOp = op1.slice(); - for (let c of Array.from(op2)) { append(newOp, c); } + for (const c of Array.from(op2)) { append(newOp, c); } return newOp; }; @@ -116,7 +123,7 @@ text.normalize = function(op) { // so this is probably the least bad solution. if ((op.i != null) || (op.p != null)) { op = [op]; } - for (let c of Array.from(op)) { + for (const c of Array.from(op)) { if (c.p == null) { c.p = 0; } append(newOp, c); } @@ -161,7 +168,7 @@ const transformPosition = function(pos, c, insertAfter) { // is pushed after an insert (true) or before it (false). text.transformCursor = function(position, op, side) { const insertAfter = side === 'right'; - for (let c of Array.from(op)) { position = transformPosition(position, c, insertAfter); } + for (const c of Array.from(op)) { position = transformPosition(position, c, insertAfter); } return position; }; diff --git a/services/document-updater/app/coffee/sharejs/types/web-prelude.js b/services/document-updater/app/coffee/sharejs/types/web-prelude.js index b7252728e9..e6a7529a52 100644 --- a/services/document-updater/app/coffee/sharejs/types/web-prelude.js +++ b/services/document-updater/app/coffee/sharejs/types/web-prelude.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. // This is included at the top of each compiled type file for the web. 
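The text.js hunks above cover the plain text OT type that document-updater actually serves: ops are arrays of position-based components, { i, p } to insert and { d, p } to delete, and transformCursor shifts a caret across a concurrent op. A usage sketch (the require path matches the layout at this point in the series; in the web build the type hangs off window.sharejs instead):

const text = require('./app/coffee/sharejs/types/text')

text.apply('hello', [{ i: ' world', p: 5 }]) // -> 'hello world'
text.apply('hello world', [{ d: ' world', p: 5 }]) // -> 'hello'

// A cursor at position 5, concurrent with an insert at position 5:
// side 'right' pushes the cursor after the inserted text, side 'left'
// keeps it in front.
text.transformCursor(5, [{ i: 'XX', p: 5 }], 'right') // -> 7
text.transformCursor(5, [{ i: 'XX', p: 5 }], 'left') // -> 5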
/** @@ -7,5 +12,5 @@ const WEB = true; -const exports = window['sharejs']; +const exports = window.sharejs; diff --git a/services/document-updater/app/coffee/sharejs/web-prelude.js b/services/document-updater/app/coffee/sharejs/web-prelude.js index b7252728e9..e6a7529a52 100644 --- a/services/document-updater/app/coffee/sharejs/web-prelude.js +++ b/services/document-updater/app/coffee/sharejs/web-prelude.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. // This is included at the top of each compiled type file for the web. /** @@ -7,5 +12,5 @@ const WEB = true; -const exports = window['sharejs']; +const exports = window.sharejs; From a519980c10a6f1d9208a0efb660355ed41078820 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:09:23 +0200 Subject: [PATCH 612/769] decaffeinate: rename app/coffee dir to app/js --- .../document-updater/app/{coffee => js}/DeleteQueueManager.js | 0 services/document-updater/app/{coffee => js}/DiffCodec.js | 0 services/document-updater/app/{coffee => js}/DispatchManager.js | 0 services/document-updater/app/{coffee => js}/DocumentManager.js | 0 services/document-updater/app/{coffee => js}/Errors.js | 0 services/document-updater/app/{coffee => js}/HistoryManager.js | 0 .../document-updater/app/{coffee => js}/HistoryRedisManager.js | 0 services/document-updater/app/{coffee => js}/HttpController.js | 0 services/document-updater/app/{coffee => js}/LockManager.js | 0 services/document-updater/app/{coffee => js}/LoggerSerializers.js | 0 services/document-updater/app/{coffee => js}/Metrics.js | 0 .../document-updater/app/{coffee => js}/PersistenceManager.js | 0 services/document-updater/app/{coffee => js}/Profiler.js | 0 services/document-updater/app/{coffee => js}/ProjectFlusher.js | 0 .../app/{coffee => js}/ProjectHistoryRedisManager.js | 0 services/document-updater/app/{coffee => js}/ProjectManager.js | 0 services/document-updater/app/{coffee => js}/RangesManager.js | 0 services/document-updater/app/{coffee => js}/RangesTracker.js | 0 services/document-updater/app/{coffee => js}/RateLimitManager.js | 0 .../document-updater/app/{coffee => js}/RealTimeRedisManager.js | 0 services/document-updater/app/{coffee => js}/RedisManager.js | 0 services/document-updater/app/{coffee => js}/ShareJsDB.js | 0 .../document-updater/app/{coffee => js}/ShareJsUpdateManager.js | 0 services/document-updater/app/{coffee => js}/SnapshotManager.js | 0 services/document-updater/app/{coffee => js}/UpdateKeys.js | 0 services/document-updater/app/{coffee => js}/UpdateManager.js | 0 services/document-updater/app/{coffee => js}/mongojs.js | 0 services/document-updater/app/{coffee => js}/sharejs/README.md | 0 services/document-updater/app/{coffee => js}/sharejs/count.js | 0 services/document-updater/app/{coffee => js}/sharejs/helpers.js | 0 services/document-updater/app/{coffee => js}/sharejs/index.js | 0 services/document-updater/app/{coffee => js}/sharejs/json-api.js | 0 services/document-updater/app/{coffee => js}/sharejs/json.js | 0 services/document-updater/app/{coffee => js}/sharejs/model.js | 0 .../document-updater/app/{coffee => js}/sharejs/server/model.js | 0 .../app/{coffee => js}/sharejs/server/syncqueue.js | 0 services/document-updater/app/{coffee => js}/sharejs/simple.js | 0 services/document-updater/app/{coffee => js}/sharejs/syncqueue.js | 0 services/document-updater/app/{coffee => js}/sharejs/text-api.js | 0 .../app/{coffee => js}/sharejs/text-composable-api.js | 0 
.../app/{coffee => js}/sharejs/text-composable.js | 0 .../document-updater/app/{coffee => js}/sharejs/text-tp2-api.js | 0 services/document-updater/app/{coffee => js}/sharejs/text-tp2.js | 0 services/document-updater/app/{coffee => js}/sharejs/text.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/count.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/helpers.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/index.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/json-api.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/json.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/model.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/simple.js | 0 .../app/{coffee => js}/sharejs/types/syncqueue.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/text-api.js | 0 .../app/{coffee => js}/sharejs/types/text-composable-api.js | 0 .../app/{coffee => js}/sharejs/types/text-composable.js | 0 .../app/{coffee => js}/sharejs/types/text-tp2-api.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/text-tp2.js | 0 .../document-updater/app/{coffee => js}/sharejs/types/text.js | 0 .../app/{coffee => js}/sharejs/types/web-prelude.js | 0 .../document-updater/app/{coffee => js}/sharejs/web-prelude.js | 0 60 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/app/{coffee => js}/DeleteQueueManager.js (100%) rename services/document-updater/app/{coffee => js}/DiffCodec.js (100%) rename services/document-updater/app/{coffee => js}/DispatchManager.js (100%) rename services/document-updater/app/{coffee => js}/DocumentManager.js (100%) rename services/document-updater/app/{coffee => js}/Errors.js (100%) rename services/document-updater/app/{coffee => js}/HistoryManager.js (100%) rename services/document-updater/app/{coffee => js}/HistoryRedisManager.js (100%) rename services/document-updater/app/{coffee => js}/HttpController.js (100%) rename services/document-updater/app/{coffee => js}/LockManager.js (100%) rename services/document-updater/app/{coffee => js}/LoggerSerializers.js (100%) rename services/document-updater/app/{coffee => js}/Metrics.js (100%) rename services/document-updater/app/{coffee => js}/PersistenceManager.js (100%) rename services/document-updater/app/{coffee => js}/Profiler.js (100%) rename services/document-updater/app/{coffee => js}/ProjectFlusher.js (100%) rename services/document-updater/app/{coffee => js}/ProjectHistoryRedisManager.js (100%) rename services/document-updater/app/{coffee => js}/ProjectManager.js (100%) rename services/document-updater/app/{coffee => js}/RangesManager.js (100%) rename services/document-updater/app/{coffee => js}/RangesTracker.js (100%) rename services/document-updater/app/{coffee => js}/RateLimitManager.js (100%) rename services/document-updater/app/{coffee => js}/RealTimeRedisManager.js (100%) rename services/document-updater/app/{coffee => js}/RedisManager.js (100%) rename services/document-updater/app/{coffee => js}/ShareJsDB.js (100%) rename services/document-updater/app/{coffee => js}/ShareJsUpdateManager.js (100%) rename services/document-updater/app/{coffee => js}/SnapshotManager.js (100%) rename services/document-updater/app/{coffee => js}/UpdateKeys.js (100%) rename services/document-updater/app/{coffee => js}/UpdateManager.js (100%) rename services/document-updater/app/{coffee => js}/mongojs.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/README.md (100%) rename services/document-updater/app/{coffee => 
js}/sharejs/count.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/helpers.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/index.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/json-api.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/json.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/model.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/server/model.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/server/syncqueue.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/simple.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/syncqueue.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/text-api.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/text-composable-api.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/text-composable.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/text-tp2-api.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/text-tp2.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/text.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/count.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/helpers.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/index.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/json-api.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/json.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/model.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/simple.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/syncqueue.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/text-api.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/text-composable-api.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/text-composable.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/text-tp2-api.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/text-tp2.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/text.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/types/web-prelude.js (100%) rename services/document-updater/app/{coffee => js}/sharejs/web-prelude.js (100%) diff --git a/services/document-updater/app/coffee/DeleteQueueManager.js b/services/document-updater/app/js/DeleteQueueManager.js similarity index 100% rename from services/document-updater/app/coffee/DeleteQueueManager.js rename to services/document-updater/app/js/DeleteQueueManager.js diff --git a/services/document-updater/app/coffee/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js similarity index 100% rename from services/document-updater/app/coffee/DiffCodec.js rename to services/document-updater/app/js/DiffCodec.js diff --git a/services/document-updater/app/coffee/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js similarity index 100% rename from services/document-updater/app/coffee/DispatchManager.js rename to services/document-updater/app/js/DispatchManager.js diff --git a/services/document-updater/app/coffee/DocumentManager.js b/services/document-updater/app/js/DocumentManager.js similarity index 100% 
rename from services/document-updater/app/coffee/DocumentManager.js rename to services/document-updater/app/js/DocumentManager.js diff --git a/services/document-updater/app/coffee/Errors.js b/services/document-updater/app/js/Errors.js similarity index 100% rename from services/document-updater/app/coffee/Errors.js rename to services/document-updater/app/js/Errors.js diff --git a/services/document-updater/app/coffee/HistoryManager.js b/services/document-updater/app/js/HistoryManager.js similarity index 100% rename from services/document-updater/app/coffee/HistoryManager.js rename to services/document-updater/app/js/HistoryManager.js diff --git a/services/document-updater/app/coffee/HistoryRedisManager.js b/services/document-updater/app/js/HistoryRedisManager.js similarity index 100% rename from services/document-updater/app/coffee/HistoryRedisManager.js rename to services/document-updater/app/js/HistoryRedisManager.js diff --git a/services/document-updater/app/coffee/HttpController.js b/services/document-updater/app/js/HttpController.js similarity index 100% rename from services/document-updater/app/coffee/HttpController.js rename to services/document-updater/app/js/HttpController.js diff --git a/services/document-updater/app/coffee/LockManager.js b/services/document-updater/app/js/LockManager.js similarity index 100% rename from services/document-updater/app/coffee/LockManager.js rename to services/document-updater/app/js/LockManager.js diff --git a/services/document-updater/app/coffee/LoggerSerializers.js b/services/document-updater/app/js/LoggerSerializers.js similarity index 100% rename from services/document-updater/app/coffee/LoggerSerializers.js rename to services/document-updater/app/js/LoggerSerializers.js diff --git a/services/document-updater/app/coffee/Metrics.js b/services/document-updater/app/js/Metrics.js similarity index 100% rename from services/document-updater/app/coffee/Metrics.js rename to services/document-updater/app/js/Metrics.js diff --git a/services/document-updater/app/coffee/PersistenceManager.js b/services/document-updater/app/js/PersistenceManager.js similarity index 100% rename from services/document-updater/app/coffee/PersistenceManager.js rename to services/document-updater/app/js/PersistenceManager.js diff --git a/services/document-updater/app/coffee/Profiler.js b/services/document-updater/app/js/Profiler.js similarity index 100% rename from services/document-updater/app/coffee/Profiler.js rename to services/document-updater/app/js/Profiler.js diff --git a/services/document-updater/app/coffee/ProjectFlusher.js b/services/document-updater/app/js/ProjectFlusher.js similarity index 100% rename from services/document-updater/app/coffee/ProjectFlusher.js rename to services/document-updater/app/js/ProjectFlusher.js diff --git a/services/document-updater/app/coffee/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js similarity index 100% rename from services/document-updater/app/coffee/ProjectHistoryRedisManager.js rename to services/document-updater/app/js/ProjectHistoryRedisManager.js diff --git a/services/document-updater/app/coffee/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js similarity index 100% rename from services/document-updater/app/coffee/ProjectManager.js rename to services/document-updater/app/js/ProjectManager.js diff --git a/services/document-updater/app/coffee/RangesManager.js b/services/document-updater/app/js/RangesManager.js similarity index 100% rename from 
services/document-updater/app/coffee/RangesManager.js rename to services/document-updater/app/js/RangesManager.js diff --git a/services/document-updater/app/coffee/RangesTracker.js b/services/document-updater/app/js/RangesTracker.js similarity index 100% rename from services/document-updater/app/coffee/RangesTracker.js rename to services/document-updater/app/js/RangesTracker.js diff --git a/services/document-updater/app/coffee/RateLimitManager.js b/services/document-updater/app/js/RateLimitManager.js similarity index 100% rename from services/document-updater/app/coffee/RateLimitManager.js rename to services/document-updater/app/js/RateLimitManager.js diff --git a/services/document-updater/app/coffee/RealTimeRedisManager.js b/services/document-updater/app/js/RealTimeRedisManager.js similarity index 100% rename from services/document-updater/app/coffee/RealTimeRedisManager.js rename to services/document-updater/app/js/RealTimeRedisManager.js diff --git a/services/document-updater/app/coffee/RedisManager.js b/services/document-updater/app/js/RedisManager.js similarity index 100% rename from services/document-updater/app/coffee/RedisManager.js rename to services/document-updater/app/js/RedisManager.js diff --git a/services/document-updater/app/coffee/ShareJsDB.js b/services/document-updater/app/js/ShareJsDB.js similarity index 100% rename from services/document-updater/app/coffee/ShareJsDB.js rename to services/document-updater/app/js/ShareJsDB.js diff --git a/services/document-updater/app/coffee/ShareJsUpdateManager.js b/services/document-updater/app/js/ShareJsUpdateManager.js similarity index 100% rename from services/document-updater/app/coffee/ShareJsUpdateManager.js rename to services/document-updater/app/js/ShareJsUpdateManager.js diff --git a/services/document-updater/app/coffee/SnapshotManager.js b/services/document-updater/app/js/SnapshotManager.js similarity index 100% rename from services/document-updater/app/coffee/SnapshotManager.js rename to services/document-updater/app/js/SnapshotManager.js diff --git a/services/document-updater/app/coffee/UpdateKeys.js b/services/document-updater/app/js/UpdateKeys.js similarity index 100% rename from services/document-updater/app/coffee/UpdateKeys.js rename to services/document-updater/app/js/UpdateKeys.js diff --git a/services/document-updater/app/coffee/UpdateManager.js b/services/document-updater/app/js/UpdateManager.js similarity index 100% rename from services/document-updater/app/coffee/UpdateManager.js rename to services/document-updater/app/js/UpdateManager.js diff --git a/services/document-updater/app/coffee/mongojs.js b/services/document-updater/app/js/mongojs.js similarity index 100% rename from services/document-updater/app/coffee/mongojs.js rename to services/document-updater/app/js/mongojs.js diff --git a/services/document-updater/app/coffee/sharejs/README.md b/services/document-updater/app/js/sharejs/README.md similarity index 100% rename from services/document-updater/app/coffee/sharejs/README.md rename to services/document-updater/app/js/sharejs/README.md diff --git a/services/document-updater/app/coffee/sharejs/count.js b/services/document-updater/app/js/sharejs/count.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/count.js rename to services/document-updater/app/js/sharejs/count.js diff --git a/services/document-updater/app/coffee/sharejs/helpers.js b/services/document-updater/app/js/sharejs/helpers.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/helpers.js 
rename to services/document-updater/app/js/sharejs/helpers.js diff --git a/services/document-updater/app/coffee/sharejs/index.js b/services/document-updater/app/js/sharejs/index.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/index.js rename to services/document-updater/app/js/sharejs/index.js diff --git a/services/document-updater/app/coffee/sharejs/json-api.js b/services/document-updater/app/js/sharejs/json-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/json-api.js rename to services/document-updater/app/js/sharejs/json-api.js diff --git a/services/document-updater/app/coffee/sharejs/json.js b/services/document-updater/app/js/sharejs/json.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/json.js rename to services/document-updater/app/js/sharejs/json.js diff --git a/services/document-updater/app/coffee/sharejs/model.js b/services/document-updater/app/js/sharejs/model.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/model.js rename to services/document-updater/app/js/sharejs/model.js diff --git a/services/document-updater/app/coffee/sharejs/server/model.js b/services/document-updater/app/js/sharejs/server/model.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/server/model.js rename to services/document-updater/app/js/sharejs/server/model.js diff --git a/services/document-updater/app/coffee/sharejs/server/syncqueue.js b/services/document-updater/app/js/sharejs/server/syncqueue.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/server/syncqueue.js rename to services/document-updater/app/js/sharejs/server/syncqueue.js diff --git a/services/document-updater/app/coffee/sharejs/simple.js b/services/document-updater/app/js/sharejs/simple.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/simple.js rename to services/document-updater/app/js/sharejs/simple.js diff --git a/services/document-updater/app/coffee/sharejs/syncqueue.js b/services/document-updater/app/js/sharejs/syncqueue.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/syncqueue.js rename to services/document-updater/app/js/sharejs/syncqueue.js diff --git a/services/document-updater/app/coffee/sharejs/text-api.js b/services/document-updater/app/js/sharejs/text-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-api.js rename to services/document-updater/app/js/sharejs/text-api.js diff --git a/services/document-updater/app/coffee/sharejs/text-composable-api.js b/services/document-updater/app/js/sharejs/text-composable-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-composable-api.js rename to services/document-updater/app/js/sharejs/text-composable-api.js diff --git a/services/document-updater/app/coffee/sharejs/text-composable.js b/services/document-updater/app/js/sharejs/text-composable.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-composable.js rename to services/document-updater/app/js/sharejs/text-composable.js diff --git a/services/document-updater/app/coffee/sharejs/text-tp2-api.js b/services/document-updater/app/js/sharejs/text-tp2-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-tp2-api.js rename to services/document-updater/app/js/sharejs/text-tp2-api.js diff --git 
a/services/document-updater/app/coffee/sharejs/text-tp2.js b/services/document-updater/app/js/sharejs/text-tp2.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text-tp2.js rename to services/document-updater/app/js/sharejs/text-tp2.js diff --git a/services/document-updater/app/coffee/sharejs/text.js b/services/document-updater/app/js/sharejs/text.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/text.js rename to services/document-updater/app/js/sharejs/text.js diff --git a/services/document-updater/app/coffee/sharejs/types/count.js b/services/document-updater/app/js/sharejs/types/count.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/count.js rename to services/document-updater/app/js/sharejs/types/count.js diff --git a/services/document-updater/app/coffee/sharejs/types/helpers.js b/services/document-updater/app/js/sharejs/types/helpers.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/helpers.js rename to services/document-updater/app/js/sharejs/types/helpers.js diff --git a/services/document-updater/app/coffee/sharejs/types/index.js b/services/document-updater/app/js/sharejs/types/index.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/index.js rename to services/document-updater/app/js/sharejs/types/index.js diff --git a/services/document-updater/app/coffee/sharejs/types/json-api.js b/services/document-updater/app/js/sharejs/types/json-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/json-api.js rename to services/document-updater/app/js/sharejs/types/json-api.js diff --git a/services/document-updater/app/coffee/sharejs/types/json.js b/services/document-updater/app/js/sharejs/types/json.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/json.js rename to services/document-updater/app/js/sharejs/types/json.js diff --git a/services/document-updater/app/coffee/sharejs/types/model.js b/services/document-updater/app/js/sharejs/types/model.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/model.js rename to services/document-updater/app/js/sharejs/types/model.js diff --git a/services/document-updater/app/coffee/sharejs/types/simple.js b/services/document-updater/app/js/sharejs/types/simple.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/simple.js rename to services/document-updater/app/js/sharejs/types/simple.js diff --git a/services/document-updater/app/coffee/sharejs/types/syncqueue.js b/services/document-updater/app/js/sharejs/types/syncqueue.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/syncqueue.js rename to services/document-updater/app/js/sharejs/types/syncqueue.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-api.js b/services/document-updater/app/js/sharejs/types/text-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-api.js rename to services/document-updater/app/js/sharejs/types/text-api.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-composable-api.js b/services/document-updater/app/js/sharejs/types/text-composable-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-composable-api.js rename to services/document-updater/app/js/sharejs/types/text-composable-api.js diff 
--git a/services/document-updater/app/coffee/sharejs/types/text-composable.js b/services/document-updater/app/js/sharejs/types/text-composable.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-composable.js rename to services/document-updater/app/js/sharejs/types/text-composable.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2-api.js b/services/document-updater/app/js/sharejs/types/text-tp2-api.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-tp2-api.js rename to services/document-updater/app/js/sharejs/types/text-tp2-api.js diff --git a/services/document-updater/app/coffee/sharejs/types/text-tp2.js b/services/document-updater/app/js/sharejs/types/text-tp2.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text-tp2.js rename to services/document-updater/app/js/sharejs/types/text-tp2.js diff --git a/services/document-updater/app/coffee/sharejs/types/text.js b/services/document-updater/app/js/sharejs/types/text.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/text.js rename to services/document-updater/app/js/sharejs/types/text.js diff --git a/services/document-updater/app/coffee/sharejs/types/web-prelude.js b/services/document-updater/app/js/sharejs/types/web-prelude.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/types/web-prelude.js rename to services/document-updater/app/js/sharejs/types/web-prelude.js diff --git a/services/document-updater/app/coffee/sharejs/web-prelude.js b/services/document-updater/app/js/sharejs/web-prelude.js similarity index 100% rename from services/document-updater/app/coffee/sharejs/web-prelude.js rename to services/document-updater/app/js/sharejs/web-prelude.js From dbf9e88dc33956ac56e2ba5b93e242d4ac78c97a Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:09:33 +0200 Subject: [PATCH 613/769] prettier: convert app/js decaffeinated files to Prettier format --- .../app/js/DeleteQueueManager.js | 189 ++- services/document-updater/app/js/DiffCodec.js | 76 +- .../app/js/DispatchManager.js | 170 +- .../app/js/DocumentManager.js | 1022 ++++++++---- services/document-updater/app/js/Errors.js | 70 +- .../document-updater/app/js/HistoryManager.js | 361 ++-- .../app/js/HistoryRedisManager.js | 48 +- .../document-updater/app/js/HttpController.js | 747 +++++---- .../document-updater/app/js/LockManager.js | 259 +-- .../app/js/LoggerSerializers.js | 77 +- services/document-updater/app/js/Metrics.js | 2 +- .../app/js/PersistenceManager.js | 280 ++-- services/document-updater/app/js/Profiler.js | 93 +- .../document-updater/app/js/ProjectFlusher.js | 198 ++- .../app/js/ProjectHistoryRedisManager.js | 241 ++- .../document-updater/app/js/ProjectManager.js | 565 ++++--- .../document-updater/app/js/RangesManager.js | 241 +-- .../document-updater/app/js/RangesTracker.js | 1458 +++++++++-------- .../app/js/RateLimitManager.js | 111 +- .../app/js/RealTimeRedisManager.js | 126 +- .../document-updater/app/js/RedisManager.js | 1124 ++++++++----- services/document-updater/app/js/ShareJsDB.js | 119 +- .../app/js/ShareJsUpdateManager.js | 198 ++- .../app/js/SnapshotManager.js | 81 +- .../document-updater/app/js/UpdateKeys.js | 10 +- .../document-updater/app/js/UpdateManager.js | 599 ++++--- services/document-updater/app/js/mongojs.js | 30 +- .../document-updater/app/js/sharejs/count.js | 39 +- .../app/js/sharejs/helpers.js | 111 +- 
.../document-updater/app/js/sharejs/index.js | 24 +- .../app/js/sharejs/json-api.js | 457 +++--- .../document-updater/app/js/sharejs/json.js | 621 ++++--- .../document-updater/app/js/sharejs/model.js | 835 ++++++---- .../app/js/sharejs/server/model.js | 844 ++++++---- .../app/js/sharejs/server/syncqueue.js | 50 +- .../document-updater/app/js/sharejs/simple.js | 26 +- .../app/js/sharejs/syncqueue.js | 50 +- .../app/js/sharejs/text-api.js | 52 +- .../app/js/sharejs/text-composable-api.js | 69 +- .../app/js/sharejs/text-composable.js | 450 ++--- .../app/js/sharejs/text-tp2-api.js | 132 +- .../app/js/sharejs/text-tp2.js | 549 ++++--- .../document-updater/app/js/sharejs/text.js | 280 ++-- .../app/js/sharejs/types/count.js | 39 +- .../app/js/sharejs/types/helpers.js | 111 +- .../app/js/sharejs/types/index.js | 24 +- .../app/js/sharejs/types/json-api.js | 457 +++--- .../app/js/sharejs/types/json.js | 621 ++++--- .../app/js/sharejs/types/model.js | 835 ++++++---- .../app/js/sharejs/types/simple.js | 26 +- .../app/js/sharejs/types/syncqueue.js | 50 +- .../app/js/sharejs/types/text-api.js | 52 +- .../js/sharejs/types/text-composable-api.js | 69 +- .../app/js/sharejs/types/text-composable.js | 450 ++--- .../app/js/sharejs/types/text-tp2-api.js | 132 +- .../app/js/sharejs/types/text-tp2.js | 549 ++++--- .../app/js/sharejs/types/text.js | 372 +++-- .../app/js/sharejs/types/web-prelude.js | 6 +- .../app/js/sharejs/web-prelude.js | 6 +- 59 files changed, 10071 insertions(+), 6812 deletions(-) diff --git a/services/document-updater/app/js/DeleteQueueManager.js b/services/document-updater/app/js/DeleteQueueManager.js index a970f5825f..fc5bb904e3 100644 --- a/services/document-updater/app/js/DeleteQueueManager.js +++ b/services/document-updater/app/js/DeleteQueueManager.js @@ -12,22 +12,22 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let DeleteQueueManager; -const Settings = require('settings-sharelatex'); -const RedisManager = require("./RedisManager"); -const ProjectManager = require("./ProjectManager"); -const logger = require("logger-sharelatex"); -const metrics = require("./Metrics"); -const async = require("async"); +let DeleteQueueManager +const Settings = require('settings-sharelatex') +const RedisManager = require('./RedisManager') +const ProjectManager = require('./ProjectManager') +const logger = require('logger-sharelatex') +const metrics = require('./Metrics') +const async = require('async') // Maintain a sorted set of project flushAndDelete requests, ordered by timestamp // (ZADD), and process them from oldest to newest. A flushAndDelete request comes -// from real-time and is triggered when a user leaves a project. +// from real-time and is triggered when a user leaves a project. // // The aim is to remove the project from redis 5 minutes after the last request // if there has been no activity (document updates) in that time. If there is // activity we can expect a further flushAndDelete request when the editing user -// leaves the project. +// leaves the project. // // If a new flushAndDelete request comes in while an existing request is already // in the queue we update the timestamp as we can postpone flushing further. @@ -35,75 +35,110 @@ const async = require("async"); // Documents are processed by checking the queue, seeing if the first entry is // older than 5 minutes, and popping it from the queue in that case. 
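The comment block above is the design in prose; in Redis terms it is a sorted set keyed by request timestamp. A minimal sketch of the operations involved (the key name 'DeletingProjects' is illustrative, and the two-step pop is a simplification rather than the atomic version the service would need):

const Settings = require('settings-sharelatex')
const rclient = require('redis-sharelatex').createClient(
  Settings.redis.documentupdater
)

// Enqueue (or postpone) a flushAndDelete request. Re-adding an existing
// project just refreshes its score, pushing the flush further out.
function queueFlushAndDelete(projectId, callback) {
  rclient.zadd('DeletingProjects', Date.now(), projectId, callback)
}

// Pop the oldest entry, but only if it is older than the cutoff,
// i.e. no new request has arrived for that project in ~5 minutes.
function getNextProjectToFlushAndDelete(callback) {
  const cutoff = Date.now() - 5 * 60 * 1000
  rclient.zrangebyscore(
    'DeletingProjects', 0, cutoff, 'LIMIT', 0, 1,
    function (error, results) {
      if (error) return callback(error)
      if (results.length === 0) return callback() // nothing old enough yet
      const projectId = results[0]
      rclient.zrem('DeletingProjects', projectId, (error) =>
        callback(error, projectId)
      )
    }
  )
}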
-module.exports = (DeleteQueueManager = { - flushAndDeleteOldProjects(options, callback) { - const startTime = Date.now(); - const cutoffTime = (startTime - options.min_delete_age) + (100 * (Math.random() - 0.5)); - let count = 0; +module.exports = DeleteQueueManager = { + flushAndDeleteOldProjects(options, callback) { + const startTime = Date.now() + const cutoffTime = + startTime - options.min_delete_age + 100 * (Math.random() - 0.5) + let count = 0 - const flushProjectIfNotModified = (project_id, flushTimestamp, cb) => ProjectManager.getProjectDocsTimestamps(project_id, function(err, timestamps) { - if (err != null) { return callback(err); } - if (timestamps.length === 0) { - logger.log({project_id}, "skipping flush of queued project - no timestamps"); - return cb(); + const flushProjectIfNotModified = (project_id, flushTimestamp, cb) => + ProjectManager.getProjectDocsTimestamps(project_id, function ( + err, + timestamps + ) { + if (err != null) { + return callback(err) + } + if (timestamps.length === 0) { + logger.log( + { project_id }, + 'skipping flush of queued project - no timestamps' + ) + return cb() + } + // are any of the timestamps newer than the time the project was flushed? + for (const timestamp of Array.from(timestamps)) { + if (timestamp > flushTimestamp) { + metrics.inc('queued-delete-skipped') + logger.debug( + { project_id, timestamps, flushTimestamp }, + 'found newer timestamp, will skip delete' + ) + return cb() + } + } + logger.log({ project_id, flushTimestamp }, 'flushing queued project') + return ProjectManager.flushAndDeleteProjectWithLocks( + project_id, + { skip_history_flush: false }, + function (err) { + if (err != null) { + logger.err({ project_id, err }, 'error flushing queued project') } - // are any of the timestamps newer than the time the project was flushed? 
- for (const timestamp of Array.from(timestamps)) { - if (timestamp > flushTimestamp) { - metrics.inc("queued-delete-skipped"); - logger.debug({project_id, timestamps, flushTimestamp}, "found newer timestamp, will skip delete"); - return cb(); - } - } - logger.log({project_id, flushTimestamp}, "flushing queued project"); - return ProjectManager.flushAndDeleteProjectWithLocks(project_id, {skip_history_flush: false}, function(err) { - if (err != null) { - logger.err({project_id, err}, "error flushing queued project"); - } - metrics.inc("queued-delete-completed"); - return cb(null, true); - }); - }); + metrics.inc('queued-delete-completed') + return cb(null, true) + } + ) + }) - var flushNextProject = function() { - const now = Date.now(); - if ((now - startTime) > options.timeout) { - logger.log("hit time limit on flushing old projects"); - return callback(null, count); - } - if (count > options.limit) { - logger.log("hit count limit on flushing old projects"); - return callback(null, count); - } - return RedisManager.getNextProjectToFlushAndDelete(cutoffTime, function(err, project_id, flushTimestamp, queueLength) { - if (err != null) { return callback(err); } - if ((project_id == null)) { return callback(null, count); } - logger.log({project_id, queueLength}, "flushing queued project"); - metrics.globalGauge("queued-flush-backlog", queueLength); - return flushProjectIfNotModified(project_id, flushTimestamp, function(err, flushed) { - if (flushed) { count++; } - return flushNextProject(); - }); - }); - }; - - return flushNextProject(); - }, - - startBackgroundFlush() { - const SHORT_DELAY = 10; - const LONG_DELAY = 1000; - var doFlush = function() { - if (Settings.shuttingDown) { - logger.warn("discontinuing background flush due to shutdown"); - return; - } - return DeleteQueueManager.flushAndDeleteOldProjects({ - timeout:1000, - min_delete_age:3*60*1000, - limit:1000 // high value, to ensure we always flush enough projects - }, (err, flushed) => setTimeout(doFlush, (flushed > 10 ? SHORT_DELAY : LONG_DELAY))); - }; - return doFlush(); + var flushNextProject = function () { + const now = Date.now() + if (now - startTime > options.timeout) { + logger.log('hit time limit on flushing old projects') + return callback(null, count) + } + if (count > options.limit) { + logger.log('hit count limit on flushing old projects') + return callback(null, count) + } + return RedisManager.getNextProjectToFlushAndDelete(cutoffTime, function ( + err, + project_id, + flushTimestamp, + queueLength + ) { + if (err != null) { + return callback(err) + } + if (project_id == null) { + return callback(null, count) + } + logger.log({ project_id, queueLength }, 'flushing queued project') + metrics.globalGauge('queued-flush-backlog', queueLength) + return flushProjectIfNotModified(project_id, flushTimestamp, function ( + err, + flushed + ) { + if (flushed) { + count++ + } + return flushNextProject() + }) + }) } -}); + + return flushNextProject() + }, + + startBackgroundFlush() { + const SHORT_DELAY = 10 + const LONG_DELAY = 1000 + var doFlush = function () { + if (Settings.shuttingDown) { + logger.warn('discontinuing background flush due to shutdown') + return + } + return DeleteQueueManager.flushAndDeleteOldProjects( + { + timeout: 1000, + min_delete_age: 3 * 60 * 1000, + limit: 1000 // high value, to ensure we always flush enough projects + }, + (err, flushed) => + setTimeout(doFlush, flushed > 10 ? 
SHORT_DELAY : LONG_DELAY) + ) + } + return doFlush() + } +} diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 8b87cee1d3..22251800b2 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -14,44 +14,44 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let DiffCodec; -const { - diff_match_patch -} = require("../lib/diff_match_patch"); -const dmp = new diff_match_patch(); +let DiffCodec +const { diff_match_patch } = require('../lib/diff_match_patch') +const dmp = new diff_match_patch() -module.exports = (DiffCodec = { - ADDED: 1, - REMOVED: -1, - UNCHANGED: 0, +module.exports = DiffCodec = { + ADDED: 1, + REMOVED: -1, + UNCHANGED: 0, - diffAsShareJsOp(before, after, callback) { - if (callback == null) { callback = function(error, ops) {}; } - const diffs = dmp.diff_main(before.join("\n"), after.join("\n")); - dmp.diff_cleanupSemantic(diffs); + diffAsShareJsOp(before, after, callback) { + if (callback == null) { + callback = function (error, ops) {} + } + const diffs = dmp.diff_main(before.join('\n'), after.join('\n')) + dmp.diff_cleanupSemantic(diffs) - const ops = []; - let position = 0; - for (const diff of Array.from(diffs)) { - const type = diff[0]; - const content = diff[1]; - if (type === this.ADDED) { - ops.push({ - i: content, - p: position - }); - position += content.length; - } else if (type === this.REMOVED) { - ops.push({ - d: content, - p: position - }); - } else if (type === this.UNCHANGED) { - position += content.length; - } else { - throw "Unknown type"; - } - } - return callback(null, ops); - } -}); + const ops = [] + let position = 0 + for (const diff of Array.from(diffs)) { + const type = diff[0] + const content = diff[1] + if (type === this.ADDED) { + ops.push({ + i: content, + p: position + }) + position += content.length + } else if (type === this.REMOVED) { + ops.push({ + d: content, + p: position + }) + } else if (type === this.UNCHANGED) { + position += content.length + } else { + throw 'Unknown type' + } + } + return callback(null, ops) + } +} diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js index 8f11378a9c..2b9b2c4fb7 100644 --- a/services/document-updater/app/js/DispatchManager.js +++ b/services/document-updater/app/js/DispatchManager.js @@ -14,75 +14,105 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let DispatchManager; -const Settings = require('settings-sharelatex'); -const logger = require('logger-sharelatex'); -const Keys = require('./UpdateKeys'); -const redis = require("redis-sharelatex"); -const Errors = require("./Errors"); +let DispatchManager +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const Keys = require('./UpdateKeys') +const redis = require('redis-sharelatex') +const Errors = require('./Errors') -const UpdateManager = require('./UpdateManager'); -const Metrics = require('./Metrics'); -const RateLimitManager = require('./RateLimitManager'); +const UpdateManager = require('./UpdateManager') +const Metrics = require('./Metrics') +const RateLimitManager = require('./RateLimitManager') -module.exports = (DispatchManager = { - createDispatcher(RateLimiter) { - const client = 
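DiffCodec above is the bridge between whole-document HTTP writes and the OT pipeline: both line arrays are joined with newlines, diffed with diff_match_patch, and each added or removed fragment becomes a position-based ShareJS component (unchanged fragments only advance the position counter; deletes do not advance it). A worked call, with an illustrative require path:

const DiffCodec = require('./app/js/DiffCodec')

DiffCodec.diffAsShareJsOp(
  ['hello world'],
  ['hello brave world'],
  function (error, ops) {
    // ops: [{ i: 'brave ', p: 6 }]
    // 'hello ' is unchanged (position advances to 6), 'brave ' becomes
    // an insert at 6, and the trailing 'world' is unchanged again.
    console.log(error, ops)
  }
)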
redis.createClient(Settings.redis.documentupdater); - var worker = { - client, - _waitForUpdateThenDispatchWorker(callback) { - if (callback == null) { callback = function(error) {}; } - const timer = new Metrics.Timer("worker.waiting"); - return worker.client.blpop("pending-updates-list", 0, function(error, result) { - logger.log("getting pending-updates-list", error, result); - timer.done(); - if (error != null) { return callback(error); } - if ((result == null)) { return callback(); } - const [list_name, doc_key] = Array.from(result); - const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key)); - // Dispatch this in the background - const backgroundTask = cb => UpdateManager.processOutstandingUpdatesWithLock(project_id, doc_id, function(error) { - // log everything except OpRangeNotAvailable errors, these are normal - if (error != null) { - // downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry - const logAsWarning = (error instanceof Errors.OpRangeNotAvailableError) || (error instanceof Errors.DeleteMismatchError); - if (logAsWarning) { - logger.warn({err: error, project_id, doc_id}, "error processing update"); - } else { - logger.error({err: error, project_id, doc_id}, "error processing update"); - } - } - return cb(); - }); - return RateLimiter.run(backgroundTask, callback); - }); - }, - - run() { - if (Settings.shuttingDown) { return; } - return worker._waitForUpdateThenDispatchWorker(error => { - if (error != null) { - logger.error({err: error}, "Error in worker process"); - throw error; - } else { - return worker.run(); - } - }); - } - }; - - return worker; - }, - - createAndStartDispatchers(number) { - const RateLimiter = new RateLimitManager(number); - return (() => { - const result = []; - for (let i = 1, end = number, asc = end >= 1; asc ? i <= end : i >= end; asc ? 
i++ : i--) { - const worker = DispatchManager.createDispatcher(RateLimiter); - result.push(worker.run()); - } - return result; - })(); - } -}); +module.exports = DispatchManager = { + createDispatcher(RateLimiter) { + const client = redis.createClient(Settings.redis.documentupdater) + var worker = { + client, + _waitForUpdateThenDispatchWorker(callback) { + if (callback == null) { + callback = function (error) {} + } + const timer = new Metrics.Timer('worker.waiting') + return worker.client.blpop('pending-updates-list', 0, function ( + error, + result + ) { + logger.log('getting pending-updates-list', error, result) + timer.done() + if (error != null) { + return callback(error) + } + if (result == null) { + return callback() + } + const [list_name, doc_key] = Array.from(result) + const [project_id, doc_id] = Array.from( + Keys.splitProjectIdAndDocId(doc_key) + ) + // Dispatch this in the background + const backgroundTask = (cb) => + UpdateManager.processOutstandingUpdatesWithLock( + project_id, + doc_id, + function (error) { + // log everything except OpRangeNotAvailable errors, these are normal + if (error != null) { + // downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry + const logAsWarning = + error instanceof Errors.OpRangeNotAvailableError || + error instanceof Errors.DeleteMismatchError + if (logAsWarning) { + logger.warn( + { err: error, project_id, doc_id }, + 'error processing update' + ) + } else { + logger.error( + { err: error, project_id, doc_id }, + 'error processing update' + ) + } + } + return cb() + } + ) + return RateLimiter.run(backgroundTask, callback) + }) + }, + + run() { + if (Settings.shuttingDown) { + return + } + return worker._waitForUpdateThenDispatchWorker((error) => { + if (error != null) { + logger.error({ err: error }, 'Error in worker process') + throw error + } else { + return worker.run() + } + }) + } + } + + return worker + }, + + createAndStartDispatchers(number) { + const RateLimiter = new RateLimitManager(number) + return (() => { + const result = [] + for ( + let i = 1, end = number, asc = end >= 1; + asc ? i <= end : i >= end; + asc ? 
i++ : i-- + ) { + const worker = DispatchManager.createDispatcher(RateLimiter) + result.push(worker.run()) + } + return result + })() + } +} diff --git a/services/document-updater/app/js/DocumentManager.js b/services/document-updater/app/js/DocumentManager.js index d1e3e3ca07..b6c4510f9f 100644 --- a/services/document-updater/app/js/DocumentManager.js +++ b/services/document-updater/app/js/DocumentManager.js @@ -12,336 +12,762 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let DocumentManager; -const RedisManager = require("./RedisManager"); -const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager"); -const PersistenceManager = require("./PersistenceManager"); -const DiffCodec = require("./DiffCodec"); -const logger = require("logger-sharelatex"); -const Metrics = require("./Metrics"); -const HistoryManager = require("./HistoryManager"); -const RealTimeRedisManager = require("./RealTimeRedisManager"); -const Errors = require("./Errors"); -const RangesManager = require("./RangesManager"); -const async = require("async"); +let DocumentManager +const RedisManager = require('./RedisManager') +const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') +const PersistenceManager = require('./PersistenceManager') +const DiffCodec = require('./DiffCodec') +const logger = require('logger-sharelatex') +const Metrics = require('./Metrics') +const HistoryManager = require('./HistoryManager') +const RealTimeRedisManager = require('./RealTimeRedisManager') +const Errors = require('./Errors') +const RangesManager = require('./RangesManager') +const async = require('async') -const MAX_UNFLUSHED_AGE = 300 * 1000; // 5 mins, document should be flushed to mongo this time after a change +const MAX_UNFLUSHED_AGE = 300 * 1000 // 5 mins, document should be flushed to mongo this time after a change -module.exports = (DocumentManager = { - getDoc(project_id, doc_id, _callback) { - if (_callback == null) { _callback = function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) {}; } - const timer = new Metrics.Timer("docManager.getDoc"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; +module.exports = DocumentManager = { + getDoc(project_id, doc_id, _callback) { + if (_callback == null) { + _callback = function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + alreadyLoaded + ) {} + } + const timer = new Metrics.Timer('docManager.getDoc') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) { - if (error != null) { return callback(error); } - if ((lines == null) || (version == null)) { - logger.log({project_id, doc_id}, "doc not in redis so getting from persistence API"); - return PersistenceManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) { - if (error != null) { return callback(error); } - logger.log({project_id, doc_id, lines, version, pathname, projectHistoryId, projectHistoryType}, "got doc from persistence API"); - return RedisManager.putDocInMemory(project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, function(error) { - if (error != null) { return callback(error); } - 
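Stripped of logging, metrics and rate limiting, the dispatcher created above is a simple blocking-pop loop: BLPOP the shared pending-updates-list, split the popped key into project and doc ids, kick off processing of the doc's outstanding updates, and recurse to wait for the next item. A condensed sketch, assuming the "project_id:doc_id" key format that Keys.splitProjectIdAndDocId implies:

const UpdateManager = require('./app/js/UpdateManager')

function runWorker(client) {
  client.blpop('pending-updates-list', 0, function (error, result) {
    if (error) throw error
    if (result != null) {
      const [, docKey] = result // result is [listName, docKey]
      const [projectId, docId] = docKey.split(':')
      // Processing is dispatched in the background; the worker goes
      // straight back to blocking on the list.
      UpdateManager.processOutstandingUpdatesWithLock(
        projectId,
        docId,
        function () {}
      )
    }
    return runWorker(client)
  })
}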
return RedisManager.setHistoryType(doc_id, projectHistoryType, function(error) { - if (error != null) { return callback(error); } - return callback(null, lines, version, ranges || {}, pathname, projectHistoryId, null, false); - }); - }); - }); - } else { - return callback(null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true); - } - }); - }, + return RedisManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime + ) { + if (error != null) { + return callback(error) + } + if (lines == null || version == null) { + logger.log( + { project_id, doc_id }, + 'doc not in redis so getting from persistence API' + ) + return PersistenceManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + projectHistoryType + ) { + if (error != null) { + return callback(error) + } + logger.log( + { + project_id, + doc_id, + lines, + version, + pathname, + projectHistoryId, + projectHistoryType + }, + 'got doc from persistence API' + ) + return RedisManager.putDocInMemory( + project_id, + doc_id, + lines, + version, + ranges, + pathname, + projectHistoryId, + function (error) { + if (error != null) { + return callback(error) + } + return RedisManager.setHistoryType( + doc_id, + projectHistoryType, + function (error) { + if (error != null) { + return callback(error) + } + return callback( + null, + lines, + version, + ranges || {}, + pathname, + projectHistoryId, + null, + false + ) + } + ) + } + ) + }) + } else { + return callback( + null, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + true + ) + } + }) + }, - getDocAndRecentOps(project_id, doc_id, fromVersion, _callback) { - if (_callback == null) { _callback = function(error, lines, version, ops, ranges, pathname, projectHistoryId) {}; } - const timer = new Metrics.Timer("docManager.getDocAndRecentOps"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; + getDocAndRecentOps(project_id, doc_id, fromVersion, _callback) { + if (_callback == null) { + _callback = function ( + error, + lines, + version, + ops, + ranges, + pathname, + projectHistoryId + ) {} + } + const timer = new Metrics.Timer('docManager.getDocAndRecentOps') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) { - if (error != null) { return callback(error); } - if (fromVersion === -1) { - return callback(null, lines, version, [], ranges, pathname, projectHistoryId); - } else { - return RedisManager.getPreviousDocOps(doc_id, fromVersion, version, function(error, ops) { - if (error != null) { return callback(error); } - return callback(null, lines, version, ops, ranges, pathname, projectHistoryId); - }); - } - }); - }, + return DocumentManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId + ) { + if (error != null) { + return callback(error) + } + if (fromVersion === -1) { + return callback( + null, + lines, + version, + [], + ranges, + pathname, + projectHistoryId + ) + } else { + return RedisManager.getPreviousDocOps( + doc_id, + fromVersion, + version, + function (error, ops) { + if (error != null) { + return callback(error) + } + return callback( + null, + lines, + version, + ops, + ranges, + pathname, + 
projectHistoryId + ) + } + ) + } + }) + }, - setDoc(project_id, doc_id, newLines, source, user_id, undoing, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("docManager.setDoc"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; + setDoc(project_id, doc_id, newLines, source, user_id, undoing, _callback) { + if (_callback == null) { + _callback = function (error) {} + } + const timer = new Metrics.Timer('docManager.setDoc') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - if ((newLines == null)) { - return callback(new Error("No lines were provided to setDoc")); - } + if (newLines == null) { + return callback(new Error('No lines were provided to setDoc')) + } - const UpdateManager = require("./UpdateManager"); - return DocumentManager.getDoc(project_id, doc_id, function(error, oldLines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) { - if (error != null) { return callback(error); } + const UpdateManager = require('./UpdateManager') + return DocumentManager.getDoc(project_id, doc_id, function ( + error, + oldLines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + alreadyLoaded + ) { + if (error != null) { + return callback(error) + } - if ((oldLines != null) && (oldLines.length > 0) && (oldLines[0].text != null)) { - logger.log({doc_id, project_id, oldLines, newLines}, "document is JSON so not updating"); - return callback(null); - } + if (oldLines != null && oldLines.length > 0 && oldLines[0].text != null) { + logger.log( + { doc_id, project_id, oldLines, newLines }, + 'document is JSON so not updating' + ) + return callback(null) + } - logger.log({doc_id, project_id, oldLines, newLines}, "setting a document via http"); - return DiffCodec.diffAsShareJsOp(oldLines, newLines, function(error, op) { - if (error != null) { return callback(error); } - if (undoing) { - for (const o of Array.from(op || [])) { - o.u = true; - } // Turn on undo flag for each op for track changes - } - const update = { - doc: doc_id, - op, - v: version, - meta: { - type: "external", - source, - user_id - } - }; - return UpdateManager.applyUpdate(project_id, doc_id, update, function(error) { - if (error != null) { return callback(error); } - // If the document was loaded already, then someone has it open - // in a project, and the usual flushing mechanism will happen. - // Otherwise we should remove it immediately since nothing else - // is using it. 
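// For reference, the external update assembled by setDoc in this hunk has the
// shape sketched below. Field names are from this file; the values are
// illustrative, and the op entries use the ShareJS text type (i = insert,
// d = delete, p = position).
const exampleSetDocUpdate = {
  doc: 'doc-id',
  op: [{ i: 'new text', p: 0, u: true }], // u (undo) flag set when undoing
  v: 42, // version the op was computed against
  meta: { type: 'external', source: 'upload', user_id: 'user-id' }
}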
- if (alreadyLoaded) { - return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) { - if (error != null) { return callback(error); } - return callback(null); - }); - } else { - return DocumentManager.flushAndDeleteDoc(project_id, doc_id, {}, function(error) { - // There is no harm in flushing project history if the previous - // call failed and sometimes it is required - HistoryManager.flushProjectChangesAsync(project_id); + logger.log( + { doc_id, project_id, oldLines, newLines }, + 'setting a document via http' + ) + return DiffCodec.diffAsShareJsOp(oldLines, newLines, function ( + error, + op + ) { + if (error != null) { + return callback(error) + } + if (undoing) { + for (const o of Array.from(op || [])) { + o.u = true + } // Turn on undo flag for each op for track changes + } + const update = { + doc: doc_id, + op, + v: version, + meta: { + type: 'external', + source, + user_id + } + } + return UpdateManager.applyUpdate(project_id, doc_id, update, function ( + error + ) { + if (error != null) { + return callback(error) + } + // If the document was loaded already, then someone has it open + // in a project, and the usual flushing mechanism will happen. + // Otherwise we should remove it immediately since nothing else + // is using it. + if (alreadyLoaded) { + return DocumentManager.flushDocIfLoaded( + project_id, + doc_id, + function (error) { + if (error != null) { + return callback(error) + } + return callback(null) + } + ) + } else { + return DocumentManager.flushAndDeleteDoc( + project_id, + doc_id, + {}, + function (error) { + // There is no harm in flushing project history if the previous + // call failed and sometimes it is required + HistoryManager.flushProjectChangesAsync(project_id) - if (error != null) { return callback(error); } - return callback(null); - }); - } - }); - }); - }); - }, + if (error != null) { + return callback(error) + } + return callback(null) + } + ) + } + }) + }) + }) + }, - flushDocIfLoaded(project_id, doc_id, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("docManager.flushDocIfLoaded"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; - return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy) { - if (error != null) { return callback(error); } - if ((lines == null) || (version == null)) { - logger.log({project_id, doc_id}, "doc is not loaded so not flushing"); - return callback(null); // TODO: return a flag to bail out, as we go on to remove doc from memory? 
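// A runnable sketch of the staleness rule applied by getDocAndFlushIfOld
// later in this file: flush only when the doc was already loaded and its
// oldest unflushed change is older than MAX_UNFLUSHED_AGE. The constant
// mirrors the one defined above; the helper itself is illustrative.
const MAX_UNFLUSHED_AGE_SKETCH = 300 * 1000 // 5 minutes
function needsFlush(alreadyLoaded, unflushedTime, now = Date.now()) {
  return (
    alreadyLoaded &&
    unflushedTime != null &&
    now - unflushedTime > MAX_UNFLUSHED_AGE_SKETCH
  )
}
// needsFlush(true, Date.now() - 10 * 60 * 1000) -> true  (10 minutes unflushed)
// needsFlush(true, Date.now() - 60 * 1000)      -> false (only 1 minute old)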
- } else { - logger.log({project_id, doc_id, version}, "flushing doc"); - return PersistenceManager.setDoc(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, function(error) { - if (error != null) { return callback(error); } - return RedisManager.clearUnflushedTime(doc_id, callback); - }); - } - }); - }, + flushDocIfLoaded(project_id, doc_id, _callback) { + if (_callback == null) { + _callback = function (error) {} + } + const timer = new Metrics.Timer('docManager.flushDocIfLoaded') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } + return RedisManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy + ) { + if (error != null) { + return callback(error) + } + if (lines == null || version == null) { + logger.log({ project_id, doc_id }, 'doc is not loaded so not flushing') + return callback(null) // TODO: return a flag to bail out, as we go on to remove doc from memory? + } else { + logger.log({ project_id, doc_id, version }, 'flushing doc') + return PersistenceManager.setDoc( + project_id, + doc_id, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy, + function (error) { + if (error != null) { + return callback(error) + } + return RedisManager.clearUnflushedTime(doc_id, callback) + } + ) + } + }) + }, - flushAndDeleteDoc(project_id, doc_id, options, _callback) { - const timer = new Metrics.Timer("docManager.flushAndDeleteDoc"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; + flushAndDeleteDoc(project_id, doc_id, options, _callback) { + const timer = new Metrics.Timer('docManager.flushAndDeleteDoc') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) { - if (error != null) { - if (options.ignoreFlushErrors) { - logger.warn({project_id, doc_id, err: error}, "ignoring flush error while deleting document"); - } else { - return callback(error); - } - } + return DocumentManager.flushDocIfLoaded(project_id, doc_id, function ( + error + ) { + if (error != null) { + if (options.ignoreFlushErrors) { + logger.warn( + { project_id, doc_id, err: error }, + 'ignoring flush error while deleting document' + ) + } else { + return callback(error) + } + } - // Flush in the background since it requires a http request - HistoryManager.flushDocChangesAsync(project_id, doc_id); + // Flush in the background since it requires a http request + HistoryManager.flushDocChangesAsync(project_id, doc_id) - return RedisManager.removeDocFromMemory(project_id, doc_id, function(error) { - if (error != null) { return callback(error); } - return callback(null); - }); - }); - }, + return RedisManager.removeDocFromMemory(project_id, doc_id, function ( + error + ) { + if (error != null) { + return callback(error) + } + return callback(null) + }) + }) + }, - acceptChanges(project_id, doc_id, change_ids, _callback) { - if (change_ids == null) { change_ids = []; } - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("docManager.acceptChanges"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; + acceptChanges(project_id, doc_id, change_ids, _callback) { + if (change_ids == null) { + change_ids = [] + } + if (_callback == null) { + _callback = function 
(error) {} + } + const timer = new Metrics.Timer('docManager.acceptChanges') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges) { - if (error != null) { return callback(error); } - if ((lines == null) || (version == null)) { - return callback(new Errors.NotFoundError(`document not found: ${doc_id}`)); - } - return RangesManager.acceptChanges(change_ids, ranges, function(error, new_ranges) { - if (error != null) { return callback(error); } - return RedisManager.updateDocument(project_id, doc_id, lines, version, [], new_ranges, {}, function(error) { - if (error != null) { return callback(error); } - return callback(); - }); - }); - }); - }, + return DocumentManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges + ) { + if (error != null) { + return callback(error) + } + if (lines == null || version == null) { + return callback( + new Errors.NotFoundError(`document not found: ${doc_id}`) + ) + } + return RangesManager.acceptChanges(change_ids, ranges, function ( + error, + new_ranges + ) { + if (error != null) { + return callback(error) + } + return RedisManager.updateDocument( + project_id, + doc_id, + lines, + version, + [], + new_ranges, + {}, + function (error) { + if (error != null) { + return callback(error) + } + return callback() + } + ) + }) + }) + }, - deleteComment(project_id, doc_id, comment_id, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("docManager.deleteComment"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; + deleteComment(project_id, doc_id, comment_id, _callback) { + if (_callback == null) { + _callback = function (error) {} + } + const timer = new Metrics.Timer('docManager.deleteComment') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges) { - if (error != null) { return callback(error); } - if ((lines == null) || (version == null)) { - return callback(new Errors.NotFoundError(`document not found: ${doc_id}`)); - } - return RangesManager.deleteComment(comment_id, ranges, function(error, new_ranges) { - if (error != null) { return callback(error); } - return RedisManager.updateDocument(project_id, doc_id, lines, version, [], new_ranges, {}, function(error) { - if (error != null) { return callback(error); } - return callback(); - }); - }); - }); - }, + return DocumentManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges + ) { + if (error != null) { + return callback(error) + } + if (lines == null || version == null) { + return callback( + new Errors.NotFoundError(`document not found: ${doc_id}`) + ) + } + return RangesManager.deleteComment(comment_id, ranges, function ( + error, + new_ranges + ) { + if (error != null) { + return callback(error) + } + return RedisManager.updateDocument( + project_id, + doc_id, + lines, + version, + [], + new_ranges, + {}, + function (error) { + if (error != null) { + return callback(error) + } + return callback() + } + ) + }) + }) + }, - renameDoc(project_id, doc_id, user_id, update, projectHistoryId, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("docManager.updateProject"); - const callback = function(...args) { - 
timer.done(); - return _callback(...Array.from(args || [])); - }; + renameDoc(project_id, doc_id, user_id, update, projectHistoryId, _callback) { + if (_callback == null) { + _callback = function (error) {} + } + const timer = new Metrics.Timer('docManager.updateProject') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - return RedisManager.renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback); - }, + return RedisManager.renameDoc( + project_id, + doc_id, + user_id, + update, + projectHistoryId, + callback + ) + }, - getDocAndFlushIfOld(project_id, doc_id, callback) { - if (callback == null) { callback = function(error, doc) {}; } - return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime, alreadyLoaded) { - if (error != null) { return callback(error); } - // if doc was already loaded see if it needs to be flushed - if (alreadyLoaded && (unflushedTime != null) && ((Date.now() - unflushedTime) > MAX_UNFLUSHED_AGE)) { - return DocumentManager.flushDocIfLoaded(project_id, doc_id, function(error) { - if (error != null) { return callback(error); } - return callback(null, lines, version); - }); - } else { - return callback(null, lines, version); - } - }); - }, + getDocAndFlushIfOld(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error, doc) {} + } + return DocumentManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + alreadyLoaded + ) { + if (error != null) { + return callback(error) + } + // if doc was already loaded see if it needs to be flushed + if ( + alreadyLoaded && + unflushedTime != null && + Date.now() - unflushedTime > MAX_UNFLUSHED_AGE + ) { + return DocumentManager.flushDocIfLoaded(project_id, doc_id, function ( + error + ) { + if (error != null) { + return callback(error) + } + return callback(null, lines, version) + }) + } else { + return callback(null, lines, version) + } + }) + }, - resyncDocContents(project_id, doc_id, callback) { - logger.log({project_id, doc_id}, "start resyncing doc contents"); - return RedisManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) { - if (error != null) { return callback(error); } + resyncDocContents(project_id, doc_id, callback) { + logger.log({ project_id, doc_id }, 'start resyncing doc contents') + return RedisManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId + ) { + if (error != null) { + return callback(error) + } - if ((lines == null) || (version == null)) { - logger.log({project_id, doc_id}, "resyncing doc contents - not found in redis - retrieving from web"); - return PersistenceManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) { - if (error != null) { - logger.error({project_id, doc_id, getDocError: error}, "resyncing doc contents - error retrieving from web"); - return callback(error); - } - return ProjectHistoryRedisManager.queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback); - }); - } else { - logger.log({project_id, doc_id}, "resyncing doc contents - doc in redis - will queue in redis"); - return ProjectHistoryRedisManager.queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback); - } - }); - }, + if (lines == null || 
version == null) { + logger.log( + { project_id, doc_id }, + 'resyncing doc contents - not found in redis - retrieving from web' + ) + return PersistenceManager.getDoc(project_id, doc_id, function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId + ) { + if (error != null) { + logger.error( + { project_id, doc_id, getDocError: error }, + 'resyncing doc contents - error retrieving from web' + ) + return callback(error) + } + return ProjectHistoryRedisManager.queueResyncDocContent( + project_id, + projectHistoryId, + doc_id, + lines, + version, + pathname, + callback + ) + }) + } else { + logger.log( + { project_id, doc_id }, + 'resyncing doc contents - doc in redis - will queue in redis' + ) + return ProjectHistoryRedisManager.queueResyncDocContent( + project_id, + projectHistoryId, + doc_id, + lines, + version, + pathname, + callback + ) + } + }) + }, - getDocWithLock(project_id, doc_id, callback) { - if (callback == null) { callback = function(error, lines, version) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.getDoc, project_id, doc_id, callback); - }, + getDocWithLock(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error, lines, version) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.getDoc, + project_id, + doc_id, + callback + ) + }, - getDocAndRecentOpsWithLock(project_id, doc_id, fromVersion, callback) { - if (callback == null) { callback = function(error, lines, version, ops, ranges, pathname, projectHistoryId) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.getDocAndRecentOps, project_id, doc_id, fromVersion, callback); - }, + getDocAndRecentOpsWithLock(project_id, doc_id, fromVersion, callback) { + if (callback == null) { + callback = function ( + error, + lines, + version, + ops, + ranges, + pathname, + projectHistoryId + ) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.getDocAndRecentOps, + project_id, + doc_id, + fromVersion, + callback + ) + }, - getDocAndFlushIfOldWithLock(project_id, doc_id, callback) { - if (callback == null) { callback = function(error, doc) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.getDocAndFlushIfOld, project_id, doc_id, callback); - }, + getDocAndFlushIfOldWithLock(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error, doc) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.getDocAndFlushIfOld, + project_id, + doc_id, + callback + ) + }, - setDocWithLock(project_id, doc_id, lines, source, user_id, undoing, callback) { - if (callback == null) { callback = function(error) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.setDoc, project_id, doc_id, lines, source, user_id, undoing, callback); - }, + setDocWithLock( + project_id, + doc_id, + lines, + source, + user_id, + undoing, + callback + ) { + if (callback == null) { + callback = function (error) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.setDoc, + project_id, + doc_id, + lines, + source, + user_id, + undoing, + callback + ) + }, - flushDocIfLoadedWithLock(project_id, doc_id, 
callback) { - if (callback == null) { callback = function(error) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.flushDocIfLoaded, project_id, doc_id, callback); - }, + flushDocIfLoadedWithLock(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.flushDocIfLoaded, + project_id, + doc_id, + callback + ) + }, - flushAndDeleteDocWithLock(project_id, doc_id, options, callback) { - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.flushAndDeleteDoc, project_id, doc_id, options, callback); - }, + flushAndDeleteDocWithLock(project_id, doc_id, options, callback) { + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.flushAndDeleteDoc, + project_id, + doc_id, + options, + callback + ) + }, - acceptChangesWithLock(project_id, doc_id, change_ids, callback) { - if (callback == null) { callback = function(error) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.acceptChanges, project_id, doc_id, change_ids, callback); - }, + acceptChangesWithLock(project_id, doc_id, change_ids, callback) { + if (callback == null) { + callback = function (error) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.acceptChanges, + project_id, + doc_id, + change_ids, + callback + ) + }, - deleteCommentWithLock(project_id, doc_id, thread_id, callback) { - if (callback == null) { callback = function(error) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.deleteComment, project_id, doc_id, thread_id, callback); - }, + deleteCommentWithLock(project_id, doc_id, thread_id, callback) { + if (callback == null) { + callback = function (error) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.deleteComment, + project_id, + doc_id, + thread_id, + callback + ) + }, - renameDocWithLock(project_id, doc_id, user_id, update, projectHistoryId, callback) { - if (callback == null) { callback = function(error) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.renameDoc, project_id, doc_id, user_id, update, projectHistoryId, callback); - }, + renameDocWithLock( + project_id, + doc_id, + user_id, + update, + projectHistoryId, + callback + ) { + if (callback == null) { + callback = function (error) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.renameDoc, + project_id, + doc_id, + user_id, + update, + projectHistoryId, + callback + ) + }, - resyncDocContentsWithLock(project_id, doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - const UpdateManager = require("./UpdateManager"); - return UpdateManager.lockUpdatesAndDo(DocumentManager.resyncDocContents, project_id, doc_id, callback); - } -}); + resyncDocContentsWithLock(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + const UpdateManager = require('./UpdateManager') + return UpdateManager.lockUpdatesAndDo( + DocumentManager.resyncDocContents, + project_id, + doc_id, + callback + ) + } +} diff --git 
a/services/document-updater/app/js/Errors.js b/services/document-updater/app/js/Errors.js index d4bbfb7acc..c67c2c8422 100644 --- a/services/document-updater/app/js/Errors.js +++ b/services/document-updater/app/js/Errors.js @@ -4,42 +4,42 @@ */ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. -let Errors; -var NotFoundError = function(message) { - const error = new Error(message); - error.name = "NotFoundError"; - error.__proto__ = NotFoundError.prototype; - return error; -}; -NotFoundError.prototype.__proto__ = Error.prototype; +let Errors +var NotFoundError = function (message) { + const error = new Error(message) + error.name = 'NotFoundError' + error.__proto__ = NotFoundError.prototype + return error +} +NotFoundError.prototype.__proto__ = Error.prototype -var OpRangeNotAvailableError = function(message) { - const error = new Error(message); - error.name = "OpRangeNotAvailableError"; - error.__proto__ = OpRangeNotAvailableError.prototype; - return error; -}; -OpRangeNotAvailableError.prototype.__proto__ = Error.prototype; +var OpRangeNotAvailableError = function (message) { + const error = new Error(message) + error.name = 'OpRangeNotAvailableError' + error.__proto__ = OpRangeNotAvailableError.prototype + return error +} +OpRangeNotAvailableError.prototype.__proto__ = Error.prototype -var ProjectStateChangedError = function(message) { - const error = new Error(message); - error.name = "ProjectStateChangedError"; - error.__proto__ = ProjectStateChangedError.prototype; - return error; -}; -ProjectStateChangedError.prototype.__proto__ = Error.prototype; +var ProjectStateChangedError = function (message) { + const error = new Error(message) + error.name = 'ProjectStateChangedError' + error.__proto__ = ProjectStateChangedError.prototype + return error +} +ProjectStateChangedError.prototype.__proto__ = Error.prototype -var DeleteMismatchError = function(message) { - const error = new Error(message); - error.name = "DeleteMismatchError"; - error.__proto__ = DeleteMismatchError.prototype; - return error; -}; -DeleteMismatchError.prototype.__proto__ = Error.prototype; +var DeleteMismatchError = function (message) { + const error = new Error(message) + error.name = 'DeleteMismatchError' + error.__proto__ = DeleteMismatchError.prototype + return error +} +DeleteMismatchError.prototype.__proto__ = Error.prototype -module.exports = (Errors = { - NotFoundError, - OpRangeNotAvailableError, - ProjectStateChangedError, - DeleteMismatchError -}); +module.exports = Errors = { + NotFoundError, + OpRangeNotAvailableError, + ProjectStateChangedError, + DeleteMismatchError +} diff --git a/services/document-updater/app/js/HistoryManager.js b/services/document-updater/app/js/HistoryManager.js index 457e8247fb..18c5b7f6d2 100644 --- a/services/document-updater/app/js/HistoryManager.js +++ b/services/document-updater/app/js/HistoryManager.js @@ -11,140 +11,253 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let HistoryManager; -const async = require("async"); -const logger = require("logger-sharelatex"); -const request = require("request"); -const Settings = require("settings-sharelatex"); -const HistoryRedisManager = require("./HistoryRedisManager"); -const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager"); -const RedisManager = require("./RedisManager"); -const metrics = require("./Metrics"); +let HistoryManager +const async = require('async') 
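// The prototype wiring in Errors.js above keeps instanceof checks working for
// these factory-style errors (HttpController below relies on this for
// Errors.ProjectStateChangedError). A small usage sketch:
const { NotFoundError } = require('./Errors')
const err = NotFoundError('document not found: example-doc-id')
console.log(err instanceof NotFoundError) // true
console.log(err instanceof Error) // true
console.log(err.name) // 'NotFoundError'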
+const logger = require('logger-sharelatex') +const request = require('request') +const Settings = require('settings-sharelatex') +const HistoryRedisManager = require('./HistoryRedisManager') +const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') +const RedisManager = require('./RedisManager') +const metrics = require('./Metrics') -module.exports = (HistoryManager = { - flushDocChangesAsync(project_id, doc_id) { - if (((Settings.apis != null ? Settings.apis.trackchanges : undefined) == null)) { - logger.warn({ doc_id }, "track changes API is not configured, so not flushing"); - return; - } - return RedisManager.getHistoryType(doc_id, function(err, projectHistoryType) { - if (err != null) { - logger.warn({err, doc_id}, "error getting history type"); - } - // if there's an error continue and flush to track-changes for safety - if (Settings.disableDoubleFlush && (projectHistoryType === "project-history")) { - return logger.debug({doc_id, projectHistoryType}, "skipping track-changes flush"); - } else { - metrics.inc('history-flush', 1, { status: 'track-changes'}); - const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush`; - logger.log({ project_id, doc_id, url, projectHistoryType }, "flushing doc in track changes api"); - return request.post(url, function(error, res, body){ - if (error != null) { - return logger.error({ error, doc_id, project_id}, "track changes doc to track changes api"); - } else if ((res.statusCode < 200) && (res.statusCode >= 300)) { - return logger.error({ doc_id, project_id }, `track changes api returned a failure status code: ${res.statusCode}`); - } - }); - } - }); - }, +module.exports = HistoryManager = { + flushDocChangesAsync(project_id, doc_id) { + if ( + (Settings.apis != null ? Settings.apis.trackchanges : undefined) == null + ) { + logger.warn( + { doc_id }, + 'track changes API is not configured, so not flushing' + ) + return + } + return RedisManager.getHistoryType(doc_id, function ( + err, + projectHistoryType + ) { + if (err != null) { + logger.warn({ err, doc_id }, 'error getting history type') + } + // if there's an error continue and flush to track-changes for safety + if ( + Settings.disableDoubleFlush && + projectHistoryType === 'project-history' + ) { + return logger.debug( + { doc_id, projectHistoryType }, + 'skipping track-changes flush' + ) + } else { + metrics.inc('history-flush', 1, { status: 'track-changes' }) + const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush` + logger.log( + { project_id, doc_id, url, projectHistoryType }, + 'flushing doc in track changes api' + ) + return request.post(url, function (error, res, body) { + if (error != null) { + return logger.error( + { error, doc_id, project_id }, + 'error flushing doc to track changes api' + ) + } else if (res.statusCode < 200 || res.statusCode >= 300) { + return logger.error( + { doc_id, project_id }, + `track changes api returned a failure status code: ${res.statusCode}` + ) + } + }) + } + }) + }, - // flush changes in the background - flushProjectChangesAsync(project_id) { - if (!__guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) { return; } - return HistoryManager.flushProjectChanges(project_id, {background:true}, function() {}); - }, + // flush changes in the background + flushProjectChangesAsync(project_id) { + if ( + !__guard__( + Settings.apis != null ? 
Settings.apis.project_history : undefined, + (x) => x.enabled + ) + ) { + return + } + return HistoryManager.flushProjectChanges( + project_id, + { background: true }, + function () {} + ) + }, - // flush changes and callback (for when we need to know the queue is flushed) - flushProjectChanges(project_id, options, callback) { - if (callback == null) { callback = function(error) {}; } - if (!__guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) { return callback(); } - if (options.skip_history_flush) { - logger.log({project_id}, "skipping flush of project history"); - return callback(); - } - metrics.inc('history-flush', 1, { status: 'project-history'}); - const url = `${Settings.apis.project_history.url}/project/${project_id}/flush`; - const qs = {}; - if (options.background) { qs.background = true; } // pass on the background flush option if present - logger.log({ project_id, url, qs }, "flushing doc in project history api"); - return request.post({url, qs}, function(error, res, body){ - if (error != null) { - logger.error({ error, project_id}, "project history doc to track changes api"); - return callback(error); - } else if ((res.statusCode < 200) && (res.statusCode >= 300)) { - logger.error({ project_id }, `project history api returned a failure status code: ${res.statusCode}`); - return callback(error); - } else { - return callback(); - } - }); - }, + // flush changes and callback (for when we need to know the queue is flushed) + flushProjectChanges(project_id, options, callback) { + if (callback == null) { + callback = function (error) {} + } + if ( + !__guard__( + Settings.apis != null ? Settings.apis.project_history : undefined, + (x) => x.enabled + ) + ) { + return callback() + } + if (options.skip_history_flush) { + logger.log({ project_id }, 'skipping flush of project history') + return callback() + } + metrics.inc('history-flush', 1, { status: 'project-history' }) + const url = `${Settings.apis.project_history.url}/project/${project_id}/flush` + const qs = {} + if (options.background) { + qs.background = true + } // pass on the background flush option if present + logger.log({ project_id, url, qs }, 'flushing project in project history api') + return request.post({ url, qs }, function (error, res, body) { + if (error != null) { + logger.error( + { error, project_id }, + 'error flushing project to project history api' + ) + return callback(error) + } else if (res.statusCode < 200 || res.statusCode >= 300) { + logger.error( + { project_id }, + `project history api returned a failure status code: ${res.statusCode}` + ) + return callback( + new Error( + `project history api returned a failure status code: ${res.statusCode}` + ) + ) + } else { + return callback() + } + }) + }, - FLUSH_DOC_EVERY_N_OPS: 100, - FLUSH_PROJECT_EVERY_N_OPS: 500, + FLUSH_DOC_EVERY_N_OPS: 100, + FLUSH_PROJECT_EVERY_N_OPS: 500, - recordAndFlushHistoryOps(project_id, doc_id, ops, doc_ops_length, project_ops_length, callback) { - if (ops == null) { ops = []; } - if (callback == null) { callback = function(error) {}; } - if (ops.length === 0) { - return callback(); - } + recordAndFlushHistoryOps( + project_id, + doc_id, + ops, + doc_ops_length, + project_ops_length, + callback + ) { + if (ops == null) { + ops = [] + } + if (callback == null) { + callback = function (error) {} + } + if (ops.length === 0) { + return callback() + } - // record updates for project history - if (__guard__(Settings.apis != null ? 
Settings.apis.project_history : undefined, x => x.enabled)) { - if (HistoryManager.shouldFlushHistoryOps(project_ops_length, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)) { - // Do this in the background since it uses HTTP and so may be too - // slow to wait for when processing a doc update. - logger.log({ project_ops_length, project_id }, "flushing project history api"); - HistoryManager.flushProjectChangesAsync(project_id); - } - } + // record updates for project history + if ( + __guard__( + Settings.apis != null ? Settings.apis.project_history : undefined, + (x) => x.enabled + ) + ) { + if ( + HistoryManager.shouldFlushHistoryOps( + project_ops_length, + ops.length, + HistoryManager.FLUSH_PROJECT_EVERY_N_OPS + ) + ) { + // Do this in the background since it uses HTTP and so may be too + // slow to wait for when processing a doc update. + logger.log( + { project_ops_length, project_id }, + 'flushing project history api' + ) + HistoryManager.flushProjectChangesAsync(project_id) + } + } - // if the doc_ops_length is undefined it means the project is not using track-changes - // so we can bail out here - if (typeof(doc_ops_length) === 'undefined') { - logger.debug({ project_id, doc_id}, "skipping flush to track-changes, only using project-history"); - return callback(); - } + // if the doc_ops_length is undefined it means the project is not using track-changes + // so we can bail out here + if (typeof doc_ops_length === 'undefined') { + logger.debug( + { project_id, doc_id }, + 'skipping flush to track-changes, only using project-history' + ) + return callback() + } - // record updates for track-changes - return HistoryRedisManager.recordDocHasHistoryOps(project_id, doc_id, ops, function(error) { - if (error != null) { return callback(error); } - if (HistoryManager.shouldFlushHistoryOps(doc_ops_length, ops.length, HistoryManager.FLUSH_DOC_EVERY_N_OPS)) { - // Do this in the background since it uses HTTP and so may be too - // slow to wait for when processing a doc update. - logger.log({ doc_ops_length, doc_id, project_id }, "flushing track changes api"); - HistoryManager.flushDocChangesAsync(project_id, doc_id); - } - return callback(); - }); - }, + // record updates for track-changes + return HistoryRedisManager.recordDocHasHistoryOps( + project_id, + doc_id, + ops, + function (error) { + if (error != null) { + return callback(error) + } + if ( + HistoryManager.shouldFlushHistoryOps( + doc_ops_length, + ops.length, + HistoryManager.FLUSH_DOC_EVERY_N_OPS + ) + ) { + // Do this in the background since it uses HTTP and so may be too + // slow to wait for when processing a doc update. + logger.log( + { doc_ops_length, doc_id, project_id }, + 'flushing track changes api' + ) + HistoryManager.flushDocChangesAsync(project_id, doc_id) + } + return callback() + } + ) + }, - shouldFlushHistoryOps(length, ops_length, threshold) { - if (!length) { return false; } // don't flush unless we know the length - // We want to flush every 100 ops, i.e. 100, 200, 300, etc - // Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these - // ops. If we've changed, then we've gone over a multiple of 100 and should flush. 
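// Worked example of the block arithmetic in shouldFlushHistoryOps below
// (values illustrative): with threshold = 100, length = 205 and
// ops_length = 10, the previous length is 195, so prevBlock =
// Math.floor(195 / 100) = 1 and newBlock = Math.floor(205 / 100) = 2. The
// blocks differ, a multiple of 100 was crossed, and the function returns
// true. With length = 150 instead, 140 and 150 both fall in block 1, so it
// returns false and no flush is triggered.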
- // (Most of the time, we will only hit 100 and then flushing will put us back to 0) - const previousLength = length - ops_length; - const prevBlock = Math.floor(previousLength / threshold); - const newBlock = Math.floor(length / threshold); - return newBlock !== prevBlock; - }, + shouldFlushHistoryOps(length, ops_length, threshold) { + if (!length) { + return false + } // don't flush unless we know the length + // We want to flush every 100 ops, i.e. 100, 200, 300, etc + // Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these + // ops. If we've changed, then we've gone over a multiple of 100 and should flush. + // (Most of the time, we will only hit 100 and then flushing will put us back to 0) + const previousLength = length - ops_length + const prevBlock = Math.floor(previousLength / threshold) + const newBlock = Math.floor(length / threshold) + return newBlock !== prevBlock + }, - MAX_PARALLEL_REQUESTS: 4, + MAX_PARALLEL_REQUESTS: 4, - resyncProjectHistory(project_id, projectHistoryId, docs, files, callback) { - return ProjectHistoryRedisManager.queueResyncProjectStructure(project_id, projectHistoryId, docs, files, function(error) { - if (error != null) { return callback(error); } - const DocumentManager = require("./DocumentManager"); - const resyncDoc = (doc, cb) => DocumentManager.resyncDocContentsWithLock(project_id, doc.doc, cb); - return async.eachLimit(docs, HistoryManager.MAX_PARALLEL_REQUESTS, resyncDoc, callback); - }); - } -}); + resyncProjectHistory(project_id, projectHistoryId, docs, files, callback) { + return ProjectHistoryRedisManager.queueResyncProjectStructure( + project_id, + projectHistoryId, + docs, + files, + function (error) { + if (error != null) { + return callback(error) + } + const DocumentManager = require('./DocumentManager') + const resyncDoc = (doc, cb) => + DocumentManager.resyncDocContentsWithLock(project_id, doc.doc, cb) + return async.eachLimit( + docs, + HistoryManager.MAX_PARALLEL_REQUESTS, + resyncDoc, + callback + ) + } + ) + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/services/document-updater/app/js/HistoryRedisManager.js b/services/document-updater/app/js/HistoryRedisManager.js index a63d80b11c..5b9c76646c 100644 --- a/services/document-updater/app/js/HistoryRedisManager.js +++ b/services/document-updater/app/js/HistoryRedisManager.js @@ -11,23 +11,33 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let HistoryRedisManager; -const Settings = require('settings-sharelatex'); -const rclient = require("redis-sharelatex").createClient(Settings.redis.history); -const Keys = Settings.redis.history.key_schema; -const logger = require('logger-sharelatex'); +let HistoryRedisManager +const Settings = require('settings-sharelatex') +const rclient = require('redis-sharelatex').createClient(Settings.redis.history) +const Keys = Settings.redis.history.key_schema +const logger = require('logger-sharelatex') -module.exports = (HistoryRedisManager = { - recordDocHasHistoryOps(project_id, doc_id, ops, callback) { - if (ops == null) { ops = []; } - if (callback == null) { callback = function(error) {}; } - if (ops.length === 0) { - return callback(new Error("cannot push no ops")); // This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush - } - logger.log({project_id, doc_id}, "marking doc in project for history ops"); - return rclient.sadd(Keys.docsWithHistoryOps({project_id}), doc_id, function(error) { - if (error != null) { return callback(error); } - return callback(); - }); - } -}); +module.exports = HistoryRedisManager = { + recordDocHasHistoryOps(project_id, doc_id, ops, callback) { + if (ops == null) { + ops = [] + } + if (callback == null) { + callback = function (error) {} + } + if (ops.length === 0) { + return callback(new Error('cannot push no ops')) // This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush + } + logger.log({ project_id, doc_id }, 'marking doc in project for history ops') + return rclient.sadd( + Keys.docsWithHistoryOps({ project_id }), + doc_id, + function (error) { + if (error != null) { + return callback(error) + } + return callback() + } + ) + } +} diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 4dc1622b43..b6bd00214e 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -12,331 +12,470 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let HttpController; -const DocumentManager = require("./DocumentManager"); -const HistoryManager = require("./HistoryManager"); -const ProjectManager = require("./ProjectManager"); -const Errors = require("./Errors"); -const logger = require("logger-sharelatex"); -const Metrics = require("./Metrics"); -const ProjectFlusher = require("./ProjectFlusher"); -const DeleteQueueManager = require("./DeleteQueueManager"); -const async = require("async"); +let HttpController +const DocumentManager = require('./DocumentManager') +const HistoryManager = require('./HistoryManager') +const ProjectManager = require('./ProjectManager') +const Errors = require('./Errors') +const logger = require('logger-sharelatex') +const Metrics = require('./Metrics') +const ProjectFlusher = require('./ProjectFlusher') +const DeleteQueueManager = 
require('./DeleteQueueManager') +const async = require('async') -const TWO_MEGABYTES = 2 * 1024 * 1024; +const TWO_MEGABYTES = 2 * 1024 * 1024 -module.exports = (HttpController = { - getDoc(req, res, next) { - let fromVersion; - if (next == null) { next = function(error) {}; } - const { - doc_id - } = req.params; - const { - project_id - } = req.params; - logger.log({project_id, doc_id}, "getting doc via http"); - const timer = new Metrics.Timer("http.getDoc"); +module.exports = HttpController = { + getDoc(req, res, next) { + let fromVersion + if (next == null) { + next = function (error) {} + } + const { doc_id } = req.params + const { project_id } = req.params + logger.log({ project_id, doc_id }, 'getting doc via http') + const timer = new Metrics.Timer('http.getDoc') - if ((req.query != null ? req.query.fromVersion : undefined) != null) { - fromVersion = parseInt(req.query.fromVersion, 10); - } else { - fromVersion = -1; - } + if ((req.query != null ? req.query.fromVersion : undefined) != null) { + fromVersion = parseInt(req.query.fromVersion, 10) + } else { + fromVersion = -1 + } - return DocumentManager.getDocAndRecentOpsWithLock(project_id, doc_id, fromVersion, function(error, lines, version, ops, ranges, pathname) { - timer.done(); - if (error != null) { return next(error); } - logger.log({project_id, doc_id}, "got doc via http"); - if ((lines == null) || (version == null)) { - return next(new Errors.NotFoundError("document not found")); - } - return res.json({ - id: doc_id, - lines, - version, - ops, - ranges, - pathname - }); - }); - }, + return DocumentManager.getDocAndRecentOpsWithLock( + project_id, + doc_id, + fromVersion, + function (error, lines, version, ops, ranges, pathname) { + timer.done() + if (error != null) { + return next(error) + } + logger.log({ project_id, doc_id }, 'got doc via http') + if (lines == null || version == null) { + return next(new Errors.NotFoundError('document not found')) + } + return res.json({ + id: doc_id, + lines, + version, + ops, + ranges, + pathname + }) + } + ) + }, - _getTotalSizeOfLines(lines) { - let size = 0; - for (const line of Array.from(lines)) { - size += (line.length + 1); - } - return size; - }, + _getTotalSizeOfLines(lines) { + let size = 0 + for (const line of Array.from(lines)) { + size += line.length + 1 + } + return size + }, - getProjectDocsAndFlushIfOld(req, res, next) { - if (next == null) { next = function(error) {}; } - const { - project_id - } = req.params; - const projectStateHash = req.query != null ? req.query.state : undefined; - // exclude is string of existing docs "id:version,id:version,..." - const excludeItems = __guard__(req.query != null ? req.query.exclude : undefined, x => x.split(',')) || []; - logger.log({project_id, exclude: excludeItems}, "getting docs via http"); - const timer = new Metrics.Timer("http.getAllDocs"); - const excludeVersions = {}; - for (const item of Array.from(excludeItems)) { - const [id,version] = Array.from(item != null ? 
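// Worked example of the exclude parsing in this handler (values
// illustrative): a query of ?exclude=a1:101,b2:202 produces
// excludeItems = ['a1:101', 'b2:202'] and
// excludeVersions = { a1: '101', b2: '202' }, presumably letting
// ProjectManager skip docs the caller already holds at those versions.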
item.split(':') : undefined); - excludeVersions[id] = version; - } - logger.log({project_id, projectStateHash, excludeVersions}, "excluding versions"); - return ProjectManager.getProjectDocsAndFlushIfOld(project_id, projectStateHash, excludeVersions, function(error, result) { - timer.done(); - if (error instanceof Errors.ProjectStateChangedError) { - return res.sendStatus(409); // conflict - } else if (error != null) { - return next(error); - } else { - logger.log({project_id, result: ((Array.from(result).map((doc) => `${doc._id}:${doc.v}`)))}, "got docs via http"); - return res.send(result); - } - }); - }, + getProjectDocsAndFlushIfOld(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { project_id } = req.params + const projectStateHash = req.query != null ? req.query.state : undefined + // exclude is string of existing docs "id:version,id:version,..." + const excludeItems = + __guard__(req.query != null ? req.query.exclude : undefined, (x) => + x.split(',') + ) || [] + logger.log({ project_id, exclude: excludeItems }, 'getting docs via http') + const timer = new Metrics.Timer('http.getAllDocs') + const excludeVersions = {} + for (const item of Array.from(excludeItems)) { + const [id, version] = Array.from( + item != null ? item.split(':') : undefined + ) + excludeVersions[id] = version + } + logger.log( + { project_id, projectStateHash, excludeVersions }, + 'excluding versions' + ) + return ProjectManager.getProjectDocsAndFlushIfOld( + project_id, + projectStateHash, + excludeVersions, + function (error, result) { + timer.done() + if (error instanceof Errors.ProjectStateChangedError) { + return res.sendStatus(409) // conflict + } else if (error != null) { + return next(error) + } else { + logger.log( + { + project_id, + result: Array.from(result).map((doc) => `${doc._id}:${doc.v}`) + }, + 'got docs via http' + ) + return res.send(result) + } + } + ) + }, - clearProjectState(req, res, next) { - if (next == null) { next = function(error) {}; } - const { - project_id - } = req.params; - const timer = new Metrics.Timer("http.clearProjectState"); - logger.log({project_id}, "clearing project state via http"); - return ProjectManager.clearProjectState(project_id, function(error) { - timer.done(); - if (error != null) { - return next(error); - } else { - return res.sendStatus(200); - } - }); - }, + clearProjectState(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { project_id } = req.params + const timer = new Metrics.Timer('http.clearProjectState') + logger.log({ project_id }, 'clearing project state via http') + return ProjectManager.clearProjectState(project_id, function (error) { + timer.done() + if (error != null) { + return next(error) + } else { + return res.sendStatus(200) + } + }) + }, - setDoc(req, res, next) { - if (next == null) { next = function(error) {}; } - const { - doc_id - } = req.params; - const { - project_id - } = req.params; - const {lines, source, user_id, undoing} = req.body; - const lineSize = HttpController._getTotalSizeOfLines(lines); - if (lineSize > TWO_MEGABYTES) { - logger.log({project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response"); - return res.sendStatus(406); - } - logger.log({project_id, doc_id, lines, source, user_id, undoing}, "setting doc via http"); - const timer = new Metrics.Timer("http.setDoc"); - return DocumentManager.setDocWithLock(project_id, doc_id, lines, source, user_id, undoing, function(error) { - timer.done(); - if (error != null) { return 
next(error); } - logger.log({project_id, doc_id}, "set doc via http"); - return res.sendStatus(204); - }); - }, // No Content + setDoc(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { doc_id } = req.params + const { project_id } = req.params + const { lines, source, user_id, undoing } = req.body + const lineSize = HttpController._getTotalSizeOfLines(lines) + if (lineSize > TWO_MEGABYTES) { + logger.log( + { project_id, doc_id, source, lineSize, user_id }, + 'document too large, returning 406 response' + ) + return res.sendStatus(406) + } + logger.log( + { project_id, doc_id, lines, source, user_id, undoing }, + 'setting doc via http' + ) + const timer = new Metrics.Timer('http.setDoc') + return DocumentManager.setDocWithLock( + project_id, + doc_id, + lines, + source, + user_id, + undoing, + function (error) { + timer.done() + if (error != null) { + return next(error) + } + logger.log({ project_id, doc_id }, 'set doc via http') + return res.sendStatus(204) + } + ) + }, // No Content + flushDocIfLoaded(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { doc_id } = req.params + const { project_id } = req.params + logger.log({ project_id, doc_id }, 'flushing doc via http') + const timer = new Metrics.Timer('http.flushDoc') + return DocumentManager.flushDocIfLoadedWithLock( + project_id, + doc_id, + function (error) { + timer.done() + if (error != null) { + return next(error) + } + logger.log({ project_id, doc_id }, 'flushed doc via http') + return res.sendStatus(204) + } + ) + }, // No Content - flushDocIfLoaded(req, res, next) { - if (next == null) { next = function(error) {}; } - const { - doc_id - } = req.params; - const { - project_id - } = req.params; - logger.log({project_id, doc_id}, "flushing doc via http"); - const timer = new Metrics.Timer("http.flushDoc"); - return DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function(error) { - timer.done(); - if (error != null) { return next(error); } - logger.log({project_id, doc_id}, "flushed doc via http"); - return res.sendStatus(204); - }); - }, // No Content + deleteDoc(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { doc_id } = req.params + const { project_id } = req.params + const ignoreFlushErrors = req.query.ignore_flush_errors === 'true' + const timer = new Metrics.Timer('http.deleteDoc') + logger.log({ project_id, doc_id }, 'deleting doc via http') + return DocumentManager.flushAndDeleteDocWithLock( + project_id, + doc_id, + { ignoreFlushErrors }, + function (error) { + timer.done() + // There is no harm in flushing project history if the previous call + // failed and sometimes it is required + HistoryManager.flushProjectChangesAsync(project_id) - deleteDoc(req, res, next) { - if (next == null) { next = function(error) {}; } - const { - doc_id - } = req.params; - const { - project_id - } = req.params; - const ignoreFlushErrors = req.query.ignore_flush_errors === 'true'; - const timer = new Metrics.Timer("http.deleteDoc"); - logger.log({project_id, doc_id}, "deleting doc via http"); - return DocumentManager.flushAndDeleteDocWithLock(project_id, doc_id, { ignoreFlushErrors }, function(error) { - timer.done(); - // There is no harm in flushing project history if the previous call - // failed and sometimes it is required - HistoryManager.flushProjectChangesAsync(project_id); + if (error != null) { + return next(error) + } + logger.log({ project_id, doc_id }, 'deleted doc via http') + return res.sendStatus(204) + } + ) + }, // 
No Content - if (error != null) { return next(error); } - logger.log({project_id, doc_id}, "deleted doc via http"); - return res.sendStatus(204); - }); - }, // No Content + flushProject(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { project_id } = req.params + logger.log({ project_id }, 'flushing project via http') + const timer = new Metrics.Timer('http.flushProject') + return ProjectManager.flushProjectWithLocks(project_id, function (error) { + timer.done() + if (error != null) { + return next(error) + } + logger.log({ project_id }, 'flushed project via http') + return res.sendStatus(204) + }) + }, // No Content - flushProject(req, res, next) { - if (next == null) { next = function(error) {}; } - const { - project_id - } = req.params; - logger.log({project_id}, "flushing project via http"); - const timer = new Metrics.Timer("http.flushProject"); - return ProjectManager.flushProjectWithLocks(project_id, function(error) { - timer.done(); - if (error != null) { return next(error); } - logger.log({project_id}, "flushed project via http"); - return res.sendStatus(204); - }); - }, // No Content + deleteProject(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { project_id } = req.params + logger.log({ project_id }, 'deleting project via http') + const options = {} + if (req.query != null ? req.query.background : undefined) { + options.background = true + } // allow non-urgent flushes to be queued + if (req.query != null ? req.query.shutdown : undefined) { + options.skip_history_flush = true + } // don't flush history when realtime shuts down + if (req.query != null ? req.query.background : undefined) { + return ProjectManager.queueFlushAndDeleteProject(project_id, function ( + error + ) { + if (error != null) { + return next(error) + } + logger.log({ project_id }, 'queue delete of project via http') + return res.sendStatus(204) + }) // No Content + } else { + const timer = new Metrics.Timer('http.deleteProject') + return ProjectManager.flushAndDeleteProjectWithLocks( + project_id, + options, + function (error) { + timer.done() + if (error != null) { + return next(error) + } + logger.log({ project_id }, 'deleted project via http') + return res.sendStatus(204) + } + ) + } + }, // No Content - deleteProject(req, res, next) { - if (next == null) { next = function(error) {}; } - const { - project_id - } = req.params; - logger.log({project_id}, "deleting project via http"); - const options = {}; - if (req.query != null ? req.query.background : undefined) { options.background = true; } // allow non-urgent flushes to be queued - if (req.query != null ? req.query.shutdown : undefined) { options.skip_history_flush = true; } // don't flush history when realtime shuts down - if (req.query != null ? req.query.background : undefined) { - return ProjectManager.queueFlushAndDeleteProject(project_id, function(error) { - if (error != null) { return next(error); } - logger.log({project_id}, "queue delete of project via http"); - return res.sendStatus(204); - }); // No Content - } else { - const timer = new Metrics.Timer("http.deleteProject"); - return ProjectManager.flushAndDeleteProjectWithLocks(project_id, options, function(error) { - timer.done(); - if (error != null) { return next(error); } - logger.log({project_id}, "deleted project via http"); - return res.sendStatus(204); - }); - } - }, // No Content + deleteMultipleProjects(req, res, next) { + if (next == null) { + next = function (error) {} + } + const project_ids = + (req.body != null ? 
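// Flag mapping in deleteProject above, as read from req.query in this hunk:
// background=true sets options.background and queues the delete via
// queueFlushAndDeleteProject instead of flushing inline; shutdown=true sets
// options.skip_history_flush so project history is not flushed while
// realtime shuts down.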
req.body.project_ids : undefined) || [] + logger.log({ project_ids }, 'deleting multiple projects via http') + return async.eachSeries( + project_ids, + function (project_id, cb) { + logger.log({ project_id }, 'queue delete of project via http') + return ProjectManager.queueFlushAndDeleteProject(project_id, cb) + }, + function (error) { + if (error != null) { + return next(error) + } + return res.sendStatus(204) + } + ) + }, // No Content - deleteMultipleProjects(req, res, next) { - if (next == null) { next = function(error) {}; } - const project_ids = (req.body != null ? req.body.project_ids : undefined) || []; - logger.log({project_ids}, "deleting multiple projects via http"); - return async.eachSeries(project_ids, function(project_id, cb) { - logger.log({project_id}, "queue delete of project via http"); - return ProjectManager.queueFlushAndDeleteProject(project_id, cb); - } - , function(error) { - if (error != null) { return next(error); } - return res.sendStatus(204); - }); - }, // No Content + acceptChanges(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { project_id, doc_id } = req.params + let change_ids = req.body != null ? req.body.change_ids : undefined + if (change_ids == null) { + change_ids = [req.params.change_id] + } + logger.log( + { project_id, doc_id }, + `accepting ${change_ids.length} changes via http` + ) + const timer = new Metrics.Timer('http.acceptChanges') + return DocumentManager.acceptChangesWithLock( + project_id, + doc_id, + change_ids, + function (error) { + timer.done() + if (error != null) { + return next(error) + } + logger.log( + { project_id, doc_id }, + `accepted ${change_ids.length} changes via http` + ) + return res.sendStatus(204) + } + ) + }, // No Content - acceptChanges(req, res, next) { - if (next == null) { next = function(error) {}; } - const {project_id, doc_id} = req.params; - let change_ids = req.body != null ? 
req.body.change_ids : undefined; - if ((change_ids == null)) { - change_ids = [ req.params.change_id ]; - } - logger.log({project_id, doc_id}, `accepting ${ change_ids.length } changes via http`); - const timer = new Metrics.Timer("http.acceptChanges"); - return DocumentManager.acceptChangesWithLock(project_id, doc_id, change_ids, function(error) { - timer.done(); - if (error != null) { return next(error); } - logger.log({project_id, doc_id}, `accepted ${ change_ids.length } changes via http`); - return res.sendStatus(204); - }); - }, // No Content + deleteComment(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { project_id, doc_id, comment_id } = req.params + logger.log({ project_id, doc_id, comment_id }, 'deleting comment via http') + const timer = new Metrics.Timer('http.deleteComment') + return DocumentManager.deleteCommentWithLock( + project_id, + doc_id, + comment_id, + function (error) { + timer.done() + if (error != null) { + return next(error) + } + logger.log( + { project_id, doc_id, comment_id }, + 'deleted comment via http' + ) + return res.sendStatus(204) + } + ) + }, // No Content - deleteComment(req, res, next) { - if (next == null) { next = function(error) {}; } - const {project_id, doc_id, comment_id} = req.params; - logger.log({project_id, doc_id, comment_id}, "deleting comment via http"); - const timer = new Metrics.Timer("http.deleteComment"); - return DocumentManager.deleteCommentWithLock(project_id, doc_id, comment_id, function(error) { - timer.done(); - if (error != null) { return next(error); } - logger.log({project_id, doc_id, comment_id}, "deleted comment via http"); - return res.sendStatus(204); - }); - }, // No Content + updateProject(req, res, next) { + if (next == null) { + next = function (error) {} + } + const timer = new Metrics.Timer('http.updateProject') + const { project_id } = req.params + const { + projectHistoryId, + userId, + docUpdates, + fileUpdates, + version + } = req.body + logger.log( + { project_id, docUpdates, fileUpdates, version }, + 'updating project via http' + ) - updateProject(req, res, next) { - if (next == null) { next = function(error) {}; } - const timer = new Metrics.Timer("http.updateProject"); - const { - project_id - } = req.params; - const {projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body; - logger.log({project_id, docUpdates, fileUpdates, version}, "updating project via http"); + return ProjectManager.updateProjectWithLocks( + project_id, + projectHistoryId, + userId, + docUpdates, + fileUpdates, + version, + function (error) { + timer.done() + if (error != null) { + return next(error) + } + logger.log({ project_id }, 'updated project via http') + return res.sendStatus(204) + } + ) + }, // No Content - return ProjectManager.updateProjectWithLocks(project_id, projectHistoryId, userId, docUpdates, fileUpdates, version, function(error) { - timer.done(); - if (error != null) { return next(error); } - logger.log({project_id}, "updated project via http"); - return res.sendStatus(204); - }); - }, // No Content + resyncProjectHistory(req, res, next) { + if (next == null) { + next = function (error) {} + } + const { project_id } = req.params + const { projectHistoryId, docs, files } = req.body - resyncProjectHistory(req, res, next) { - if (next == null) { next = function(error) {}; } - const { - project_id - } = req.params; - const {projectHistoryId, docs, files} = req.body; + logger.log( + { project_id, docs, files }, + 'queuing project history resync via http' + ) + return 
HistoryManager.resyncProjectHistory( + project_id, + projectHistoryId, + docs, + files, + function (error) { + if (error != null) { + return next(error) + } + logger.log({ project_id }, 'queued project history resync via http') + return res.sendStatus(204) + } + ) + }, - logger.log({project_id, docs, files}, "queuing project history resync via http"); - return HistoryManager.resyncProjectHistory(project_id, projectHistoryId, docs, files, function(error) { - if (error != null) { return next(error); } - logger.log({project_id}, "queued project history resync via http"); - return res.sendStatus(204); - }); - }, + flushAllProjects(req, res, next) { + if (next == null) { + next = function (error) {} + } + res.setTimeout(5 * 60 * 1000) + const options = { + limit: req.query.limit || 1000, + concurrency: req.query.concurrency || 5, + dryRun: req.query.dryRun || false + } + return ProjectFlusher.flushAllProjects(options, function ( + err, + project_ids + ) { + if (err != null) { + logger.err({ err }, 'error bulk flushing projects') + return res.sendStatus(500) + } else { + return res.send(project_ids) + } + }) + }, - flushAllProjects(req, res, next ){ - if (next == null) { next = function(error){}; } - res.setTimeout(5 * 60 * 1000); - const options = { - limit : req.query.limit || 1000, - concurrency : req.query.concurrency || 5, - dryRun : req.query.dryRun || false - }; - return ProjectFlusher.flushAllProjects(options, function(err, project_ids){ - if (err != null) { - logger.err({err}, "error bulk flushing projects"); - return res.sendStatus(500); - } else { - return res.send(project_ids); - } - }); - }, - - flushQueuedProjects(req, res, next) { - if (next == null) { next = function(error) {}; } - res.setTimeout(10 * 60 * 1000); - const options = { - limit : req.query.limit || 1000, - timeout: 5 * 60 * 1000, - min_delete_age: req.query.min_delete_age || (5 * 60 * 1000) - }; - return DeleteQueueManager.flushAndDeleteOldProjects(options, function(err, flushed){ - if (err != null) { - logger.err({err}, "error flushing old projects"); - return res.sendStatus(500); - } else { - logger.log({flushed}, "flush of queued projects completed"); - return res.send({flushed}); - } - }); - } -}); + flushQueuedProjects(req, res, next) { + if (next == null) { + next = function (error) {} + } + res.setTimeout(10 * 60 * 1000) + const options = { + limit: req.query.limit || 1000, + timeout: 5 * 60 * 1000, + min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 + } + return DeleteQueueManager.flushAndDeleteOldProjects(options, function ( + err, + flushed + ) { + if (err != null) { + logger.err({ err }, 'error flushing old projects') + return res.sendStatus(500) + } else { + logger.log({ flushed }, 'flush of queued projects completed') + return res.send({ flushed }) + } + }) + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/services/document-updater/app/js/LockManager.js b/services/document-updater/app/js/LockManager.js index a861ed8607..2f08dfd3dd 100644 --- a/services/document-updater/app/js/LockManager.js +++ b/services/document-updater/app/js/LockManager.js @@ -11,128 +11,159 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let LockManager; -const metrics = require('./Metrics'); -const Settings = require('settings-sharelatex'); -const redis = require("redis-sharelatex"); -const rclient = redis.createClient(Settings.redis.lock); -const keys = Settings.redis.lock.key_schema; -const logger = require("logger-sharelatex"); -const os = require("os"); -const crypto = require("crypto"); +let LockManager +const metrics = require('./Metrics') +const Settings = require('settings-sharelatex') +const redis = require('redis-sharelatex') +const rclient = redis.createClient(Settings.redis.lock) +const keys = Settings.redis.lock.key_schema +const logger = require('logger-sharelatex') +const os = require('os') +const crypto = require('crypto') -const Profiler = require("./Profiler"); +const Profiler = require('./Profiler') -const HOST = os.hostname(); -const PID = process.pid; -const RND = crypto.randomBytes(4).toString('hex'); -let COUNT = 0; +const HOST = os.hostname() +const PID = process.pid +const RND = crypto.randomBytes(4).toString('hex') +let COUNT = 0 -const MAX_REDIS_REQUEST_LENGTH = 5000; // 5 seconds +const MAX_REDIS_REQUEST_LENGTH = 5000 // 5 seconds -module.exports = (LockManager = { - LOCK_TEST_INTERVAL: 50, // 50ms between each test of the lock - MAX_TEST_INTERVAL: 1000, // back off to 1s between each test of the lock - MAX_LOCK_WAIT_TIME: 10000, // 10s maximum time to spend trying to get the lock - LOCK_TTL: 30, // seconds. Time until lock auto expires in redis. +module.exports = LockManager = { + LOCK_TEST_INTERVAL: 50, // 50ms between each test of the lock + MAX_TEST_INTERVAL: 1000, // back off to 1s between each test of the lock + MAX_LOCK_WAIT_TIME: 10000, // 10s maximum time to spend trying to get the lock + LOCK_TTL: 30, // seconds. Time until lock auto expires in redis. 
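
The lock scheme in this file follows the single-instance Redis pattern from the distlock article referenced in the code below: acquire with SET ... EX <ttl> NX so at most one holder can exist until the TTL expires, and release with a Lua script that deletes the key only if it still contains that holder's value. A minimal sketch of the pattern, assuming a connected rclient with the same callback-style set/eval API used throughout this patch (function and variable names here are illustrative, not part of the codebase):

const UNLOCK_SCRIPT =
  'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end'

// Acquire: NX makes SET succeed only when the key is absent, and EX bounds the
// hold time so a crashed process cannot leave the doc locked forever.
function acquire(rclient, key, lockValue, ttlSeconds, callback) {
  rclient.set(key, lockValue, 'EX', ttlSeconds, 'NX', function (err, reply) {
    if (err) return callback(err)
    callback(null, reply === 'OK') // reply is 'OK' on success, null otherwise
  })
}

// Release: the Lua script runs atomically inside Redis, so we never delete a
// lock that expired and was re-acquired by another process under a new value.
function release(rclient, key, lockValue, callback) {
  rclient.eval(UNLOCK_SCRIPT, 1, key, lockValue, callback)
}
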
- // Use a signed lock value as described in - // http://redis.io/topics/distlock#correct-implementation-with-a-single-instance - // to prevent accidental unlocking by multiple processes - randomLock() { - const time = Date.now(); - return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}`; - }, + // Use a signed lock value as described in + // http://redis.io/topics/distlock#correct-implementation-with-a-single-instance + // to prevent accidental unlocking by multiple processes + randomLock() { + const time = Date.now() + return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}` + }, - unlockScript: 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end', + unlockScript: + 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end', - tryLock(doc_id, callback){ - if (callback == null) { callback = function(err, isFree){}; } - const lockValue = LockManager.randomLock(); - const key = keys.blockingKey({doc_id}); - const profile = new Profiler("tryLock", {doc_id, key, lockValue}); - return rclient.set(key, lockValue, "EX", this.LOCK_TTL, "NX", function(err, gotLock){ - if (err != null) { return callback(err); } - if (gotLock === "OK") { - metrics.inc("doc-not-blocking"); - const timeTaken = profile.log("got lock").end(); - if (timeTaken > MAX_REDIS_REQUEST_LENGTH) { - // took too long, so try to free the lock - return LockManager.releaseLock(doc_id, lockValue, function(err, result) { - if (err != null) { return callback(err); } // error freeing lock - return callback(null, false); - }); // tell caller they didn't get the lock - } else { - return callback(null, true, lockValue); - } - } else { - metrics.inc("doc-blocking"); - profile.log("doc is locked").end(); - return callback(null, false); - } - }); - }, + tryLock(doc_id, callback) { + if (callback == null) { + callback = function (err, isFree) {} + } + const lockValue = LockManager.randomLock() + const key = keys.blockingKey({ doc_id }) + const profile = new Profiler('tryLock', { doc_id, key, lockValue }) + return rclient.set(key, lockValue, 'EX', this.LOCK_TTL, 'NX', function ( + err, + gotLock + ) { + if (err != null) { + return callback(err) + } + if (gotLock === 'OK') { + metrics.inc('doc-not-blocking') + const timeTaken = profile.log('got lock').end() + if (timeTaken > MAX_REDIS_REQUEST_LENGTH) { + // took too long, so try to free the lock + return LockManager.releaseLock(doc_id, lockValue, function ( + err, + result + ) { + if (err != null) { + return callback(err) + } // error freeing lock + return callback(null, false) + }) // tell caller they didn't get the lock + } else { + return callback(null, true, lockValue) + } + } else { + metrics.inc('doc-blocking') + profile.log('doc is locked').end() + return callback(null, false) + } + }) + }, - getLock(doc_id, callback) { - let attempt; - if (callback == null) { callback = function(error, lockValue) {}; } - const startTime = Date.now(); - let testInterval = LockManager.LOCK_TEST_INTERVAL; - const profile = new Profiler("getLock", {doc_id}); - return (attempt = function() { - if ((Date.now() - startTime) > LockManager.MAX_LOCK_WAIT_TIME) { - const e = new Error("Timeout"); - e.doc_id = doc_id; - profile.log("timeout").end(); - return callback(e); - } + getLock(doc_id, callback) { + let attempt + if (callback == null) { + callback = function (error, lockValue) {} + } + const startTime = Date.now() + let testInterval = LockManager.LOCK_TEST_INTERVAL + 
const profile = new Profiler('getLock', { doc_id }) + return (attempt = function () { + if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) { + const e = new Error('Timeout') + e.doc_id = doc_id + profile.log('timeout').end() + return callback(e) + } - return LockManager.tryLock(doc_id, function(error, gotLock, lockValue) { - if (error != null) { return callback(error); } - profile.log("tryLock"); - if (gotLock) { - profile.end(); - return callback(null, lockValue); - } else { - setTimeout(attempt, testInterval); - // back off when the lock is taken to avoid overloading - return testInterval = Math.min(testInterval * 2, LockManager.MAX_TEST_INTERVAL); - } - }); - })(); - }, + return LockManager.tryLock(doc_id, function (error, gotLock, lockValue) { + if (error != null) { + return callback(error) + } + profile.log('tryLock') + if (gotLock) { + profile.end() + return callback(null, lockValue) + } else { + setTimeout(attempt, testInterval) + // back off when the lock is taken to avoid overloading + return (testInterval = Math.min( + testInterval * 2, + LockManager.MAX_TEST_INTERVAL + )) + } + }) + })() + }, - checkLock(doc_id, callback){ - if (callback == null) { callback = function(err, isFree){}; } - const key = keys.blockingKey({doc_id}); - return rclient.exists(key, function(err, exists) { - if (err != null) { return callback(err); } - exists = parseInt(exists); - if (exists === 1) { - metrics.inc("doc-blocking"); - return callback(null, false); - } else { - metrics.inc("doc-not-blocking"); - return callback(null, true); - } - }); - }, + checkLock(doc_id, callback) { + if (callback == null) { + callback = function (err, isFree) {} + } + const key = keys.blockingKey({ doc_id }) + return rclient.exists(key, function (err, exists) { + if (err != null) { + return callback(err) + } + exists = parseInt(exists) + if (exists === 1) { + metrics.inc('doc-blocking') + return callback(null, false) + } else { + metrics.inc('doc-not-blocking') + return callback(null, true) + } + }) + }, - releaseLock(doc_id, lockValue, callback){ - const key = keys.blockingKey({doc_id}); - const profile = new Profiler("releaseLock", {doc_id, key, lockValue}); - return rclient.eval(LockManager.unlockScript, 1, key, lockValue, function(err, result) { - if (err != null) { - return callback(err); - } else if ((result != null) && (result !== 1)) { // successful unlock should release exactly one key - profile.log("unlockScript:expired-lock").end(); - logger.error({doc_id, key, lockValue, redis_err:err, redis_result:result}, "unlocking error"); - metrics.inc("unlock-error"); - return callback(new Error("tried to release timed out lock")); - } else { - profile.log("unlockScript:ok").end(); - return callback(null,result); - } - }); - } -}); + releaseLock(doc_id, lockValue, callback) { + const key = keys.blockingKey({ doc_id }) + const profile = new Profiler('releaseLock', { doc_id, key, lockValue }) + return rclient.eval(LockManager.unlockScript, 1, key, lockValue, function ( + err, + result + ) { + if (err != null) { + return callback(err) + } else if (result != null && result !== 1) { + // successful unlock should release exactly one key + profile.log('unlockScript:expired-lock').end() + logger.error( + { doc_id, key, lockValue, redis_err: err, redis_result: result }, + 'unlocking error' + ) + metrics.inc('unlock-error') + return callback(new Error('tried to release timed out lock')) + } else { + profile.log('unlockScript:ok').end() + return callback(null, result) + } + }) + } +} diff --git 
a/services/document-updater/app/js/LoggerSerializers.js b/services/document-updater/app/js/LoggerSerializers.js index bd55383fd3..b2c015f078 100644 --- a/services/document-updater/app/js/LoggerSerializers.js +++ b/services/document-updater/app/js/LoggerSerializers.js @@ -10,37 +10,58 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const _ = require('lodash'); +const _ = require('lodash') -const showLength = function(thing) { - if ((thing != null ? thing.length : undefined)) { return thing.length; } else { return thing; } -}; +const showLength = function (thing) { + if (thing != null ? thing.length : undefined) { + return thing.length + } else { + return thing + } +} -const showUpdateLength = function(update) { - if ((update != null ? update.op : undefined) instanceof Array) { - const copy = _.cloneDeep(update); - copy.op.forEach(function(element, index) { - if (__guard__(element != null ? element.i : undefined, x => x.length) != null) { copy.op[index].i = element.i.length; } - if (__guard__(element != null ? element.d : undefined, x1 => x1.length) != null) { copy.op[index].d = element.d.length; } - if (__guard__(element != null ? element.c : undefined, x2 => x2.length) != null) { return copy.op[index].c = element.c.length; } - }); - return copy; - } else { - return update; - } -}; +const showUpdateLength = function (update) { + if ((update != null ? update.op : undefined) instanceof Array) { + const copy = _.cloneDeep(update) + copy.op.forEach(function (element, index) { + if ( + __guard__(element != null ? element.i : undefined, (x) => x.length) != + null + ) { + copy.op[index].i = element.i.length + } + if ( + __guard__(element != null ? element.d : undefined, (x1) => x1.length) != + null + ) { + copy.op[index].d = element.d.length + } + if ( + __guard__(element != null ? element.c : undefined, (x2) => x2.length) != + null + ) { + return (copy.op[index].c = element.c.length) + } + }) + return copy + } else { + return update + } +} module.exports = { - // replace long values with their length - lines: showLength, - oldLines: showLength, - newLines: showLength, - docLines: showLength, - newDocLines: showLength, - ranges: showLength, - update: showUpdateLength -}; + // replace long values with their length + lines: showLength, + oldLines: showLength, + newLines: showLength, + docLines: showLength, + newDocLines: showLength, + ranges: showLength, + update: showUpdateLength +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/services/document-updater/app/js/Metrics.js b/services/document-updater/app/js/Metrics.js index d5bfb88492..e9676415ea 100644 --- a/services/document-updater/app/js/Metrics.js +++ b/services/document-updater/app/js/Metrics.js @@ -1,3 +1,3 @@ // TODO: This file was created by bulk-decaffeinate. // Sanity-check the conversion and remove this comment. 
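
The __guard__ helper that decaffeinate emits in the files above is just null-safe function application, equivalent to modern optional chaining; a small self-contained illustration (the values are hypothetical):

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}

const element = { i: 'abc' }
__guard__(element != null ? element.i : undefined, (x) => x.length) // => 3
__guard__(undefined, (x) => x.length) // => undefined, no TypeError thrown
// With modern syntax the same check is simply: element?.i?.length
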
-module.exports = require("metrics-sharelatex"); \ No newline at end of file +module.exports = require('metrics-sharelatex') diff --git a/services/document-updater/app/js/PersistenceManager.js b/services/document-updater/app/js/PersistenceManager.js index 2a9e5e9d04..a9f384afa1 100644 --- a/services/document-updater/app/js/PersistenceManager.js +++ b/services/document-updater/app/js/PersistenceManager.js @@ -14,129 +14,175 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let PersistenceManager; -const Settings = require("settings-sharelatex"); -const Errors = require("./Errors"); -const Metrics = require("./Metrics"); -const logger = require("logger-sharelatex"); -const request = (require("requestretry")).defaults({ - maxAttempts: 2, - retryDelay: 10 -}); +let PersistenceManager +const Settings = require('settings-sharelatex') +const Errors = require('./Errors') +const Metrics = require('./Metrics') +const logger = require('logger-sharelatex') +const request = require('requestretry').defaults({ + maxAttempts: 2, + retryDelay: 10 +}) // We have to be quick with HTTP calls because we're holding a lock that // expires after 30 seconds. We can't let any errors in the rest of the stack // hold us up, and need to bail out quickly if there is a problem. -const MAX_HTTP_REQUEST_LENGTH = 5000; // 5 seconds +const MAX_HTTP_REQUEST_LENGTH = 5000 // 5 seconds -const updateMetric = function(method, error, response) { - // find the status, with special handling for connection timeouts - // https://github.com/request/request#timeouts - const status = (() => { - if ((error != null ? error.connect : undefined) === true) { - return `${error.code} (connect)`; - } else if (error != null) { - return error.code; - } else if (response != null) { - return response.statusCode; - } - })(); - Metrics.inc(method, 1, {status}); - if ((error != null ? error.attempts : undefined) > 1) { - Metrics.inc(`${method}-retries`, 1, {status: 'error'}); - } - if ((response != null ? response.attempts : undefined) > 1) { - return Metrics.inc(`${method}-retries`, 1, {status: 'success'}); - } -}; +const updateMetric = function (method, error, response) { + // find the status, with special handling for connection timeouts + // https://github.com/request/request#timeouts + const status = (() => { + if ((error != null ? error.connect : undefined) === true) { + return `${error.code} (connect)` + } else if (error != null) { + return error.code + } else if (response != null) { + return response.statusCode + } + })() + Metrics.inc(method, 1, { status }) + if ((error != null ? error.attempts : undefined) > 1) { + Metrics.inc(`${method}-retries`, 1, { status: 'error' }) + } + if ((response != null ? 
response.attempts : undefined) > 1) { + return Metrics.inc(`${method}-retries`, 1, { status: 'success' }) + } +} -module.exports = (PersistenceManager = { - getDoc(project_id, doc_id, _callback) { - if (_callback == null) { _callback = function(error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) {}; } - const timer = new Metrics.Timer("persistenceManager.getDoc"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; +module.exports = PersistenceManager = { + getDoc(project_id, doc_id, _callback) { + if (_callback == null) { + _callback = function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + projectHistoryType + ) {} + } + const timer = new Metrics.Timer('persistenceManager.getDoc') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}`; - return request({ - url, - method: "GET", - headers: { - "accept": "application/json" - }, - auth: { - user: Settings.apis.web.user, - pass: Settings.apis.web.pass, - sendImmediately: true - }, - jar: false, - timeout: MAX_HTTP_REQUEST_LENGTH - }, function(error, res, body) { - updateMetric('getDoc', error, res); - if (error != null) { return callback(error); } - if ((res.statusCode >= 200) && (res.statusCode < 300)) { - try { - body = JSON.parse(body); - } catch (e) { - return callback(e); - } - if ((body.lines == null)) { - return callback(new Error("web API response had no doc lines")); - } - if ((body.version == null) || !body.version instanceof Number) { - return callback(new Error("web API response had no valid doc version")); - } - if ((body.pathname == null)) { - return callback(new Error("web API response had no valid doc pathname")); - } - return callback(null, body.lines, body.version, body.ranges, body.pathname, body.projectHistoryId, body.projectHistoryType); - } else if (res.statusCode === 404) { - return callback(new Errors.NotFoundError(`doc not not found: ${url}`)); - } else { - return callback(new Error(`error accessing web API: ${url} ${res.statusCode}`)); - } - }); - }, + const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}` + return request( + { + url, + method: 'GET', + headers: { + accept: 'application/json' + }, + auth: { + user: Settings.apis.web.user, + pass: Settings.apis.web.pass, + sendImmediately: true + }, + jar: false, + timeout: MAX_HTTP_REQUEST_LENGTH + }, + function (error, res, body) { + updateMetric('getDoc', error, res) + if (error != null) { + return callback(error) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + try { + body = JSON.parse(body) + } catch (e) { + return callback(e) + } + if (body.lines == null) { + return callback(new Error('web API response had no doc lines')) + } + if (body.version == null || !body.version instanceof Number) { + return callback( + new Error('web API response had no valid doc version') + ) + } + if (body.pathname == null) { + return callback( + new Error('web API response had no valid doc pathname') + ) + } + return callback( + null, + body.lines, + body.version, + body.ranges, + body.pathname, + body.projectHistoryId, + body.projectHistoryType + ) + } else if (res.statusCode === 404) { + return callback(new Errors.NotFoundError(`doc not not found: ${url}`)) + } else { + return callback( + new Error(`error accessing web API: ${url} ${res.statusCode}`) + ) + } + } + ) + }, - setDoc(project_id, doc_id, lines, version, 
ranges, lastUpdatedAt, lastUpdatedBy,_callback) { - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("persistenceManager.setDoc"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; - - const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}`; - return request({ - url, - method: "POST", - json: { - lines, - ranges, - version, - lastUpdatedBy, - lastUpdatedAt - }, - auth: { - user: Settings.apis.web.user, - pass: Settings.apis.web.pass, - sendImmediately: true - }, - jar: false, - timeout: MAX_HTTP_REQUEST_LENGTH - }, function(error, res, body) { - updateMetric('setDoc', error, res); - if (error != null) { return callback(error); } - if ((res.statusCode >= 200) && (res.statusCode < 300)) { - return callback(null); - } else if (res.statusCode === 404) { - return callback(new Errors.NotFoundError(`doc not not found: ${url}`)); - } else { - return callback(new Error(`error accessing web API: ${url} ${res.statusCode}`)); - } - }); - } -}); + setDoc( + project_id, + doc_id, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy, + _callback + ) { + if (_callback == null) { + _callback = function (error) {} + } + const timer = new Metrics.Timer('persistenceManager.setDoc') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } + const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}` + return request( + { + url, + method: 'POST', + json: { + lines, + ranges, + version, + lastUpdatedBy, + lastUpdatedAt + }, + auth: { + user: Settings.apis.web.user, + pass: Settings.apis.web.pass, + sendImmediately: true + }, + jar: false, + timeout: MAX_HTTP_REQUEST_LENGTH + }, + function (error, res, body) { + updateMetric('setDoc', error, res) + if (error != null) { + return callback(error) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + return callback(null) + } else if (res.statusCode === 404) { + return callback(new Errors.NotFoundError(`doc not not found: ${url}`)) + } else { + return callback( + new Error(`error accessing web API: ${url} ${res.statusCode}`) + ) + } + } + ) + } +} diff --git a/services/document-updater/app/js/Profiler.js b/services/document-updater/app/js/Profiler.js index 23e480bea8..60431e64cb 100644 --- a/services/document-updater/app/js/Profiler.js +++ b/services/document-updater/app/js/Profiler.js @@ -8,54 +8,55 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let Profiler; -const Settings = require('settings-sharelatex'); -const logger = require('logger-sharelatex'); +let Profiler +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') -const deltaMs = function(ta, tb) { - const nanoSeconds = ((ta[0]-tb[0])*1e9) + (ta[1]-tb[1]); - const milliSeconds = Math.floor(nanoSeconds*1e-6); - return milliSeconds; -}; +const deltaMs = function (ta, tb) { + const nanoSeconds = (ta[0] - tb[0]) * 1e9 + (ta[1] - tb[1]) + const milliSeconds = Math.floor(nanoSeconds * 1e-6) + return milliSeconds +} -module.exports = (Profiler = (function() { - Profiler = class Profiler { - static initClass() { - this.prototype.LOG_CUTOFF_TIME = 1000; - } +module.exports = Profiler = (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.LOG_CUTOFF_TIME = 1000 + } - constructor(name, args) { - this.name = name; - this.args = args; - this.t0 = 
(this.t = process.hrtime()); - this.start = new Date(); - this.updateTimes = []; - } + constructor(name, args) { + this.name = name + this.args = args + this.t0 = this.t = process.hrtime() + this.start = new Date() + this.updateTimes = [] + } - log(label) { - const t1 = process.hrtime(); - const dtMilliSec = deltaMs(t1, this.t); - this.t = t1; - this.updateTimes.push([label, dtMilliSec]); // timings in ms - return this; // make it chainable - } + log(label) { + const t1 = process.hrtime() + const dtMilliSec = deltaMs(t1, this.t) + this.t = t1 + this.updateTimes.push([label, dtMilliSec]) // timings in ms + return this // make it chainable + } - end(message) { - const totalTime = deltaMs(this.t, this.t0); - if (totalTime > this.LOG_CUTOFF_TIME) { // log anything greater than cutoff - const args = {}; - for (const k in this.args) { - const v = this.args[k]; - args[k] = v; - } - args.updateTimes = this.updateTimes; - args.start = this.start; - args.end = new Date(); - logger.log(args, this.name); - } - return totalTime; - } - }; - Profiler.initClass(); - return Profiler; -})()); + end(message) { + const totalTime = deltaMs(this.t, this.t0) + if (totalTime > this.LOG_CUTOFF_TIME) { + // log anything greater than cutoff + const args = {} + for (const k in this.args) { + const v = this.args[k] + args[k] = v + } + args.updateTimes = this.updateTimes + args.start = this.start + args.end = new Date() + logger.log(args, this.name) + } + return totalTime + } + } + Profiler.initClass() + return Profiler +})() diff --git a/services/document-updater/app/js/ProjectFlusher.js b/services/document-updater/app/js/ProjectFlusher.js index ef7bb834c2..7f9783ac0d 100644 --- a/services/document-updater/app/js/ProjectFlusher.js +++ b/services/document-updater/app/js/ProjectFlusher.js @@ -12,96 +12,118 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const request = require("request"); -const Settings = require('settings-sharelatex'); -const RedisManager = require("./RedisManager"); -const { - rclient -} = RedisManager; -const docUpdaterKeys = Settings.redis.documentupdater.key_schema; -const async = require("async"); -const ProjectManager = require("./ProjectManager"); -const _ = require("lodash"); -const logger = require("logger-sharelatex"); +const request = require('request') +const Settings = require('settings-sharelatex') +const RedisManager = require('./RedisManager') +const { rclient } = RedisManager +const docUpdaterKeys = Settings.redis.documentupdater.key_schema +const async = require('async') +const ProjectManager = require('./ProjectManager') +const _ = require('lodash') +const logger = require('logger-sharelatex') -var ProjectFlusher = { +var ProjectFlusher = { + // iterate over keys asynchronously using redis scan (non-blocking) + // handle all the cluster nodes or single redis server + _getKeys(pattern, limit, callback) { + const nodes = (typeof rclient.nodes === 'function' + ? rclient.nodes('master') + : undefined) || [rclient] + const doKeyLookupForNode = (node, cb) => + ProjectFlusher._getKeysFromNode(node, pattern, limit, cb) + return async.concatSeries(nodes, doKeyLookupForNode, callback) + }, - // iterate over keys asynchronously using redis scan (non-blocking) - // handle all the cluster nodes or single redis server - _getKeys(pattern, limit, callback) { - const nodes = (typeof rclient.nodes === 'function' ? 
rclient.nodes('master') : undefined) || [ rclient ]; - const doKeyLookupForNode = (node, cb) => ProjectFlusher._getKeysFromNode(node, pattern, limit, cb); - return async.concatSeries(nodes, doKeyLookupForNode, callback); - }, + _getKeysFromNode(node, pattern, limit, callback) { + if (limit == null) { + limit = 1000 + } + let cursor = 0 // redis iterator + const keySet = {} // use hash to avoid duplicate results + const batchSize = limit != null ? Math.min(limit, 1000) : 1000 + // scan over all keys looking for pattern + var doIteration = ( + cb // avoid hitting redis too hard + ) => + node.scan(cursor, 'MATCH', pattern, 'COUNT', batchSize, function ( + error, + reply + ) { + let keys + if (error != null) { + return callback(error) + } + ;[cursor, keys] = Array.from(reply) + for (const key of Array.from(keys)) { + keySet[key] = true + } + keys = Object.keys(keySet) + const noResults = cursor === '0' // redis returns string results not numeric + const limitReached = limit != null && keys.length >= limit + if (noResults || limitReached) { + return callback(null, keys) + } else { + return setTimeout(doIteration, 10) + } + }) + return doIteration() + }, - _getKeysFromNode(node, pattern, limit, callback) { - if (limit == null) { limit = 1000; } - let cursor = 0; // redis iterator - const keySet = {}; // use hash to avoid duplicate results - const batchSize = (limit != null) ? Math.min(limit, 1000) : 1000; - // scan over all keys looking for pattern - var doIteration = cb => // avoid hitting redis too hard - node.scan(cursor, "MATCH", pattern, "COUNT", batchSize, function(error, reply) { - let keys; - if (error != null) { return callback(error); } - [cursor, keys] = Array.from(reply); - for (const key of Array.from(keys)) { - keySet[key] = true; - } - keys = Object.keys(keySet); - const noResults = cursor === "0"; // redis returns string results not numeric - const limitReached = ((limit != null) && (keys.length >= limit)); - if (noResults || limitReached) { - return callback(null, keys); - } else { - return setTimeout(doIteration, 10); - } - }); - return doIteration(); - }, + // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b + // or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster) + _extractIds(keyList) { + const ids = (() => { + const result = [] + for (const key of Array.from(keyList)) { + const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id + result.push(m[1]) + } + return result + })() + return ids + }, - // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b - // or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster) - _extractIds(keyList) { - const ids = (() => { - const result = []; - for (const key of Array.from(keyList)) { - const m = key.match(/:\{?([0-9a-f]{24})\}?/); // extract object id - result.push(m[1]); - } - return result; - })(); - return ids; - }, + flushAllProjects(options, callback) { + logger.log({ options }, 'flushing all projects') + return ProjectFlusher._getKeys( + docUpdaterKeys.docsInProject({ project_id: '*' }), + options.limit, + function (error, project_keys) { + if (error != null) { + logger.err({ err: error }, 'error getting keys for flushing') + return callback(error) + } + const project_ids = ProjectFlusher._extractIds(project_keys) + if (options.dryRun) { + return callback(null, project_ids) + } + const jobs = _.map(project_ids, (project_id) => (cb) => + ProjectManager.flushAndDeleteProjectWithLocks( + project_id, + { background: true }, + cb + ) + ) + return async.parallelLimit( + 
async.reflectAll(jobs), + options.concurrency, + function (error, results) { + const success = [] + const failure = [] + _.each(results, function (result, i) { + if (result.error != null) { + return failure.push(project_ids[i]) + } else { + return success.push(project_ids[i]) + } + }) + logger.log({ success, failure }, 'finished flushing all projects') + return callback(error, { success, failure }) + } + ) + } + ) + } +} - flushAllProjects(options, callback){ - logger.log({options}, "flushing all projects"); - return ProjectFlusher._getKeys(docUpdaterKeys.docsInProject({project_id:"*"}), options.limit, function(error, project_keys) { - if (error != null) { - logger.err({err:error}, "error getting keys for flushing"); - return callback(error); - } - const project_ids = ProjectFlusher._extractIds(project_keys); - if (options.dryRun) { - return callback(null, project_ids); - } - const jobs = _.map(project_ids, project_id => cb => ProjectManager.flushAndDeleteProjectWithLocks(project_id, {background:true}, cb)); - return async.parallelLimit(async.reflectAll(jobs), options.concurrency, function(error, results){ - const success = []; - const failure = []; - _.each(results, function(result, i){ - if (result.error != null) { - return failure.push(project_ids[i]); - } else { - return success.push(project_ids[i]); - } - }); - logger.log({success, failure}, "finished flushing all projects"); - return callback(error, {success, failure}); - }); - }); - } -}; - - -module.exports = ProjectFlusher; \ No newline at end of file +module.exports = ProjectFlusher diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js index de19542227..eb714738ca 100644 --- a/services/document-updater/app/js/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js @@ -13,105 +13,164 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let ProjectHistoryRedisManager; -const Settings = require('settings-sharelatex'); -const projectHistoryKeys = __guard__(Settings.redis != null ? Settings.redis.project_history : undefined, x => x.key_schema); -const rclient = require("redis-sharelatex").createClient(Settings.redis.project_history); -const logger = require('logger-sharelatex'); -const metrics = require('./Metrics'); +let ProjectHistoryRedisManager +const Settings = require('settings-sharelatex') +const projectHistoryKeys = __guard__( + Settings.redis != null ? Settings.redis.project_history : undefined, + (x) => x.key_schema +) +const rclient = require('redis-sharelatex').createClient( + Settings.redis.project_history +) +const logger = require('logger-sharelatex') +const metrics = require('./Metrics') -module.exports = (ProjectHistoryRedisManager = { - queueOps(project_id, ...rest) { - // Record metric for ops pushed onto queue - const adjustedLength = Math.max(rest.length, 1); const ops = rest.slice(0, adjustedLength - 1); const val = rest[adjustedLength - 1]; const callback = val != null ? 
val : function(error, projectUpdateCount) {}; - for (const op of Array.from(ops)) { - metrics.summary("redis.projectHistoryOps", op.length, {status: "push"}); - } - const multi = rclient.multi(); - // Push the ops onto the project history queue - multi.rpush(projectHistoryKeys.projectHistoryOps({project_id}), ...Array.from(ops)); - // To record the age of the oldest op on the queue set a timestamp if not - // already present (SETNX). - multi.setnx(projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now()); - return multi.exec(function(error, result) { - if (error != null) { return callback(error); } - // return the number of entries pushed onto the project history queue - return callback(null, result[0]);}); - }, +module.exports = ProjectHistoryRedisManager = { + queueOps(project_id, ...rest) { + // Record metric for ops pushed onto queue + const adjustedLength = Math.max(rest.length, 1) + const ops = rest.slice(0, adjustedLength - 1) + const val = rest[adjustedLength - 1] + const callback = val != null ? val : function (error, projectUpdateCount) {} + for (const op of Array.from(ops)) { + metrics.summary('redis.projectHistoryOps', op.length, { status: 'push' }) + } + const multi = rclient.multi() + // Push the ops onto the project history queue + multi.rpush( + projectHistoryKeys.projectHistoryOps({ project_id }), + ...Array.from(ops) + ) + // To record the age of the oldest op on the queue set a timestamp if not + // already present (SETNX). + multi.setnx( + projectHistoryKeys.projectHistoryFirstOpTimestamp({ project_id }), + Date.now() + ) + return multi.exec(function (error, result) { + if (error != null) { + return callback(error) + } + // return the number of entries pushed onto the project history queue + return callback(null, result[0]) + }) + }, + queueRenameEntity( + project_id, + projectHistoryId, + entity_type, + entity_id, + user_id, + projectUpdate, + callback + ) { + projectUpdate = { + pathname: projectUpdate.pathname, + new_pathname: projectUpdate.newPathname, + meta: { + user_id, + ts: new Date() + }, + version: projectUpdate.version, + projectHistoryId + } + projectUpdate[entity_type] = entity_id - queueRenameEntity(project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) { - projectUpdate = { - pathname: projectUpdate.pathname, - new_pathname: projectUpdate.newPathname, - meta: { - user_id, - ts: new Date() - }, - version: projectUpdate.version, - projectHistoryId - }; - projectUpdate[entity_type] = entity_id; + logger.log( + { project_id, projectUpdate }, + 'queue rename operation to project-history' + ) + const jsonUpdate = JSON.stringify(projectUpdate) - logger.log({project_id, projectUpdate}, "queue rename operation to project-history"); - const jsonUpdate = JSON.stringify(projectUpdate); + return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback) + }, - return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback); - }, + queueAddEntity( + project_id, + projectHistoryId, + entity_type, + entitiy_id, + user_id, + projectUpdate, + callback + ) { + if (callback == null) { + callback = function (error) {} + } + projectUpdate = { + pathname: projectUpdate.pathname, + docLines: projectUpdate.docLines, + url: projectUpdate.url, + meta: { + user_id, + ts: new Date() + }, + version: projectUpdate.version, + projectHistoryId + } + projectUpdate[entity_type] = entitiy_id - queueAddEntity(project_id, projectHistoryId, entity_type, entitiy_id, user_id, projectUpdate, callback) { - if (callback == 
null) { callback = function(error) {}; } - projectUpdate = { - pathname: projectUpdate.pathname, - docLines: projectUpdate.docLines, - url: projectUpdate.url, - meta: { - user_id, - ts: new Date() - }, - version: projectUpdate.version, - projectHistoryId - }; - projectUpdate[entity_type] = entitiy_id; + logger.log( + { project_id, projectUpdate }, + 'queue add operation to project-history' + ) + const jsonUpdate = JSON.stringify(projectUpdate) - logger.log({project_id, projectUpdate}, "queue add operation to project-history"); - const jsonUpdate = JSON.stringify(projectUpdate); + return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback) + }, - return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback); - }, + queueResyncProjectStructure( + project_id, + projectHistoryId, + docs, + files, + callback + ) { + logger.log({ project_id, docs, files }, 'queue project structure resync') + const projectUpdate = { + resyncProjectStructure: { docs, files }, + projectHistoryId, + meta: { + ts: new Date() + } + } + const jsonUpdate = JSON.stringify(projectUpdate) + return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback) + }, - queueResyncProjectStructure(project_id, projectHistoryId, docs, files, callback) { - logger.log({project_id, docs, files}, "queue project structure resync"); - const projectUpdate = { - resyncProjectStructure: { docs, files }, - projectHistoryId, - meta: { - ts: new Date() - } - }; - const jsonUpdate = JSON.stringify(projectUpdate); - return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback); - }, - - queueResyncDocContent(project_id, projectHistoryId, doc_id, lines, version, pathname, callback) { - logger.log({project_id, doc_id, lines, version, pathname}, "queue doc content resync"); - const projectUpdate = { - resyncDocContent: { - content: lines.join("\n"), - version - }, - projectHistoryId, - path: pathname, - doc: doc_id, - meta: { - ts: new Date() - } - }; - const jsonUpdate = JSON.stringify(projectUpdate); - return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback); - } -}); + queueResyncDocContent( + project_id, + projectHistoryId, + doc_id, + lines, + version, + pathname, + callback + ) { + logger.log( + { project_id, doc_id, lines, version, pathname }, + 'queue doc content resync' + ) + const projectUpdate = { + resyncDocContent: { + content: lines.join('\n'), + version + }, + projectHistoryId, + path: pathname, + doc: doc_id, + meta: { + ts: new Date() + } + } + const jsonUpdate = JSON.stringify(projectUpdate) + return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback) + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index deac1e451a..2955904adb 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -12,221 +12,382 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let ProjectManager; -const RedisManager = require("./RedisManager"); -const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager"); -const DocumentManager = require("./DocumentManager"); -const HistoryManager = require("./HistoryManager"); -const async = require("async"); -const logger = require("logger-sharelatex"); -const Metrics = require("./Metrics"); -const Errors = require("./Errors"); +let ProjectManager +const RedisManager = require('./RedisManager') +const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') +const DocumentManager = require('./DocumentManager') +const HistoryManager = require('./HistoryManager') +const async = require('async') +const logger = require('logger-sharelatex') +const Metrics = require('./Metrics') +const Errors = require('./Errors') -module.exports = (ProjectManager = { - flushProjectWithLocks(project_id, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("projectManager.flushProjectWithLocks"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; +module.exports = ProjectManager = { + flushProjectWithLocks(project_id, _callback) { + if (_callback == null) { + _callback = function (error) {} + } + const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) { - if (error != null) { return callback(error); } - const jobs = []; - const errors = []; - for (const doc_id of Array.from((doc_ids || []))) { - ((doc_id => jobs.push(callback => DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function(error) { - if ((error != null) && error instanceof Errors.NotFoundError) { - logger.warn({err: error, project_id, doc_id}, "found deleted doc when flushing"); - return callback(); - } else if (error != null) { - logger.error({err: error, project_id, doc_id}, "error flushing doc"); - errors.push(error); - return callback(); - } else { - return callback(); - } - }))))(doc_id); - } - - logger.log({project_id, doc_ids}, "flushing docs"); - return async.series(jobs, function() { - if (errors.length > 0) { - return callback(new Error("Errors flushing docs. 
See log for details")); - } else { - return callback(null); - } - }); - }); - }, - - flushAndDeleteProjectWithLocks(project_id, options, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("projectManager.flushAndDeleteProjectWithLocks"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; - - return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) { - if (error != null) { return callback(error); } - const jobs = []; - const errors = []; - for (const doc_id of Array.from((doc_ids || []))) { - ((doc_id => jobs.push(callback => DocumentManager.flushAndDeleteDocWithLock(project_id, doc_id, {}, function(error) { - if (error != null) { - logger.error({err: error, project_id, doc_id}, "error deleting doc"); - errors.push(error); - } - return callback(); - }))))(doc_id); - } - - logger.log({project_id, doc_ids}, "deleting docs"); - return async.series(jobs, () => // When deleting the project here we want to ensure that project - // history is completely flushed because the project may be - // deleted in web after this call completes, and so further - // attempts to flush would fail after that. - HistoryManager.flushProjectChanges(project_id, options, function(error) { - if (errors.length > 0) { - return callback(new Error("Errors deleting docs. See log for details")); + return RedisManager.getDocIdsInProject(project_id, function ( + error, + doc_ids + ) { + if (error != null) { + return callback(error) + } + const jobs = [] + const errors = [] + for (const doc_id of Array.from(doc_ids || [])) { + ;((doc_id) => + jobs.push((callback) => + DocumentManager.flushDocIfLoadedWithLock( + project_id, + doc_id, + function (error) { + if (error != null && error instanceof Errors.NotFoundError) { + logger.warn( + { err: error, project_id, doc_id }, + 'found deleted doc when flushing' + ) + return callback() } else if (error != null) { - return callback(error); + logger.error( + { err: error, project_id, doc_id }, + 'error flushing doc' + ) + errors.push(error) + return callback() } else { - return callback(null); + return callback() } - })); - }); - }, + } + ) + ))(doc_id) + } - queueFlushAndDeleteProject(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - return RedisManager.queueFlushAndDeleteProject(project_id, function(error) { - if (error != null) { - logger.error({project_id, error}, "error adding project to flush and delete queue"); - return callback(error); - } - Metrics.inc("queued-delete"); - return callback(); - }); - }, + logger.log({ project_id, doc_ids }, 'flushing docs') + return async.series(jobs, function () { + if (errors.length > 0) { + return callback( + new Error('Errors flushing docs. See log for details') + ) + } else { + return callback(null) + } + }) + }) + }, - getProjectDocsTimestamps(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) { - if (error != null) { return callback(error); } - if (!(doc_ids != null ? 
doc_ids.length : undefined)) { return callback(null, []); } - return RedisManager.getDocTimestamps(doc_ids, function(error, timestamps) { - if (error != null) { return callback(error); } - return callback(null, timestamps); - }); - }); - }, + flushAndDeleteProjectWithLocks(project_id, options, _callback) { + if (_callback == null) { + _callback = function (error) {} + } + const timer = new Metrics.Timer( + 'projectManager.flushAndDeleteProjectWithLocks' + ) + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - getProjectDocsAndFlushIfOld(project_id, projectStateHash, excludeVersions, _callback) { - if (excludeVersions == null) { excludeVersions = {}; } - if (_callback == null) { _callback = function(error, docs) {}; } - const timer = new Metrics.Timer("projectManager.getProjectDocsAndFlushIfOld"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; + return RedisManager.getDocIdsInProject(project_id, function ( + error, + doc_ids + ) { + if (error != null) { + return callback(error) + } + const jobs = [] + const errors = [] + for (const doc_id of Array.from(doc_ids || [])) { + ;((doc_id) => + jobs.push((callback) => + DocumentManager.flushAndDeleteDocWithLock( + project_id, + doc_id, + {}, + function (error) { + if (error != null) { + logger.error( + { err: error, project_id, doc_id }, + 'error deleting doc' + ) + errors.push(error) + } + return callback() + } + ) + ))(doc_id) + } - return RedisManager.checkOrSetProjectState(project_id, projectStateHash, function(error, projectStateChanged) { - if (error != null) { - logger.error({err: error, project_id}, "error getting/setting project state in getProjectDocsAndFlushIfOld"); - return callback(error); - } - // we can't return docs if project structure has changed - if (projectStateChanged) { - return callback(Errors.ProjectStateChangedError("project state changed")); - } - // project structure hasn't changed, return doc content from redis - return RedisManager.getDocIdsInProject(project_id, function(error, doc_ids) { - if (error != null) { - logger.error({err: error, project_id}, "error getting doc ids in getProjectDocs"); - return callback(error); - } - const jobs = []; - for (const doc_id of Array.from(doc_ids || [])) { - ((doc_id => jobs.push(cb => // get the doc lines from redis - DocumentManager.getDocAndFlushIfOldWithLock(project_id, doc_id, function(err, lines, version) { - if (err != null) { - logger.error({err, project_id, doc_id}, "error getting project doc lines in getProjectDocsAndFlushIfOld"); - return cb(err); - } - const doc = {_id:doc_id, lines, v:version}; // create a doc object to return - return cb(null, doc); - }))))(doc_id); - } - return async.series(jobs, function(error, docs) { - if (error != null) { return callback(error); } - return callback(null, docs); - }); - }); - }); - }, + logger.log({ project_id, doc_ids }, 'deleting docs') + return async.series(jobs, () => + // When deleting the project here we want to ensure that project + // history is completely flushed because the project may be + // deleted in web after this call completes, and so further + // attempts to flush would fail after that. + HistoryManager.flushProjectChanges(project_id, options, function ( + error + ) { + if (errors.length > 0) { + return callback( + new Error('Errors deleting docs. 
See log for details') + ) + } else if (error != null) { + return callback(error) + } else { + return callback(null) + } + }) + ) + }) + }, - clearProjectState(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - return RedisManager.clearProjectState(project_id, callback); - }, + queueFlushAndDeleteProject(project_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return RedisManager.queueFlushAndDeleteProject(project_id, function ( + error + ) { + if (error != null) { + logger.error( + { project_id, error }, + 'error adding project to flush and delete queue' + ) + return callback(error) + } + Metrics.inc('queued-delete') + return callback() + }) + }, - updateProjectWithLocks(project_id, projectHistoryId, user_id, docUpdates, fileUpdates, version, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const timer = new Metrics.Timer("projectManager.updateProject"); - const callback = function(...args) { - timer.done(); - return _callback(...Array.from(args || [])); - }; + getProjectDocsTimestamps(project_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return RedisManager.getDocIdsInProject(project_id, function ( + error, + doc_ids + ) { + if (error != null) { + return callback(error) + } + if (!(doc_ids != null ? doc_ids.length : undefined)) { + return callback(null, []) + } + return RedisManager.getDocTimestamps(doc_ids, function ( + error, + timestamps + ) { + if (error != null) { + return callback(error) + } + return callback(null, timestamps) + }) + }) + }, - const project_version = version; - let project_subversion = 0; // project versions can have multiple operations + getProjectDocsAndFlushIfOld( + project_id, + projectStateHash, + excludeVersions, + _callback + ) { + if (excludeVersions == null) { + excludeVersions = {} + } + if (_callback == null) { + _callback = function (error, docs) {} + } + const timer = new Metrics.Timer( + 'projectManager.getProjectDocsAndFlushIfOld' + ) + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - let project_ops_length = 0; + return RedisManager.checkOrSetProjectState( + project_id, + projectStateHash, + function (error, projectStateChanged) { + if (error != null) { + logger.error( + { err: error, project_id }, + 'error getting/setting project state in getProjectDocsAndFlushIfOld' + ) + return callback(error) + } + // we can't return docs if project structure has changed + if (projectStateChanged) { + return callback( + Errors.ProjectStateChangedError('project state changed') + ) + } + // project structure hasn't changed, return doc content from redis + return RedisManager.getDocIdsInProject(project_id, function ( + error, + doc_ids + ) { + if (error != null) { + logger.error( + { err: error, project_id }, + 'error getting doc ids in getProjectDocs' + ) + return callback(error) + } + const jobs = [] + for (const doc_id of Array.from(doc_ids || [])) { + ;((doc_id) => + jobs.push(( + cb // get the doc lines from redis + ) => + DocumentManager.getDocAndFlushIfOldWithLock( + project_id, + doc_id, + function (err, lines, version) { + if (err != null) { + logger.error( + { err, project_id, doc_id }, + 'error getting project doc lines in getProjectDocsAndFlushIfOld' + ) + return cb(err) + } + const doc = { _id: doc_id, lines, v: version } // create a doc object to return + return cb(null, doc) + } + ) + ))(doc_id) + } + return async.series(jobs, function (error, docs) { + if (error != 
null) { + return callback(error) + } + return callback(null, docs) + }) + }) + } + ) + }, - const handleDocUpdate = function(projectUpdate, cb) { - const doc_id = projectUpdate.id; - projectUpdate.version = `${project_version}.${project_subversion++}`; - if (projectUpdate.docLines != null) { - return ProjectHistoryRedisManager.queueAddEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, projectUpdate, function(error, count) { - project_ops_length = count; - return cb(error); - }); - } else { - return DocumentManager.renameDocWithLock(project_id, doc_id, user_id, projectUpdate, projectHistoryId, function(error, count) { - project_ops_length = count; - return cb(error); - }); - } - }; + clearProjectState(project_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return RedisManager.clearProjectState(project_id, callback) + }, - const handleFileUpdate = function(projectUpdate, cb) { - const file_id = projectUpdate.id; - projectUpdate.version = `${project_version}.${project_subversion++}`; - if (projectUpdate.url != null) { - return ProjectHistoryRedisManager.queueAddEntity(project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, function(error, count) { - project_ops_length = count; - return cb(error); - }); - } else { - return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'file', file_id, user_id, projectUpdate, function(error, count) { - project_ops_length = count; - return cb(error); - }); - } - }; + updateProjectWithLocks( + project_id, + projectHistoryId, + user_id, + docUpdates, + fileUpdates, + version, + _callback + ) { + if (_callback == null) { + _callback = function (error) {} + } + const timer = new Metrics.Timer('projectManager.updateProject') + const callback = function (...args) { + timer.done() + return _callback(...Array.from(args || [])) + } - return async.eachSeries(docUpdates, handleDocUpdate, function(error) { - if (error != null) { return callback(error); } - return async.eachSeries(fileUpdates, handleFileUpdate, function(error) { - if (error != null) { return callback(error); } - if (HistoryManager.shouldFlushHistoryOps(project_ops_length, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)) { - HistoryManager.flushProjectChangesAsync(project_id); - } - return callback(); - }); - }); - } -}); + const project_version = version + let project_subversion = 0 // project versions can have multiple operations + + let project_ops_length = 0 + + const handleDocUpdate = function (projectUpdate, cb) { + const doc_id = projectUpdate.id + projectUpdate.version = `${project_version}.${project_subversion++}` + if (projectUpdate.docLines != null) { + return ProjectHistoryRedisManager.queueAddEntity( + project_id, + projectHistoryId, + 'doc', + doc_id, + user_id, + projectUpdate, + function (error, count) { + project_ops_length = count + return cb(error) + } + ) + } else { + return DocumentManager.renameDocWithLock( + project_id, + doc_id, + user_id, + projectUpdate, + projectHistoryId, + function (error, count) { + project_ops_length = count + return cb(error) + } + ) + } + } + + const handleFileUpdate = function (projectUpdate, cb) { + const file_id = projectUpdate.id + projectUpdate.version = `${project_version}.${project_subversion++}` + if (projectUpdate.url != null) { + return ProjectHistoryRedisManager.queueAddEntity( + project_id, + projectHistoryId, + 'file', + file_id, + user_id, + projectUpdate, + function (error, count) { + project_ops_length = count + return 
cb(error) + } + ) + } else { + return ProjectHistoryRedisManager.queueRenameEntity( + project_id, + projectHistoryId, + 'file', + file_id, + user_id, + projectUpdate, + function (error, count) { + project_ops_length = count + return cb(error) + } + ) + } + } + + return async.eachSeries(docUpdates, handleDocUpdate, function (error) { + if (error != null) { + return callback(error) + } + return async.eachSeries(fileUpdates, handleFileUpdate, function (error) { + if (error != null) { + return callback(error) + } + if ( + HistoryManager.shouldFlushHistoryOps( + project_ops_length, + docUpdates.length + fileUpdates.length, + HistoryManager.FLUSH_PROJECT_EVERY_N_OPS + ) + ) { + HistoryManager.flushProjectChangesAsync(project_id) + } + return callback() + }) + }) + } +} diff --git a/services/document-updater/app/js/RangesManager.js b/services/document-updater/app/js/RangesManager.js index f5890f5ad3..636efcb5a6 100644 --- a/services/document-updater/app/js/RangesManager.js +++ b/services/document-updater/app/js/RangesManager.js @@ -11,108 +11,153 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let RangesManager; -const RangesTracker = require("./RangesTracker"); -const logger = require("logger-sharelatex"); -const _ = require("lodash"); +let RangesManager +const RangesTracker = require('./RangesTracker') +const logger = require('logger-sharelatex') +const _ = require('lodash') -module.exports = (RangesManager = { - MAX_COMMENTS: 500, - MAX_CHANGES: 2000, +module.exports = RangesManager = { + MAX_COMMENTS: 500, + MAX_CHANGES: 2000, - applyUpdate(project_id, doc_id, entries, updates, newDocLines, callback) { - let error; - if (entries == null) { entries = {}; } - if (updates == null) { updates = []; } - if (callback == null) { callback = function(error, new_entries, ranges_were_collapsed) {}; } - const {changes, comments} = _.cloneDeep(entries); - const rangesTracker = new RangesTracker(changes, comments); - const emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker); - for (const update of Array.from(updates)) { - rangesTracker.track_changes = !!update.meta.tc; - if (update.meta.tc) { - rangesTracker.setIdSeed(update.meta.tc); - } - for (const op of Array.from(update.op)) { - try { - rangesTracker.applyOp(op, { user_id: (update.meta != null ? update.meta.user_id : undefined) }); - } catch (error1) { - error = error1; - return callback(error); - } - } - } - - if (((rangesTracker.changes != null ? rangesTracker.changes.length : undefined) > RangesManager.MAX_CHANGES) || ((rangesTracker.comments != null ? 
rangesTracker.comments.length : undefined) > RangesManager.MAX_COMMENTS)) { - return callback(new Error("too many comments or tracked changes")); - } + applyUpdate(project_id, doc_id, entries, updates, newDocLines, callback) { + let error + if (entries == null) { + entries = {} + } + if (updates == null) { + updates = [] + } + if (callback == null) { + callback = function (error, new_entries, ranges_were_collapsed) {} + } + const { changes, comments } = _.cloneDeep(entries) + const rangesTracker = new RangesTracker(changes, comments) + const emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker) + for (const update of Array.from(updates)) { + rangesTracker.track_changes = !!update.meta.tc + if (update.meta.tc) { + rangesTracker.setIdSeed(update.meta.tc) + } + for (const op of Array.from(update.op)) { + try { + rangesTracker.applyOp(op, { + user_id: update.meta != null ? update.meta.user_id : undefined + }) + } catch (error1) { + error = error1 + return callback(error) + } + } + } - try { - // This is a consistency check that all of our ranges and - // comments still match the corresponding text - rangesTracker.validate(newDocLines.join("\n")); - } catch (error2) { - error = error2; - logger.error({err: error, project_id, doc_id, newDocLines, updates}, "error validating ranges"); - return callback(error); - } + if ( + (rangesTracker.changes != null + ? rangesTracker.changes.length + : undefined) > RangesManager.MAX_CHANGES || + (rangesTracker.comments != null + ? rangesTracker.comments.length + : undefined) > RangesManager.MAX_COMMENTS + ) { + return callback(new Error('too many comments or tracked changes')) + } - const emptyRangeCountAfter = RangesManager._emptyRangesCount(rangesTracker); - const rangesWereCollapsed = emptyRangeCountAfter > emptyRangeCountBefore; - const response = RangesManager._getRanges(rangesTracker); - logger.log({project_id, doc_id, changesCount: (response.changes != null ? response.changes.length : undefined), commentsCount: (response.comments != null ? response.comments.length : undefined), rangesWereCollapsed}, "applied updates to ranges"); - return callback(null, response, rangesWereCollapsed); - }, + try { + // This is a consistency check that all of our ranges and + // comments still match the corresponding text + rangesTracker.validate(newDocLines.join('\n')) + } catch (error2) { + error = error2 + logger.error( + { err: error, project_id, doc_id, newDocLines, updates }, + 'error validating ranges' + ) + return callback(error) + } - acceptChanges(change_ids, ranges, callback) { - if (callback == null) { callback = function(error, ranges) {}; } - const {changes, comments} = ranges; - logger.log(`accepting ${ change_ids.length } changes in ranges`); - const rangesTracker = new RangesTracker(changes, comments); - rangesTracker.removeChangeIds(change_ids); - const response = RangesManager._getRanges(rangesTracker); - return callback(null, response); - }, + const emptyRangeCountAfter = RangesManager._emptyRangesCount(rangesTracker) + const rangesWereCollapsed = emptyRangeCountAfter > emptyRangeCountBefore + const response = RangesManager._getRanges(rangesTracker) + logger.log( + { + project_id, + doc_id, + changesCount: + response.changes != null ? response.changes.length : undefined, + commentsCount: + response.comments != null ? 
response.comments.length : undefined, + rangesWereCollapsed + }, + 'applied updates to ranges' + ) + return callback(null, response, rangesWereCollapsed) + }, - deleteComment(comment_id, ranges, callback) { - if (callback == null) { callback = function(error, ranges) {}; } - const {changes, comments} = ranges; - logger.log({comment_id}, "deleting comment in ranges"); - const rangesTracker = new RangesTracker(changes, comments); - rangesTracker.removeCommentId(comment_id); - const response = RangesManager._getRanges(rangesTracker); - return callback(null, response); - }, - - _getRanges(rangesTracker) { - // Return the minimal data structure needed, since most documents won't have any - // changes or comments - let response = {}; - if ((rangesTracker.changes != null ? rangesTracker.changes.length : undefined) > 0) { - if (response == null) { response = {}; } - response.changes = rangesTracker.changes; - } - if ((rangesTracker.comments != null ? rangesTracker.comments.length : undefined) > 0) { - if (response == null) { response = {}; } - response.comments = rangesTracker.comments; - } - return response; - }, + acceptChanges(change_ids, ranges, callback) { + if (callback == null) { + callback = function (error, ranges) {} + } + const { changes, comments } = ranges + logger.log(`accepting ${change_ids.length} changes in ranges`) + const rangesTracker = new RangesTracker(changes, comments) + rangesTracker.removeChangeIds(change_ids) + const response = RangesManager._getRanges(rangesTracker) + return callback(null, response) + }, - _emptyRangesCount(ranges) { - let count = 0; - for (const comment of Array.from((ranges.comments || []))) { - if (comment.op.c === "") { - count++; - } - } - for (const change of Array.from((ranges.changes || []))) { - if (change.op.i != null) { - if (change.op.i === "") { - count++; - } - } - } - return count; - } -}); \ No newline at end of file + deleteComment(comment_id, ranges, callback) { + if (callback == null) { + callback = function (error, ranges) {} + } + const { changes, comments } = ranges + logger.log({ comment_id }, 'deleting comment in ranges') + const rangesTracker = new RangesTracker(changes, comments) + rangesTracker.removeCommentId(comment_id) + const response = RangesManager._getRanges(rangesTracker) + return callback(null, response) + }, + + _getRanges(rangesTracker) { + // Return the minimal data structure needed, since most documents won't have any + // changes or comments + let response = {} + if ( + (rangesTracker.changes != null + ? rangesTracker.changes.length + : undefined) > 0 + ) { + if (response == null) { + response = {} + } + response.changes = rangesTracker.changes + } + if ( + (rangesTracker.comments != null + ? 
rangesTracker.comments.length + : undefined) > 0 + ) { + if (response == null) { + response = {} + } + response.comments = rangesTracker.comments + } + return response + }, + + _emptyRangesCount(ranges) { + let count = 0 + for (const comment of Array.from(ranges.comments || [])) { + if (comment.op.c === '') { + count++ + } + } + for (const change of Array.from(ranges.changes || [])) { + if (change.op.i != null) { + if (change.op.i === '') { + count++ + } + } + } + return count + } +} diff --git a/services/document-updater/app/js/RangesTracker.js b/services/document-updater/app/js/RangesTracker.js index 80422ec54f..6107acf300 100644 --- a/services/document-updater/app/js/RangesTracker.js +++ b/services/document-updater/app/js/RangesTracker.js @@ -17,709 +17,829 @@ // This file is shared between document-updater and web, so that the server and client share // an identical track changes implementation. Do not edit it directly in web or document-updater, // instead edit it at https://github.com/sharelatex/ranges-tracker, where it has a suite of tests -const load = function() { - let RangesTracker; - return RangesTracker = class RangesTracker { - // The purpose of this class is to track a set of inserts and deletes to a document, like - // track changes in Word. We store these as a set of ShareJs style ranges: - // {i: "foo", p: 42} # Insert 'foo' at offset 42 - // {d: "bar", p: 37} # Delete 'bar' at offset 37 - // We only track the inserts and deletes, not the whole document, but by being given all - // updates that are applied to a document, we can update these appropriately. - // - // Note that the set of inserts and deletes we store applies to the document as-is at the moment. - // So inserts correspond to text which is in the document, while deletes correspond to text which - // is no longer there, so their lengths do not affect the position of later offsets. - // E.g. - // this is the current text of the document - // |-----| | - // {i: "current ", p:12} -^ ^- {d: "old ", p: 31} - // - // Track changes rules (should be consistent with Word): - // * When text is inserted at a delete, the text goes to the left of the delete - // I.e. "foo|bar" -> "foobaz|bar", where | is the delete, and 'baz' is inserted - // * Deleting content flagged as 'inserted' does not create a new delete marker, it only - // removes the insert marker. E.g. - // * "abdefghijkl" -> "abfghijkl" when 'de' is deleted. No delete marker added - // |---| <- inserted |-| <- inserted - // * Deletes overlapping regular text and inserted text will insert a delete marker for the - // regular text: - // "abcdefghijkl" -> "abcdejkl" when 'fghi' is deleted - // |----| |--|| - // ^- inserted 'bcdefg' \ ^- deleted 'hi' - // \--inserted 'bcde' - // * Deletes overlapping other deletes are merged. E.g. - // "abcghijkl" -> "ahijkl" when 'bcg is deleted' - // | <- delete 'def' | <- delete 'bcdefg' - // * Deletes by another user will consume deletes by the first user - // * Inserts by another user will not combine with inserts by the first user. If they are in the - // middle of a previous insert by the first user, the original insert will be split into two. 
- constructor(changes, comments) { - if (changes == null) { changes = []; } - this.changes = changes; - if (comments == null) { comments = []; } - this.comments = comments; - this.setIdSeed(RangesTracker.generateIdSeed()); - this.resetDirtyState(); - } +const load = function () { + let RangesTracker + return (RangesTracker = class RangesTracker { + // The purpose of this class is to track a set of inserts and deletes to a document, like + // track changes in Word. We store these as a set of ShareJs style ranges: + // {i: "foo", p: 42} # Insert 'foo' at offset 42 + // {d: "bar", p: 37} # Delete 'bar' at offset 37 + // We only track the inserts and deletes, not the whole document, but by being given all + // updates that are applied to a document, we can update these appropriately. + // + // Note that the set of inserts and deletes we store applies to the document as-is at the moment. + // So inserts correspond to text which is in the document, while deletes correspond to text which + // is no longer there, so their lengths do not affect the position of later offsets. + // E.g. + // this is the current text of the document + // |-----| | + // {i: "current ", p:12} -^ ^- {d: "old ", p: 31} + // + // Track changes rules (should be consistent with Word): + // * When text is inserted at a delete, the text goes to the left of the delete + // I.e. "foo|bar" -> "foobaz|bar", where | is the delete, and 'baz' is inserted + // * Deleting content flagged as 'inserted' does not create a new delete marker, it only + // removes the insert marker. E.g. + // * "abdefghijkl" -> "abfghijkl" when 'de' is deleted. No delete marker added + // |---| <- inserted |-| <- inserted + // * Deletes overlapping regular text and inserted text will insert a delete marker for the + // regular text: + // "abcdefghijkl" -> "abcdejkl" when 'fghi' is deleted + // |----| |--|| + // ^- inserted 'bcdefg' \ ^- deleted 'hi' + // \--inserted 'bcde' + // * Deletes overlapping other deletes are merged. E.g. + // "abcghijkl" -> "ahijkl" when 'bcg' is deleted + // | <- delete 'def' | <- delete 'bcdefg' + // * Deletes by another user will consume deletes by the first user + // * Inserts by another user will not combine with inserts by the first user. If they are in the + // middle of a previous insert by the first user, the original insert will be split into two. 
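The comment block above describes the two range shapes this class tracks. As a minimal standalone sketch (not part of this patch, and not RangesTracker's API), the shapes and the basic shifting rule look like this; shiftForInsert is a hypothetical helper:

    // Illustrative sketch only.
    const ranges = [
      { i: 'foo', p: 42 }, // 'foo' was inserted at offset 42 and is still in the doc
      { d: 'bar', p: 37 } // 'bar' was deleted at offset 37; it occupies no space
    ]

    // An insert of length L at or before a range's offset shifts the range right by L.
    function shiftForInsert(range, insertOp) {
      if (insertOp.p <= range.p) {
        range.p += insertOp.i.length
      }
      return range
    }

    ranges.map((r) => shiftForInsert(r, { i: 'ab', p: 0 }))
    // => [{ i: 'foo', p: 44 }, { d: 'bar', p: 39 }]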
+ constructor(changes, comments) { + if (changes == null) { + changes = [] + } + this.changes = changes + if (comments == null) { + comments = [] + } + this.comments = comments + this.setIdSeed(RangesTracker.generateIdSeed()) + this.resetDirtyState() + } - getIdSeed() { - return this.id_seed; - } + getIdSeed() { + return this.id_seed + } - setIdSeed(seed) { - this.id_seed = seed; - return this.id_increment = 0; - } - - static generateIdSeed() { - // Generate a the first 18 characters of Mongo ObjectId, leaving 6 for the increment part - // Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js - const pid = Math.floor(Math.random() * (32767)).toString(16); - const machine = Math.floor(Math.random() * (16777216)).toString(16); - const timestamp = Math.floor(new Date().valueOf() / 1000).toString(16); - return '00000000'.substr(0, 8 - timestamp.length) + timestamp + - '000000'.substr(0, 6 - machine.length) + machine + - '0000'.substr(0, 4 - pid.length) + pid; - } - - static generateId() { - return this.generateIdSeed() + "000001"; - } + setIdSeed(seed) { + this.id_seed = seed + return (this.id_increment = 0) + } - newId() { - this.id_increment++; - const increment = this.id_increment.toString(16); - const id = this.id_seed + '000000'.substr(0, 6 - increment.length) + increment; - return id; - } - - getComment(comment_id) { - let comment = null; - for (const c of Array.from(this.comments)) { - if (c.id === comment_id) { - comment = c; - break; - } - } - return comment; - } - - removeCommentId(comment_id) { - const comment = this.getComment(comment_id); - if ((comment == null)) { return; } - this.comments = this.comments.filter(c => c.id !== comment_id); - return this._markAsDirty(comment, "comment", "removed"); - } - - moveCommentId(comment_id, position, text) { - return (() => { - const result = []; - for (const comment of Array.from(this.comments)) { - if (comment.id === comment_id) { - comment.op.p = position; - comment.op.c = text; - result.push(this._markAsDirty(comment, "comment", "moved")); - } else { - result.push(undefined); - } - } - return result; - })(); - } + static generateIdSeed() { + // Generate the first 18 characters of a Mongo ObjectId, leaving 6 for the increment part + // Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js + const pid = Math.floor(Math.random() * 32767).toString(16) + const machine = Math.floor(Math.random() * 16777216).toString(16) + const timestamp = Math.floor(new Date().valueOf() / 1000).toString(16) + return ( + '00000000'.substr(0, 8 - timestamp.length) + + timestamp + + '000000'.substr(0, 6 - machine.length) + + machine + + '0000'.substr(0, 4 - pid.length) + + pid + ) + } - getChange(change_id) { - let change = null; - for (const c of Array.from(this.changes)) { - if (c.id === change_id) { - change = c; - break; - } - } - return change; - } + static generateId() { + return this.generateIdSeed() + '000001' + } - getChanges(change_ids) { - const changes_response = []; - const ids_map = {}; + newId() { + this.id_increment++ + const increment = this.id_increment.toString(16) + const id = + this.id_seed + '000000'.substr(0, 6 - increment.length) + increment + return id + } - for (const change_id of Array.from(change_ids)) { - ids_map[change_id] = true; - } + getComment(comment_id) { + let comment = null + for (const c of Array.from(this.comments)) { + if (c.id === comment_id) { + comment = c + break + } + } + return comment + } - for (const change of 
Array.from(this.changes)) { - if (ids_map[change.id]) { - delete ids_map[change.id]; - changes_response.push(change); - } - } + removeCommentId(comment_id) { + const comment = this.getComment(comment_id) + if (comment == null) { + return + } + this.comments = this.comments.filter((c) => c.id !== comment_id) + return this._markAsDirty(comment, 'comment', 'removed') + } - return changes_response; - } + moveCommentId(comment_id, position, text) { + return (() => { + const result = [] + for (const comment of Array.from(this.comments)) { + if (comment.id === comment_id) { + comment.op.p = position + comment.op.c = text + result.push(this._markAsDirty(comment, 'comment', 'moved')) + } else { + result.push(undefined) + } + } + return result + })() + } - removeChangeId(change_id) { - const change = this.getChange(change_id); - if ((change == null)) { return; } - return this._removeChange(change); - } + getChange(change_id) { + let change = null + for (const c of Array.from(this.changes)) { + if (c.id === change_id) { + change = c + break + } + } + return change + } - removeChangeIds(change_to_remove_ids) { - if (!(change_to_remove_ids != null ? change_to_remove_ids.length : undefined) > 0) { return; } - const i = this.changes.length; - const remove_change_id = {}; - for (const change_id of Array.from(change_to_remove_ids)) { - remove_change_id[change_id] = true; - } + getChanges(change_ids) { + const changes_response = [] + const ids_map = {} - const remaining_changes = []; + for (const change_id of Array.from(change_ids)) { + ids_map[change_id] = true + } - for (const change of Array.from(this.changes)) { - if (remove_change_id[change.id]) { - delete remove_change_id[change.id]; - this._markAsDirty(change, "change", "removed"); - } else { - remaining_changes.push(change); - } - } + for (const change of Array.from(this.changes)) { + if (ids_map[change.id]) { + delete ids_map[change.id] + changes_response.push(change) + } + } - return this.changes = remaining_changes; - } - - validate(text) { - let content; - for (const change of Array.from(this.changes)) { - if (change.op.i != null) { - content = text.slice(change.op.p, change.op.p + change.op.i.length); - if (content !== change.op.i) { - throw new Error(`Change (${JSON.stringify(change)}) doesn't match text (${JSON.stringify(content)})`); - } - } - } - for (const comment of Array.from(this.comments)) { - content = text.slice(comment.op.p, comment.op.p + comment.op.c.length); - if (content !== comment.op.c) { - throw new Error(`Comment (${JSON.stringify(comment)}) doesn't match text (${JSON.stringify(content)})`); - } - } - return true; - } + return changes_response + } - applyOp(op, metadata) { - if (metadata == null) { metadata = {}; } - if (metadata.ts == null) { metadata.ts = new Date(); } - // Apply an op that has been applied to the document to our changes to keep them up to date - if (op.i != null) { - this.applyInsertToChanges(op, metadata); - return this.applyInsertToComments(op); - } else if (op.d != null) { - this.applyDeleteToChanges(op, metadata); - return this.applyDeleteToComments(op); - } else if (op.c != null) { - return this.addComment(op, metadata); - } else { - throw new Error("unknown op type"); - } - } + removeChangeId(change_id) { + const change = this.getChange(change_id) + if (change == null) { + return + } + return this._removeChange(change) + } - applyOps(ops, metadata) { - if (metadata == null) { metadata = {}; } - return Array.from(ops).map((op) => - this.applyOp(op, metadata)); - } + 
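The generateIdSeed()/newId() pair earlier in this class builds ObjectId-style ids from an 18-hex-character seed plus a 6-hex-character zero-padded increment. A standalone sketch of that composition, using a hypothetical seed value:

    // Hypothetical seed: 8-char timestamp + 6-char machine + 4-char pid, all hex.
    const seed = '5e8f1b2c3d4e5f6071'
    let increment = 0

    function nextId() {
      increment++
      const suffix = increment.toString(16)
      return seed + '000000'.substr(0, 6 - suffix.length) + suffix
    }

    nextId() // => '5e8f1b2c3d4e5f6071000001' (24 chars, shaped like a Mongo ObjectId)
    nextId() // => '5e8f1b2c3d4e5f6071000002'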
removeChangeIds(change_to_remove_ids) { + if ( + !(change_to_remove_ids != null + ? change_to_remove_ids.length + : undefined) > 0 + ) { + return + } + const i = this.changes.length + const remove_change_id = {} + for (const change_id of Array.from(change_to_remove_ids)) { + remove_change_id[change_id] = true + } - addComment(op, metadata) { - const existing = this.getComment(op.t); - if (existing != null) { - this.moveCommentId(op.t, op.p, op.c); - return existing; - } else { - let comment; - this.comments.push(comment = { - id: op.t || this.newId(), - op: { // Copy because we'll modify in place - c: op.c, - p: op.p, - t: op.t - }, - metadata - }); - this._markAsDirty(comment, "comment", "added"); - return comment; - } - } - - applyInsertToComments(op) { - return (() => { - const result = []; - for (const comment of Array.from(this.comments)) { - if (op.p <= comment.op.p) { - comment.op.p += op.i.length; - result.push(this._markAsDirty(comment, "comment", "moved")); - } else if (op.p < (comment.op.p + comment.op.c.length)) { - const offset = op.p - comment.op.p; - comment.op.c = comment.op.c.slice(0, +(offset-1) + 1 || undefined) + op.i + comment.op.c.slice(offset); - result.push(this._markAsDirty(comment, "comment", "moved")); - } else { - result.push(undefined); - } - } - return result; - })(); - } + const remaining_changes = [] - applyDeleteToComments(op) { - const op_start = op.p; - const op_length = op.d.length; - const op_end = op.p + op_length; - return (() => { - const result = []; - for (const comment of Array.from(this.comments)) { - const comment_start = comment.op.p; - const comment_end = comment.op.p + comment.op.c.length; - const comment_length = comment_end - comment_start; - if (op_end <= comment_start) { - // delete is fully before comment - comment.op.p -= op_length; - result.push(this._markAsDirty(comment, "comment", "moved")); - } else if (op_start >= comment_end) { - // delete is fully after comment, nothing to do - } else { - // delete and comment overlap - var remaining_after, remaining_before; - if (op_start <= comment_start) { - remaining_before = ""; - } else { - remaining_before = comment.op.c.slice(0, op_start - comment_start); - } - if (op_end >= comment_end) { - remaining_after = ""; - } else { - remaining_after = comment.op.c.slice(op_end - comment_start); - } - - // Check deleted content matches delete op - const deleted_comment = comment.op.c.slice(remaining_before.length, comment_length - remaining_after.length); - const offset = Math.max(0, comment_start - op_start); - const deleted_op_content = op.d.slice(offset).slice(0, deleted_comment.length); - if (deleted_comment !== deleted_op_content) { - throw new Error("deleted content does not match comment content"); - } - - comment.op.p = Math.min(comment_start, op_start); - comment.op.c = remaining_before + remaining_after; - result.push(this._markAsDirty(comment, "comment", "moved")); - } - } - return result; - })(); - } + for (const change of Array.from(this.changes)) { + if (remove_change_id[change.id]) { + delete remove_change_id[change.id] + this._markAsDirty(change, 'change', 'removed') + } else { + remaining_changes.push(change) + } + } - applyInsertToChanges(op, metadata) { - let change; - const op_start = op.p; - const op_length = op.i.length; - const op_end = op.p + op_length; - const undoing = !!op.u; + return (this.changes = remaining_changes) + } + validate(text) { + let content + for (const change of Array.from(this.changes)) { + if (change.op.i != null) { + content = text.slice(change.op.p, 
change.op.p + change.op.i.length) + if (content !== change.op.i) { + throw new Error( + `Change (${JSON.stringify( + change + )}) doesn't match text (${JSON.stringify(content)})` + ) + } + } + } + for (const comment of Array.from(this.comments)) { + content = text.slice(comment.op.p, comment.op.p + comment.op.c.length) + if (content !== comment.op.c) { + throw new Error( + `Comment (${JSON.stringify( + comment + )}) doesn't match text (${JSON.stringify(content)})` + ) + } + } + return true + } - let already_merged = false; - let previous_change = null; - const moved_changes = []; - const remove_changes = []; - const new_changes = []; + applyOp(op, metadata) { + if (metadata == null) { + metadata = {} + } + if (metadata.ts == null) { + metadata.ts = new Date() + } + // Apply an op that has been applied to the document to our changes to keep them up to date + if (op.i != null) { + this.applyInsertToChanges(op, metadata) + return this.applyInsertToComments(op) + } else if (op.d != null) { + this.applyDeleteToChanges(op, metadata) + return this.applyDeleteToComments(op) + } else if (op.c != null) { + return this.addComment(op, metadata) + } else { + throw new Error('unknown op type') + } + } - for (let i = 0; i < this.changes.length; i++) { - change = this.changes[i]; - const change_start = change.op.p; - - if (change.op.d != null) { - // Shift any deletes after this along by the length of this insert - if (op_start < change_start) { - change.op.p += op_length; - moved_changes.push(change); - } else if (op_start === change_start) { - // If we are undoing, then we want to cancel any existing delete ranges if we can. - // Check if the insert matches the start of the delete, and just remove it from the delete instead if so. - if (undoing && (change.op.d.length >= op.i.length) && (change.op.d.slice(0, op.i.length) === op.i)) { - change.op.d = change.op.d.slice(op.i.length); - change.op.p += op.i.length; - if (change.op.d === "") { - remove_changes.push(change); - } else { - moved_changes.push(change); - } - already_merged = true; - } else { - change.op.p += op_length; - moved_changes.push(change); - } - } - } else if (change.op.i != null) { - var offset; - const change_end = change_start + change.op.i.length; - const is_change_overlapping = ((op_start >= change_start) && (op_start <= change_end)); - - // Only merge inserts if they are from the same user - const is_same_user = metadata.user_id === change.metadata.user_id; - - // If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also - // an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. - // E.g. - // foo|<--- about to insert 'b' here - // inserted 'foo' --^ ^-- deleted 'bar' - // should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . - const next_change = this.changes[i+1]; - const is_op_adjacent_to_next_delete = (next_change != null) && (next_change.op.d != null) && (op.p === change_end) && (next_change.op.p === op.p); - const will_op_cancel_next_delete = undoing && is_op_adjacent_to_next_delete && (next_change.op.d.slice(0, op.i.length) === op.i); - - // If there is a delete at the start of the insert, and we're inserting - // at the start, we SHOULDN'T merge since the delete acts as a partition. - // The previous op will be the delete, but it's already been shifted by this insert - // - // I.e. 
- // Originally: |-- existing insert --| - // | <- existing delete at same offset - // - // Now: |-- existing insert --| <- not shifted yet - // |-- this insert --|| <- existing delete shifted along to end of this op - // - // After: |-- existing insert --| - // |-- this insert --|| <- existing delete - // - // Without the delete, the inserts would be merged. - const is_insert_blocked_by_delete = ((previous_change != null) && (previous_change.op.d != null) && (previous_change.op.p === op_end)); + applyOps(ops, metadata) { + if (metadata == null) { + metadata = {} + } + return Array.from(ops).map((op) => this.applyOp(op, metadata)) + } - // If the insert is overlapping another insert, either at the beginning in the middle or touching the end, - // then we merge them into one. - if (this.track_changes && - is_change_overlapping && - !is_insert_blocked_by_delete && - !already_merged && - !will_op_cancel_next_delete && - is_same_user) { - offset = op_start - change_start; - change.op.i = change.op.i.slice(0, offset) + op.i + change.op.i.slice(offset); - change.metadata.ts = metadata.ts; - already_merged = true; - moved_changes.push(change); - } else if (op_start <= change_start) { - // If we're fully before the other insert we can just shift the other insert by our length. - // If they are touching, and should have been merged, they will have been above. - // If not merged above, then it must be blocked by a delete, and will be after this insert, so we shift it along as well - change.op.p += op_length; - moved_changes.push(change); - } else if ((!is_same_user || !this.track_changes) && (change_start < op_start && op_start < change_end)) { - // This user is inserting inside a change by another user, so we need to split the - // other user's change into one before and after this one. 
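The split described in the comment above can be sketched in isolation. The helper below is illustrative only (the name is ours, not the code's); it mirrors how the existing change keeps the text before the caret while a new change is created for the text after it:

    // Split another user's insert when someone types inside it.
    function splitInsert(change, op) {
      const offset = op.p - change.op.p
      const after = change.op.i.slice(offset)
      change.op.i = change.op.i.slice(0, offset) // existing change keeps the 'before' part
      return {
        op: { i: after, p: change.op.p + offset + op.i.length },
        metadata: { ...change.metadata }
      }
    }

    // User B types 'X' at offset 3 inside user A's insert 'abcd' at offset 1:
    const change = { op: { i: 'abcd', p: 1 }, metadata: { user_id: 'A' } }
    splitInsert(change, { i: 'X', p: 3 })
    // => { op: { i: 'cd', p: 4 }, metadata: { user_id: 'A' } }; change.op.i is now 'ab'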
- offset = op_start - change_start; - const before_content = change.op.i.slice(0, offset); - const after_content = change.op.i.slice(offset); - - // The existing change can become the 'before' change - change.op.i = before_content; - moved_changes.push(change); - - // Create a new op afterwards - const after_change = { - op: { - i: after_content, - p: change_start + offset + op_length - }, - metadata: {} - }; - for (const key in change.metadata) { const value = change.metadata[key]; after_change.metadata[key] = value; } - new_changes.push(after_change); - } - } - - previous_change = change; - } + addComment(op, metadata) { + const existing = this.getComment(op.t) + if (existing != null) { + this.moveCommentId(op.t, op.p, op.c) + return existing + } else { + let comment + this.comments.push( + (comment = { + id: op.t || this.newId(), + op: { + // Copy because we'll modify in place + c: op.c, + p: op.p, + t: op.t + }, + metadata + }) + ) + this._markAsDirty(comment, 'comment', 'added') + return comment + } + } - if (this.track_changes && !already_merged) { - this._addOp(op, metadata); - } - for ({op, metadata} of Array.from(new_changes)) { - this._addOp(op, metadata); - } - - for (change of Array.from(remove_changes)) { - this._removeChange(change); - } - - return (() => { - const result = []; - for (change of Array.from(moved_changes)) { - result.push(this._markAsDirty(change, "change", "moved")); - } - return result; - })(); - } - - applyDeleteToChanges(op, metadata) { - let change; - const op_start = op.p; - const op_length = op.d.length; - const op_end = op.p + op_length; - const remove_changes = []; - let moved_changes = []; - - // We might end up modifying our delete op if it merges with existing deletes, or cancels out - // with an existing insert. Since we might do multiple modifications, we record them and do - // all the modifications after looping through the existing changes, so as not to mess up the - // offset indexes as we go. - const op_modifications = []; - for (change of Array.from(this.changes)) { - var change_start; - if (change.op.i != null) { - change_start = change.op.p; - const change_end = change_start + change.op.i.length; - if (op_end <= change_start) { - // Shift ops after us back by our length - change.op.p -= op_length; - moved_changes.push(change); - } else if (op_start >= change_end) { - // Delete is after insert, nothing to do - } else { - // When the new delete overlaps an insert, we should remove the part of the insert that - // is now deleted, and also remove the part of the new delete that overlapped. I.e. - // the two cancel out where they overlap. 
- var delete_remaining_after, delete_remaining_before, insert_remaining_after, insert_remaining_before; - if (op_start >= change_start) { - // |-- existing insert --| - // insert_remaining_before -> |.....||-- new delete --| - delete_remaining_before = ""; - insert_remaining_before = change.op.i.slice(0, op_start - change_start); - } else { - // delete_remaining_before -> |.....||-- existing insert --| - // |-- new delete --| - delete_remaining_before = op.d.slice(0, change_start - op_start); - insert_remaining_before = ""; - } + applyInsertToComments(op) { + return (() => { + const result = [] + for (const comment of Array.from(this.comments)) { + if (op.p <= comment.op.p) { + comment.op.p += op.i.length + result.push(this._markAsDirty(comment, 'comment', 'moved')) + } else if (op.p < comment.op.p + comment.op.c.length) { + const offset = op.p - comment.op.p + comment.op.c = + comment.op.c.slice(0, +(offset - 1) + 1 || undefined) + + op.i + + comment.op.c.slice(offset) + result.push(this._markAsDirty(comment, 'comment', 'moved')) + } else { + result.push(undefined) + } + } + return result + })() + } - if (op_end <= change_end) { - // |-- existing insert --| - // |-- new delete --||.....| <- insert_remaining_after - delete_remaining_after = ""; - insert_remaining_after = change.op.i.slice(op_end - change_start); - } else { - // |-- existing insert --||.....| <- delete_remaining_after - // |-- new delete --| - delete_remaining_after = op.d.slice(change_end - op_start); - insert_remaining_after = ""; - } + applyDeleteToComments(op) { + const op_start = op.p + const op_length = op.d.length + const op_end = op.p + op_length + return (() => { + const result = [] + for (const comment of Array.from(this.comments)) { + const comment_start = comment.op.p + const comment_end = comment.op.p + comment.op.c.length + const comment_length = comment_end - comment_start + if (op_end <= comment_start) { + // delete is fully before comment + comment.op.p -= op_length + result.push(this._markAsDirty(comment, 'comment', 'moved')) + } else if (op_start >= comment_end) { + // delete is fully after comment, nothing to do + } else { + // delete and comment overlap + var remaining_after, remaining_before + if (op_start <= comment_start) { + remaining_before = '' + } else { + remaining_before = comment.op.c.slice(0, op_start - comment_start) + } + if (op_end >= comment_end) { + remaining_after = '' + } else { + remaining_after = comment.op.c.slice(op_end - comment_start) + } - const insert_remaining = insert_remaining_before + insert_remaining_after; - if (insert_remaining.length > 0) { - change.op.i = insert_remaining; - change.op.p = Math.min(change_start, op_start); - change.metadata.ts = metadata.ts; - moved_changes.push(change); - } else { - remove_changes.push(change); - } + // Check deleted content matches delete op + const deleted_comment = comment.op.c.slice( + remaining_before.length, + comment_length - remaining_after.length + ) + const offset = Math.max(0, comment_start - op_start) + const deleted_op_content = op.d + .slice(offset) + .slice(0, deleted_comment.length) + if (deleted_comment !== deleted_op_content) { + throw new Error('deleted content does not match comment content') + } - // We know what we want to preserve of our delete op before (delete_remaining_before) and what we want to preserve - // afterwards (delete_remaining_before). Now we need to turn that into a modification which deletes the - // chunk in the middle not covered by these. 
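Read in isolation, the arithmetic the comment above describes reduces to: keep the two ends of the delete, emit a modification for the middle chunk. A standalone sketch (the helper name is hypothetical):

    function middleChunk(d, delete_remaining_before, delete_remaining_after) {
      // Length and start of the chunk not covered by the preserved ends.
      const removedLength =
        d.length - delete_remaining_before.length - delete_remaining_after.length
      const removedStart = delete_remaining_before.length
      return { d: d.slice(removedStart, removedStart + removedLength), p: removedStart }
    }

    // Deleting 'abcdef' where 'cd' overlapped an existing insert:
    middleChunk('abcdef', 'ab', 'ef') // => { d: 'cd', p: 2 }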
- const delete_removed_length = op.d.length - delete_remaining_before.length - delete_remaining_after.length; - const delete_removed_start = delete_remaining_before.length; - const modification = { - d: op.d.slice(delete_removed_start, delete_removed_start + delete_removed_length), - p: delete_removed_start - }; - if (modification.d.length > 0) { - op_modifications.push(modification); - } - } - } else if (change.op.d != null) { - change_start = change.op.p; - if ((op_end < change_start) || (!this.track_changes && (op_end === change_start))) { - // Shift ops after us back by our length. - // If we're tracking changes, it must be strictly before, since we'll merge - // below if they are touching. Otherwise, touching is fine. - change.op.p -= op_length; - moved_changes.push(change); - } else if (op_start <= change_start && change_start <= op_end) { - if (this.track_changes) { - // If we overlap a delete, add it in our content, and delete the existing change. - // It's easier to do it this way, rather than modifying the existing delete in case - // we overlap many deletes and we'd need to track that. We have a workaround to - // update the delete in place if possible below. - const offset = change_start - op_start; - op_modifications.push({ i: change.op.d, p: offset }); - remove_changes.push(change); - } else { - change.op.p = op_start; - moved_changes.push(change); - } - } - } - } + comment.op.p = Math.min(comment_start, op_start) + comment.op.c = remaining_before + remaining_after + result.push(this._markAsDirty(comment, 'comment', 'moved')) + } + } + return result + })() + } - // Copy rather than modify because we still need to apply it to comments - op = { - p: op.p, - d: this._applyOpModifications(op.d, op_modifications) - }; + applyInsertToChanges(op, metadata) { + let change + const op_start = op.p + const op_length = op.i.length + const op_end = op.p + op_length + const undoing = !!op.u - for (change of Array.from(remove_changes)) { - // This is a bit of hack to avoid removing one delete and replacing it with another. - // If we don't do this, it causes the UI to flicker - if ((op.d.length > 0) && (change.op.d != null) && (op.p <= change.op.p && change.op.p <= op.p + op.d.length)) { - change.op.p = op.p; - change.op.d = op.d; - change.metadata = metadata; - moved_changes.push(change); - op.d = ""; // stop it being added - } else { - this._removeChange(change); - } - } + let already_merged = false + let previous_change = null + const moved_changes = [] + const remove_changes = [] + const new_changes = [] - if (this.track_changes && (op.d.length > 0)) { - this._addOp(op, metadata); - } else { - // It's possible that we deleted an insert between two other inserts. I.e. 
- // If we delete 'user_2 insert' in: - // |-- user_1 insert --||-- user_2 insert --||-- user_1 insert --| - // it becomes: - // |-- user_1 insert --||-- user_1 insert --| - // We need to merge these together again - const results = this._scanAndMergeAdjacentUpdates(); - moved_changes = moved_changes.concat(results.moved_changes); - for (change of Array.from(results.remove_changes)) { - this._removeChange(change); - moved_changes = moved_changes.filter(c => c !== change); - } - } - - return (() => { - const result = []; - for (change of Array.from(moved_changes)) { - result.push(this._markAsDirty(change, "change", "moved")); - } - return result; - })(); - } + for (let i = 0; i < this.changes.length; i++) { + change = this.changes[i] + const change_start = change.op.p - _addOp(op, metadata) { - const change = { - id: this.newId(), - op: this._clone(op), // Don't take a reference to the existing op since we'll modify this in place with future changes - metadata: this._clone(metadata) - }; - this.changes.push(change); + if (change.op.d != null) { + // Shift any deletes after this along by the length of this insert + if (op_start < change_start) { + change.op.p += op_length + moved_changes.push(change) + } else if (op_start === change_start) { + // If we are undoing, then we want to cancel any existing delete ranges if we can. + // Check if the insert matches the start of the delete, and just remove it from the delete instead if so. + if ( + undoing && + change.op.d.length >= op.i.length && + change.op.d.slice(0, op.i.length) === op.i + ) { + change.op.d = change.op.d.slice(op.i.length) + change.op.p += op.i.length + if (change.op.d === '') { + remove_changes.push(change) + } else { + moved_changes.push(change) + } + already_merged = true + } else { + change.op.p += op_length + moved_changes.push(change) + } + } + } else if (change.op.i != null) { + var offset + const change_end = change_start + change.op.i.length + const is_change_overlapping = + op_start >= change_start && op_start <= change_end - // Keep ops in order of offset, with deletes before inserts - this.changes.sort(function(c1, c2) { - const result = c1.op.p - c2.op.p; - if (result !== 0) { - return result; - } else if ((c1.op.i != null) && (c2.op.d != null)) { - return 1; - } else { - return -1; - } - }); + // Only merge inserts if they are from the same user + const is_same_user = metadata.user_id === change.metadata.user_id - return this._markAsDirty(change, "change", "added"); - } - - _removeChange(change) { - this.changes = this.changes.filter(c => c.id !== change.id); - return this._markAsDirty(change, "change", "removed"); - } - - _applyOpModifications(content, op_modifications) { - // Put in descending position order, with deleting first if at the same offset - // (Inserting first would modify the content that the delete will delete) - op_modifications.sort(function(a, b) { - const result = b.p - a.p; - if (result !== 0) { - return result; - } else if ((a.i != null) && (b.d != null)) { - return 1; - } else { - return -1; - } - }); + // If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also + // an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. + // E.g. + // foo|<--- about to insert 'b' here + // inserted 'foo' --^ ^-- deleted 'bar' + // should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), . 
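The undo case in the comment above can be shown with a small standalone sketch (illustrative only, not the class's API): if an undo re-inserts text matching the start of an adjacent delete marker, the marker is trimmed rather than a new insert being merged:

    function cancelAgainstDelete(insertOp, deleteChange) {
      // Mirrors the undoing branch: trim the delete instead of merging the insert.
      if (deleteChange.d.startsWith(insertOp.i)) {
        deleteChange.d = deleteChange.d.slice(insertOp.i.length)
        deleteChange.p += insertOp.i.length
      }
      return deleteChange
    }

    // Undoing the deletion of 'b' from 'bar':
    cancelAgainstDelete({ i: 'b', p: 3, u: true }, { d: 'bar', p: 3 })
    // => { d: 'ar', p: 4 }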
+ const next_change = this.changes[i + 1] + const is_op_adjacent_to_next_delete = + next_change != null && + next_change.op.d != null && + op.p === change_end && + next_change.op.p === op.p + const will_op_cancel_next_delete = + undoing && + is_op_adjacent_to_next_delete && + next_change.op.d.slice(0, op.i.length) === op.i - for (const modification of Array.from(op_modifications)) { - if (modification.i != null) { - content = content.slice(0, modification.p) + modification.i + content.slice(modification.p); - } else if (modification.d != null) { - if (content.slice(modification.p, modification.p + modification.d.length) !== modification.d) { - throw new Error(`deleted content does not match. content: ${JSON.stringify(content)}; modification: ${JSON.stringify(modification)}`); - } - content = content.slice(0, modification.p) + content.slice(modification.p + modification.d.length); - } - } - return content; - } - - _scanAndMergeAdjacentUpdates() { - // This should only need calling when deleting an update between two - // other updates. There's no other way to get two adjacent updates from the - // same user, since they would be merged on insert. - let previous_change = null; - const remove_changes = []; - const moved_changes = []; - for (const change of Array.from(this.changes)) { - if (((previous_change != null ? previous_change.op.i : undefined) != null) && (change.op.i != null)) { - const previous_change_end = previous_change.op.p + previous_change.op.i.length; - const previous_change_user_id = previous_change.metadata.user_id; - const change_start = change.op.p; - const change_user_id = change.metadata.user_id; - if ((previous_change_end === change_start) && (previous_change_user_id === change_user_id)) { - remove_changes.push(change); - previous_change.op.i += change.op.i; - moved_changes.push(previous_change); - } - } else if (((previous_change != null ? previous_change.op.d : undefined) != null) && (change.op.d != null) && (previous_change.op.p === change.op.p)) { - // Merge adjacent deletes - previous_change.op.d += change.op.d; - remove_changes.push(change); - moved_changes.push(previous_change); - } else { // Only update to the current change if we haven't removed it. - previous_change = change; - } - } - return { moved_changes, remove_changes }; - } - - resetDirtyState() { - return this._dirtyState = { - comment: { - moved: {}, - removed: {}, - added: {} - }, - change: { - moved: {}, - removed: {}, - added: {} - } - }; - } - - getDirtyState() { - return this._dirtyState; - } - - _markAsDirty(object, type, action) { - return this._dirtyState[type][action][object.id] = object; - } - - _clone(object) { - const clone = {}; - for (const k in object) { const v = object[k]; clone[k] = v; } - return clone; - } - }; -}; + // If there is a delete at the start of the insert, and we're inserting + // at the start, we SHOULDN'T merge since the delete acts as a partition. + // The previous op will be the delete, but it's already been shifted by this insert + // + // I.e. + // Originally: |-- existing insert --| + // | <- existing delete at same offset + // + // Now: |-- existing insert --| <- not shifted yet + // |-- this insert --|| <- existing delete shifted along to end of this op + // + // After: |-- existing insert --| + // |-- this insert --|| <- existing delete + // + // Without the delete, the inserts would be merged. 
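The partition check that follows from the comment above is small enough to state on its own (standalone sketch, not the class's API):

    // An insert is not merged into an overlapping insert when a delete marker
    // sits exactly at the end of the new insert: the delete separates the two.
    function insertBlockedByDelete(previousChange, opEnd) {
      return (
        previousChange != null &&
        previousChange.op.d != null &&
        previousChange.op.p === opEnd
      )
    }

    insertBlockedByDelete({ op: { d: 'x', p: 10 } }, 10) // => true
    insertBlockedByDelete({ op: { i: 'y', p: 10 } }, 10) // => false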
+ const is_insert_blocked_by_delete = + previous_change != null && + previous_change.op.d != null && + previous_change.op.p === op_end + + // If the insert is overlapping another insert, either at the beginning in the middle or touching the end, + // then we merge them into one. + if ( + this.track_changes && + is_change_overlapping && + !is_insert_blocked_by_delete && + !already_merged && + !will_op_cancel_next_delete && + is_same_user + ) { + offset = op_start - change_start + change.op.i = + change.op.i.slice(0, offset) + op.i + change.op.i.slice(offset) + change.metadata.ts = metadata.ts + already_merged = true + moved_changes.push(change) + } else if (op_start <= change_start) { + // If we're fully before the other insert we can just shift the other insert by our length. + // If they are touching, and should have been merged, they will have been above. + // If not merged above, then it must be blocked by a delete, and will be after this insert, so we shift it along as well + change.op.p += op_length + moved_changes.push(change) + } else if ( + (!is_same_user || !this.track_changes) && + change_start < op_start && + op_start < change_end + ) { + // This user is inserting inside a change by another user, so we need to split the + // other user's change into one before and after this one. + offset = op_start - change_start + const before_content = change.op.i.slice(0, offset) + const after_content = change.op.i.slice(offset) + + // The existing change can become the 'before' change + change.op.i = before_content + moved_changes.push(change) + + // Create a new op afterwards + const after_change = { + op: { + i: after_content, + p: change_start + offset + op_length + }, + metadata: {} + } + for (const key in change.metadata) { + const value = change.metadata[key] + after_change.metadata[key] = value + } + new_changes.push(after_change) + } + } + + previous_change = change + } + + if (this.track_changes && !already_merged) { + this._addOp(op, metadata) + } + for ({ op, metadata } of Array.from(new_changes)) { + this._addOp(op, metadata) + } + + for (change of Array.from(remove_changes)) { + this._removeChange(change) + } + + return (() => { + const result = [] + for (change of Array.from(moved_changes)) { + result.push(this._markAsDirty(change, 'change', 'moved')) + } + return result + })() + } + + applyDeleteToChanges(op, metadata) { + let change + const op_start = op.p + const op_length = op.d.length + const op_end = op.p + op_length + const remove_changes = [] + let moved_changes = [] + + // We might end up modifying our delete op if it merges with existing deletes, or cancels out + // with an existing insert. Since we might do multiple modifications, we record them and do + // all the modifications after looping through the existing changes, so as not to mess up the + // offset indexes as we go. + const op_modifications = [] + for (change of Array.from(this.changes)) { + var change_start + if (change.op.i != null) { + change_start = change.op.p + const change_end = change_start + change.op.i.length + if (op_end <= change_start) { + // Shift ops after us back by our length + change.op.p -= op_length + moved_changes.push(change) + } else if (op_start >= change_end) { + // Delete is after insert, nothing to do + } else { + // When the new delete overlaps an insert, we should remove the part of the insert that + // is now deleted, and also remove the part of the new delete that overlapped. I.e. + // the two cancel out where they overlap. 
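The cancellation rule above splits a delete spanning [op_start, op_end) against an insert spanning [change_start, change_end): each op keeps only the parts outside the overlap. A standalone sketch of just that arithmetic (the helper is hypothetical; variable names mirror the code that follows):

    function splitOverlap(insertText, change_start, deleteText, op_start) {
      const change_end = change_start + insertText.length
      const op_end = op_start + deleteText.length
      const insert_remaining_before =
        op_start >= change_start ? insertText.slice(0, op_start - change_start) : ''
      const delete_remaining_before =
        op_start >= change_start ? '' : deleteText.slice(0, change_start - op_start)
      const insert_remaining_after =
        op_end <= change_end ? insertText.slice(op_end - change_start) : ''
      const delete_remaining_after =
        op_end <= change_end ? '' : deleteText.slice(change_end - op_start)
      return {
        insert: insert_remaining_before + insert_remaining_after,
        delete_before: delete_remaining_before,
        delete_after: delete_remaining_after
      }
    }

    // Insert 'cdef' at offset 2, then delete 'efgh' at offset 4:
    splitOverlap('cdef', 2, 'efgh', 4)
    // => { insert: 'cd', delete_before: '', delete_after: 'gh' }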
+ var delete_remaining_after, + delete_remaining_before, + insert_remaining_after, + insert_remaining_before + if (op_start >= change_start) { + // |-- existing insert --| + // insert_remaining_before -> |.....||-- new delete --| + delete_remaining_before = '' + insert_remaining_before = change.op.i.slice( + 0, + op_start - change_start + ) + } else { + // delete_remaining_before -> |.....||-- existing insert --| + // |-- new delete --| + delete_remaining_before = op.d.slice(0, change_start - op_start) + insert_remaining_before = '' + } + + if (op_end <= change_end) { + // |-- existing insert --| + // |-- new delete --||.....| <- insert_remaining_after + delete_remaining_after = '' + insert_remaining_after = change.op.i.slice(op_end - change_start) + } else { + // |-- existing insert --||.....| <- delete_remaining_after + // |-- new delete --| + delete_remaining_after = op.d.slice(change_end - op_start) + insert_remaining_after = '' + } + + const insert_remaining = + insert_remaining_before + insert_remaining_after + if (insert_remaining.length > 0) { + change.op.i = insert_remaining + change.op.p = Math.min(change_start, op_start) + change.metadata.ts = metadata.ts + moved_changes.push(change) + } else { + remove_changes.push(change) + } + + // We know what we want to preserve of our delete op before (delete_remaining_before) and what we want to preserve + // afterwards (delete_remaining_after). Now we need to turn that into a modification which deletes the + // chunk in the middle not covered by these. + const delete_removed_length = + op.d.length - + delete_remaining_before.length - + delete_remaining_after.length + const delete_removed_start = delete_remaining_before.length + const modification = { + d: op.d.slice( + delete_removed_start, + delete_removed_start + delete_removed_length + ), + p: delete_removed_start + } + if (modification.d.length > 0) { + op_modifications.push(modification) + } + } + } else if (change.op.d != null) { + change_start = change.op.p + if ( + op_end < change_start || + (!this.track_changes && op_end === change_start) + ) { + // Shift ops after us back by our length. + // If we're tracking changes, it must be strictly before, since we'll merge + // below if they are touching. Otherwise, touching is fine. + change.op.p -= op_length + moved_changes.push(change) + } else if (op_start <= change_start && change_start <= op_end) { + if (this.track_changes) { + // If we overlap a delete, add it in our content, and delete the existing change. + // It's easier to do it this way, rather than modifying the existing delete in case + // we overlap many deletes and we'd need to track that. We have a workaround to + // update the delete in place if possible below. + const offset = change_start - op_start + op_modifications.push({ i: change.op.d, p: offset }) + remove_changes.push(change) + } else { + change.op.p = op_start + moved_changes.push(change) + } + } + } + } + + // Copy rather than modify because we still need to apply it to comments + op = { + p: op.p, + d: this._applyOpModifications(op.d, op_modifications) + } + + for (change of Array.from(remove_changes)) { + // This is a bit of a hack to avoid removing one delete and replacing it with another. 
+ // If we don't do this, it causes the UI to flicker + if ( + op.d.length > 0 && + change.op.d != null && + op.p <= change.op.p && + change.op.p <= op.p + op.d.length + ) { + change.op.p = op.p + change.op.d = op.d + change.metadata = metadata + moved_changes.push(change) + op.d = '' // stop it being added + } else { + this._removeChange(change) + } + } + + if (this.track_changes && op.d.length > 0) { + this._addOp(op, metadata) + } else { + // It's possible that we deleted an insert between two other inserts. I.e. + // If we delete 'user_2 insert' in: + // |-- user_1 insert --||-- user_2 insert --||-- user_1 insert --| + // it becomes: + // |-- user_1 insert --||-- user_1 insert --| + // We need to merge these together again + const results = this._scanAndMergeAdjacentUpdates() + moved_changes = moved_changes.concat(results.moved_changes) + for (change of Array.from(results.remove_changes)) { + this._removeChange(change) + moved_changes = moved_changes.filter((c) => c !== change) + } + } + + return (() => { + const result = [] + for (change of Array.from(moved_changes)) { + result.push(this._markAsDirty(change, 'change', 'moved')) + } + return result + })() + } + + _addOp(op, metadata) { + const change = { + id: this.newId(), + op: this._clone(op), // Don't take a reference to the existing op since we'll modify this in place with future changes + metadata: this._clone(metadata) + } + this.changes.push(change) + + // Keep ops in order of offset, with deletes before inserts + this.changes.sort(function (c1, c2) { + const result = c1.op.p - c2.op.p + if (result !== 0) { + return result + } else if (c1.op.i != null && c2.op.d != null) { + return 1 + } else { + return -1 + } + }) + + return this._markAsDirty(change, 'change', 'added') + } + + _removeChange(change) { + this.changes = this.changes.filter((c) => c.id !== change.id) + return this._markAsDirty(change, 'change', 'removed') + } + + _applyOpModifications(content, op_modifications) { + // Put in descending position order, with deleting first if at the same offset + // (Inserting first would modify the content that the delete will delete) + op_modifications.sort(function (a, b) { + const result = b.p - a.p + if (result !== 0) { + return result + } else if (a.i != null && b.d != null) { + return 1 + } else { + return -1 + } + }) + + for (const modification of Array.from(op_modifications)) { + if (modification.i != null) { + content = + content.slice(0, modification.p) + + modification.i + + content.slice(modification.p) + } else if (modification.d != null) { + if ( + content.slice( + modification.p, + modification.p + modification.d.length + ) !== modification.d + ) { + throw new Error( + `deleted content does not match. content: ${JSON.stringify( + content + )}; modification: ${JSON.stringify(modification)}` + ) + } + content = + content.slice(0, modification.p) + + content.slice(modification.p + modification.d.length) + } + } + return content + } + + _scanAndMergeAdjacentUpdates() { + // This should only need calling when deleting an update between two + // other updates. There's no other way to get two adjacent updates from the + // same user, since they would be merged on insert. + let previous_change = null + const remove_changes = [] + const moved_changes = [] + for (const change of Array.from(this.changes)) { + if ( + (previous_change != null ? 
previous_change.op.i : undefined) != + null && + change.op.i != null + ) { + const previous_change_end = + previous_change.op.p + previous_change.op.i.length + const previous_change_user_id = previous_change.metadata.user_id + const change_start = change.op.p + const change_user_id = change.metadata.user_id + if ( + previous_change_end === change_start && + previous_change_user_id === change_user_id + ) { + remove_changes.push(change) + previous_change.op.i += change.op.i + moved_changes.push(previous_change) + } + } else if ( + (previous_change != null ? previous_change.op.d : undefined) != + null && + change.op.d != null && + previous_change.op.p === change.op.p + ) { + // Merge adjacent deletes + previous_change.op.d += change.op.d + remove_changes.push(change) + moved_changes.push(previous_change) + } else { + // Only update to the current change if we haven't removed it. + previous_change = change + } + } + return { moved_changes, remove_changes } + } + + resetDirtyState() { + return (this._dirtyState = { + comment: { + moved: {}, + removed: {}, + added: {} + }, + change: { + moved: {}, + removed: {}, + added: {} + } + }) + } + + getDirtyState() { + return this._dirtyState + } + + _markAsDirty(object, type, action) { + return (this._dirtyState[type][action][object.id] = object) + } + + _clone(object) { + const clone = {} + for (const k in object) { + const v = object[k] + clone[k] = v + } + return clone + } + }) +} if (typeof define !== 'undefined' && define !== null) { - define([], load); + define([], load) } else { - module.exports = load(); + module.exports = load() } diff --git a/services/document-updater/app/js/RateLimitManager.js b/services/document-updater/app/js/RateLimitManager.js index 17803f1316..831b34eae1 100644 --- a/services/document-updater/app/js/RateLimitManager.js +++ b/services/document-updater/app/js/RateLimitManager.js @@ -9,55 +9,72 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let RateLimiter; -const Settings = require('settings-sharelatex'); -const logger = require('logger-sharelatex'); -const Metrics = require('./Metrics'); +let RateLimiter +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const Metrics = require('./Metrics') -module.exports = (RateLimiter = class RateLimiter { +module.exports = RateLimiter = class RateLimiter { + constructor(number) { + if (number == null) { + number = 10 + } + this.ActiveWorkerCount = 0 + this.CurrentWorkerLimit = number + this.BaseWorkerCount = number + } - constructor(number) { - if (number == null) { number = 10; } - this.ActiveWorkerCount = 0; - this.CurrentWorkerLimit = number; - this.BaseWorkerCount = number; - } + _adjustLimitUp() { + this.CurrentWorkerLimit += 0.1 // allow target worker limit to increase gradually + return Metrics.gauge('currentLimit', Math.ceil(this.CurrentWorkerLimit)) + } - _adjustLimitUp() { - this.CurrentWorkerLimit += 0.1; // allow target worker limit to increase gradually - return Metrics.gauge("currentLimit", Math.ceil(this.CurrentWorkerLimit)); - } + _adjustLimitDown() { + this.CurrentWorkerLimit = Math.max( + this.BaseWorkerCount, + this.CurrentWorkerLimit * 0.9 + ) + logger.log( + { currentLimit: Math.ceil(this.CurrentWorkerLimit) }, + 'reducing rate limit' + ) + return Metrics.gauge('currentLimit', Math.ceil(this.CurrentWorkerLimit)) + } - _adjustLimitDown() { - this.CurrentWorkerLimit = Math.max(this.BaseWorkerCount, (this.CurrentWorkerLimit 
* 0.9)); - logger.log({currentLimit: Math.ceil(this.CurrentWorkerLimit)}, "reducing rate limit"); - return Metrics.gauge("currentLimit", Math.ceil(this.CurrentWorkerLimit)); - } + _trackAndRun(task, callback) { + if (callback == null) { + callback = function () {} + } + this.ActiveWorkerCount++ + Metrics.gauge('processingUpdates', this.ActiveWorkerCount) + return task((err) => { + this.ActiveWorkerCount-- + Metrics.gauge('processingUpdates', this.ActiveWorkerCount) + return callback(err) + }) + } - _trackAndRun(task, callback) { - if (callback == null) { callback = function() {}; } - this.ActiveWorkerCount++; - Metrics.gauge("processingUpdates", this.ActiveWorkerCount); - return task(err => { - this.ActiveWorkerCount--; - Metrics.gauge("processingUpdates", this.ActiveWorkerCount); - return callback(err); - }); - } - - run(task, callback) { - if (this.ActiveWorkerCount < this.CurrentWorkerLimit) { - this._trackAndRun(task); // below the limit, just put the task in the background - callback(); // return immediately - if (this.CurrentWorkerLimit > this.BaseWorkerCount) { - return this._adjustLimitDown(); - } - } else { - logger.log({active: this.ActiveWorkerCount, currentLimit: Math.ceil(this.CurrentWorkerLimit)}, "hit rate limit"); - return this._trackAndRun(task, err => { - if ((err == null)) { this._adjustLimitUp(); } // don't increment rate limit if there was an error - return callback(err); - }); // only return after task completes - } - } -}); + run(task, callback) { + if (this.ActiveWorkerCount < this.CurrentWorkerLimit) { + this._trackAndRun(task) // below the limit, just put the task in the background + callback() // return immediately + if (this.CurrentWorkerLimit > this.BaseWorkerCount) { + return this._adjustLimitDown() + } + } else { + logger.log( + { + active: this.ActiveWorkerCount, + currentLimit: Math.ceil(this.CurrentWorkerLimit) + }, + 'hit rate limit' + ) + return this._trackAndRun(task, (err) => { + if (err == null) { + this._adjustLimitUp() + } // don't increment rate limit if there was an error + return callback(err) + }) // only return after task completes + } + } +} diff --git a/services/document-updater/app/js/RealTimeRedisManager.js b/services/document-updater/app/js/RealTimeRedisManager.js index e2aa12e8d3..537be23265 100644 --- a/services/document-updater/app/js/RealTimeRedisManager.js +++ b/services/document-updater/app/js/RealTimeRedisManager.js @@ -11,69 +11,77 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let RealTimeRedisManager; -const Settings = require('settings-sharelatex'); -const rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater); -const pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub); -const Keys = Settings.redis.documentupdater.key_schema; -const logger = require('logger-sharelatex'); -const os = require("os"); -const crypto = require("crypto"); -const metrics = require('./Metrics'); +let RealTimeRedisManager +const Settings = require('settings-sharelatex') +const rclient = require('redis-sharelatex').createClient( + Settings.redis.documentupdater +) +const pubsubClient = require('redis-sharelatex').createClient( + Settings.redis.pubsub +) +const Keys = Settings.redis.documentupdater.key_schema +const logger = require('logger-sharelatex') +const os = require('os') +const crypto = require('crypto') +const metrics = require('./Metrics') -const HOST = os.hostname(); -const RND = 
crypto.randomBytes(4).toString('hex'); // generate a random key for this process -let COUNT = 0; +const HOST = os.hostname() +const RND = crypto.randomBytes(4).toString('hex') // generate a random key for this process +let COUNT = 0 -const MAX_OPS_PER_ITERATION = 8; // process a limited number of ops for safety +const MAX_OPS_PER_ITERATION = 8 // process a limited number of ops for safety -module.exports = (RealTimeRedisManager = { - getPendingUpdatesForDoc(doc_id, callback){ - const multi = rclient.multi(); - multi.lrange(Keys.pendingUpdates({doc_id}), 0, (MAX_OPS_PER_ITERATION-1)); - multi.ltrim(Keys.pendingUpdates({doc_id}), MAX_OPS_PER_ITERATION, -1); - return multi.exec(function(error, replys) { - let jsonUpdate; - if (error != null) { return callback(error); } - const jsonUpdates = replys[0]; - for (jsonUpdate of Array.from(jsonUpdates)) { - // record metric for each update removed from queue - metrics.summary("redis.pendingUpdates", jsonUpdate.length, {status: "pop"}); - } - const updates = []; - for (jsonUpdate of Array.from(jsonUpdates)) { - var update; - try { - update = JSON.parse(jsonUpdate); - } catch (e) { - return callback(e); - } - updates.push(update); - } - return callback(error, updates); - }); - }, +module.exports = RealTimeRedisManager = { + getPendingUpdatesForDoc(doc_id, callback) { + const multi = rclient.multi() + multi.lrange(Keys.pendingUpdates({ doc_id }), 0, MAX_OPS_PER_ITERATION - 1) + multi.ltrim(Keys.pendingUpdates({ doc_id }), MAX_OPS_PER_ITERATION, -1) + return multi.exec(function (error, replys) { + let jsonUpdate + if (error != null) { + return callback(error) + } + const jsonUpdates = replys[0] + for (jsonUpdate of Array.from(jsonUpdates)) { + // record metric for each update removed from queue + metrics.summary('redis.pendingUpdates', jsonUpdate.length, { + status: 'pop' + }) + } + const updates = [] + for (jsonUpdate of Array.from(jsonUpdates)) { + var update + try { + update = JSON.parse(jsonUpdate) + } catch (e) { + return callback(e) + } + updates.push(update) + } + return callback(error, updates) + }) + }, - getUpdatesLength(doc_id, callback){ - return rclient.llen(Keys.pendingUpdates({doc_id}), callback); - }, + getUpdatesLength(doc_id, callback) { + return rclient.llen(Keys.pendingUpdates({ doc_id }), callback) + }, - sendData(data) { - // create a unique message id using a counter - const message_id = `doc:${HOST}:${RND}-${COUNT++}`; - if (data != null) { - data._id = message_id; - } + sendData(data) { + // create a unique message id using a counter + const message_id = `doc:${HOST}:${RND}-${COUNT++}` + if (data != null) { + data._id = message_id + } - const blob = JSON.stringify(data); - metrics.summary("redis.publish.applied-ops", blob.length); + const blob = JSON.stringify(data) + metrics.summary('redis.publish.applied-ops', blob.length) - // publish on separate channels for individual projects and docs when - // configured (needs realtime to be configured for this too). - if (Settings.publishOnIndividualChannels) { - return pubsubClient.publish(`applied-ops:${data.doc_id}`, blob); - } else { - return pubsubClient.publish("applied-ops", blob); - } - } -}); + // publish on separate channels for individual projects and docs when + // configured (needs realtime to be configured for this too). 
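+ // As an illustration (hypothetical values): on host "web-1" with random key
+ // "9f2ab1cd", successive messages get ids "doc:web-1:9f2ab1cd-0",
+ // "doc:web-1:9f2ab1cd-1", and so on; with publishOnIndividualChannels
+ // enabled, an op for a given doc goes to "applied-ops:<doc_id>" instead of
+ // the shared "applied-ops" channel.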
+ if (Settings.publishOnIndividualChannels) { + return pubsubClient.publish(`applied-ops:${data.doc_id}`, blob) + } else { + return pubsubClient.publish('applied-ops', blob) + } + } +} diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js index 80944e10be..64352b4e00 100644 --- a/services/document-updater/app/js/RedisManager.js +++ b/services/document-updater/app/js/RedisManager.js @@ -13,478 +13,750 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let RedisManager; -const Settings = require('settings-sharelatex'); -const rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater); -const logger = require('logger-sharelatex'); -const metrics = require('./Metrics'); -const Errors = require("./Errors"); -const crypto = require("crypto"); -const async = require("async"); -const ProjectHistoryRedisManager = require("./ProjectHistoryRedisManager"); +let RedisManager +const Settings = require('settings-sharelatex') +const rclient = require('redis-sharelatex').createClient( + Settings.redis.documentupdater +) +const logger = require('logger-sharelatex') +const metrics = require('./Metrics') +const Errors = require('./Errors') +const crypto = require('crypto') +const async = require('async') +const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') // Sometimes Redis calls take an unexpectedly long time. We have to be // quick with Redis calls because we're holding a lock that expires // after 30 seconds. We can't let any errors in the rest of the stack // hold us up, and need to bail out quickly if there is a problem. -const MAX_REDIS_REQUEST_LENGTH = 5000; // 5 seconds +const MAX_REDIS_REQUEST_LENGTH = 5000 // 5 seconds // Make times easy to read -const minutes = 60; // seconds for Redis expire +const minutes = 60 // seconds for Redis expire -const logHashErrors = Settings.documentupdater != null ? Settings.documentupdater.logHashErrors : undefined; -const logHashReadErrors = logHashErrors != null ? logHashErrors.read : undefined; +const logHashErrors = + Settings.documentupdater != null + ? Settings.documentupdater.logHashErrors + : undefined +const logHashReadErrors = logHashErrors != null ? logHashErrors.read : undefined -const MEGABYTES = 1024 * 1024; -const MAX_RANGES_SIZE = 3 * MEGABYTES; +const MEGABYTES = 1024 * 1024 +const MAX_RANGES_SIZE = 3 * MEGABYTES -const keys = Settings.redis.documentupdater.key_schema; -const historyKeys = Settings.redis.history.key_schema; // note: this is track changes, not project-history +const keys = Settings.redis.documentupdater.key_schema +const historyKeys = Settings.redis.history.key_schema // note: this is track changes, not project-history -module.exports = (RedisManager = { - rclient, +module.exports = RedisManager = { + rclient, - putDocInMemory(project_id, doc_id, docLines, version, ranges, pathname, projectHistoryId, _callback){ - const timer = new metrics.Timer("redis.put-doc"); - const callback = function(error) { - timer.done(); - return _callback(error); - }; - docLines = JSON.stringify(docLines); - if (docLines.indexOf("\u0000") !== -1) { - const error = new Error("null bytes found in doc lines"); - // this check was added to catch memory corruption in JSON.stringify. - // It sometimes returned null bytes at the end of the string. 
- logger.error({err: error, doc_id, docLines}, error.message); - return callback(error); - } - const docHash = RedisManager._computeHash(docLines); - // record bytes sent to redis - metrics.summary("redis.docLines", docLines.length, {status: "set"}); - logger.log({project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis"); - return RedisManager._serializeRanges(ranges, function(error, ranges) { - if (error != null) { - logger.error({err: error, doc_id, project_id}, error.message); - return callback(error); - } - const multi = rclient.multi(); - multi.set(keys.docLines({doc_id}), docLines); - multi.set(keys.projectKey({doc_id}), project_id); - multi.set(keys.docVersion({doc_id}), version); - multi.set(keys.docHash({doc_id}), docHash); - if (ranges != null) { - multi.set(keys.ranges({doc_id}), ranges); - } else { - multi.del(keys.ranges({doc_id})); - } - multi.set(keys.pathname({doc_id}), pathname); - multi.set(keys.projectHistoryId({doc_id}), projectHistoryId); - return multi.exec(function(error, result) { - if (error != null) { return callback(error); } - // update docsInProject set - return rclient.sadd(keys.docsInProject({project_id}), doc_id, callback); - }); - }); - }, + putDocInMemory( + project_id, + doc_id, + docLines, + version, + ranges, + pathname, + projectHistoryId, + _callback + ) { + const timer = new metrics.Timer('redis.put-doc') + const callback = function (error) { + timer.done() + return _callback(error) + } + docLines = JSON.stringify(docLines) + if (docLines.indexOf('\u0000') !== -1) { + const error = new Error('null bytes found in doc lines') + // this check was added to catch memory corruption in JSON.stringify. + // It sometimes returned null bytes at the end of the string. + logger.error({ err: error, doc_id, docLines }, error.message) + return callback(error) + } + const docHash = RedisManager._computeHash(docLines) + // record bytes sent to redis + metrics.summary('redis.docLines', docLines.length, { status: 'set' }) + logger.log( + { project_id, doc_id, version, docHash, pathname, projectHistoryId }, + 'putting doc in redis' + ) + return RedisManager._serializeRanges(ranges, function (error, ranges) { + if (error != null) { + logger.error({ err: error, doc_id, project_id }, error.message) + return callback(error) + } + const multi = rclient.multi() + multi.set(keys.docLines({ doc_id }), docLines) + multi.set(keys.projectKey({ doc_id }), project_id) + multi.set(keys.docVersion({ doc_id }), version) + multi.set(keys.docHash({ doc_id }), docHash) + if (ranges != null) { + multi.set(keys.ranges({ doc_id }), ranges) + } else { + multi.del(keys.ranges({ doc_id })) + } + multi.set(keys.pathname({ doc_id }), pathname) + multi.set(keys.projectHistoryId({ doc_id }), projectHistoryId) + return multi.exec(function (error, result) { + if (error != null) { + return callback(error) + } + // update docsInProject set + return rclient.sadd( + keys.docsInProject({ project_id }), + doc_id, + callback + ) + }) + }) + }, - removeDocFromMemory(project_id, doc_id, _callback){ - logger.log({project_id, doc_id}, "removing doc from redis"); - const callback = function(err) { - if (err != null) { - logger.err({project_id, doc_id, err}, "error removing doc from redis"); - return _callback(err); - } else { - logger.log({project_id, doc_id}, "removed doc from redis"); - return _callback(); - } - }; + removeDocFromMemory(project_id, doc_id, _callback) { + logger.log({ project_id, doc_id }, 'removing doc from redis') + const callback = function (err) { + if (err 
!= null) { + logger.err({ project_id, doc_id, err }, 'error removing doc from redis') + return _callback(err) + } else { + logger.log({ project_id, doc_id }, 'removed doc from redis') + return _callback() + } + } - let multi = rclient.multi(); - multi.strlen(keys.docLines({doc_id})); - multi.del(keys.docLines({doc_id})); - multi.del(keys.projectKey({doc_id})); - multi.del(keys.docVersion({doc_id})); - multi.del(keys.docHash({doc_id})); - multi.del(keys.ranges({doc_id})); - multi.del(keys.pathname({doc_id})); - multi.del(keys.projectHistoryId({doc_id})); - multi.del(keys.projectHistoryType({doc_id})); - multi.del(keys.unflushedTime({doc_id})); - multi.del(keys.lastUpdatedAt({doc_id})); - multi.del(keys.lastUpdatedBy({doc_id})); - return multi.exec(function(error, response) { - if (error != null) { return callback(error); } - const length = response != null ? response[0] : undefined; - if (length > 0) { - // record bytes freed in redis - metrics.summary("redis.docLines", length, {status: "del"}); - } - multi = rclient.multi(); - multi.srem(keys.docsInProject({project_id}), doc_id); - multi.del(keys.projectState({project_id})); - return multi.exec(callback); - }); - }, + let multi = rclient.multi() + multi.strlen(keys.docLines({ doc_id })) + multi.del(keys.docLines({ doc_id })) + multi.del(keys.projectKey({ doc_id })) + multi.del(keys.docVersion({ doc_id })) + multi.del(keys.docHash({ doc_id })) + multi.del(keys.ranges({ doc_id })) + multi.del(keys.pathname({ doc_id })) + multi.del(keys.projectHistoryId({ doc_id })) + multi.del(keys.projectHistoryType({ doc_id })) + multi.del(keys.unflushedTime({ doc_id })) + multi.del(keys.lastUpdatedAt({ doc_id })) + multi.del(keys.lastUpdatedBy({ doc_id })) + return multi.exec(function (error, response) { + if (error != null) { + return callback(error) + } + const length = response != null ? 
response[0] : undefined + if (length > 0) { + // record bytes freed in redis + metrics.summary('redis.docLines', length, { status: 'del' }) + } + multi = rclient.multi() + multi.srem(keys.docsInProject({ project_id }), doc_id) + multi.del(keys.projectState({ project_id })) + return multi.exec(callback) + }) + }, - checkOrSetProjectState(project_id, newState, callback) { - if (callback == null) { callback = function(error, stateChanged) {}; } - const multi = rclient.multi(); - multi.getset(keys.projectState({project_id}), newState); - multi.expire(keys.projectState({project_id}), 30 * minutes); - return multi.exec(function(error, response) { - if (error != null) { return callback(error); } - logger.log({project_id, newState, oldState: response[0]}, "checking project state"); - return callback(null, response[0] !== newState); - }); - }, + checkOrSetProjectState(project_id, newState, callback) { + if (callback == null) { + callback = function (error, stateChanged) {} + } + const multi = rclient.multi() + multi.getset(keys.projectState({ project_id }), newState) + multi.expire(keys.projectState({ project_id }), 30 * minutes) + return multi.exec(function (error, response) { + if (error != null) { + return callback(error) + } + logger.log( + { project_id, newState, oldState: response[0] }, + 'checking project state' + ) + return callback(null, response[0] !== newState) + }) + }, - clearProjectState(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - return rclient.del(keys.projectState({project_id}), callback); - }, + clearProjectState(project_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return rclient.del(keys.projectState({ project_id }), callback) + }, - getDoc(project_id, doc_id, callback){ - if (callback == null) { callback = function(error, lines, version, ranges, pathname, projectHistoryId, unflushedTime) {}; } - const timer = new metrics.Timer("redis.get-doc"); - const multi = rclient.multi(); - multi.get(keys.docLines({doc_id})); - multi.get(keys.docVersion({doc_id})); - multi.get(keys.docHash({doc_id})); - multi.get(keys.projectKey({doc_id})); - multi.get(keys.ranges({doc_id})); - multi.get(keys.pathname({doc_id})); - multi.get(keys.projectHistoryId({doc_id})); - multi.get(keys.unflushedTime({doc_id})); - multi.get(keys.lastUpdatedAt({doc_id})); - multi.get(keys.lastUpdatedBy({doc_id})); - return multi.exec(function(error, ...rest){ - let [docLines, version, storedHash, doc_project_id, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy] = Array.from(rest[0]); - const timeSpan = timer.done(); - if (error != null) { return callback(error); } - // check if request took too long and bail out. only do this for - // get, because it is the first call in each update, so if this - // passes we'll assume others have a reasonable chance to succeed. 
- if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { - error = new Error("redis getDoc exceeded timeout"); - return callback(error); - } - // record bytes loaded from redis - if (docLines != null) { - metrics.summary("redis.docLines", docLines.length, {status: "get"}); - } - // check sha1 hash value if present - if ((docLines != null) && (storedHash != null)) { - const computedHash = RedisManager._computeHash(docLines); - if (logHashReadErrors && (computedHash !== storedHash)) { - logger.error({project_id, doc_id, doc_project_id, computedHash, storedHash, docLines}, "hash mismatch on retrieved document"); - } - } + getDoc(project_id, doc_id, callback) { + if (callback == null) { + callback = function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime + ) {} + } + const timer = new metrics.Timer('redis.get-doc') + const multi = rclient.multi() + multi.get(keys.docLines({ doc_id })) + multi.get(keys.docVersion({ doc_id })) + multi.get(keys.docHash({ doc_id })) + multi.get(keys.projectKey({ doc_id })) + multi.get(keys.ranges({ doc_id })) + multi.get(keys.pathname({ doc_id })) + multi.get(keys.projectHistoryId({ doc_id })) + multi.get(keys.unflushedTime({ doc_id })) + multi.get(keys.lastUpdatedAt({ doc_id })) + multi.get(keys.lastUpdatedBy({ doc_id })) + return multi.exec(function (error, ...rest) { + let [ + docLines, + version, + storedHash, + doc_project_id, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy + ] = Array.from(rest[0]) + const timeSpan = timer.done() + if (error != null) { + return callback(error) + } + // check if request took too long and bail out. only do this for + // get, because it is the first call in each update, so if this + // passes we'll assume others have a reasonable chance to succeed. 
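+ // For example, with MAX_REDIS_REQUEST_LENGTH = 5000 (5s) as defined above,
+ // a getDoc round trip that takes 6s is abandoned here rather than risk the
+ // 30s lock expiring while the rest of the update pipeline is still running.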
+ if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { + error = new Error('redis getDoc exceeded timeout') + return callback(error) + } + // record bytes loaded from redis + if (docLines != null) { + metrics.summary('redis.docLines', docLines.length, { status: 'get' }) + } + // check sha1 hash value if present + if (docLines != null && storedHash != null) { + const computedHash = RedisManager._computeHash(docLines) + if (logHashReadErrors && computedHash !== storedHash) { + logger.error( + { + project_id, + doc_id, + doc_project_id, + computedHash, + storedHash, + docLines + }, + 'hash mismatch on retrieved document' + ) + } + } - try { - docLines = JSON.parse(docLines); - ranges = RedisManager._deserializeRanges(ranges); - } catch (e) { - return callback(e); - } + try { + docLines = JSON.parse(docLines) + ranges = RedisManager._deserializeRanges(ranges) + } catch (e) { + return callback(e) + } - version = parseInt(version || 0, 10); - // check doc is in requested project - if ((doc_project_id != null) && (doc_project_id !== project_id)) { - logger.error({project_id, doc_id, doc_project_id}, "doc not in project"); - return callback(new Errors.NotFoundError("document not found")); - } + version = parseInt(version || 0, 10) + // check doc is in requested project + if (doc_project_id != null && doc_project_id !== project_id) { + logger.error( + { project_id, doc_id, doc_project_id }, + 'doc not in project' + ) + return callback(new Errors.NotFoundError('document not found')) + } - if (projectHistoryId != null) { - projectHistoryId = parseInt(projectHistoryId); - } + if (projectHistoryId != null) { + projectHistoryId = parseInt(projectHistoryId) + } - // doc is not in redis, bail out - if ((docLines == null)) { - return callback(null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy); - } + // doc is not in redis, bail out + if (docLines == null) { + return callback( + null, + docLines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy + ) + } - // doc should be in project set, check if missing (workaround for missing docs from putDoc) - return rclient.sadd(keys.docsInProject({project_id}), doc_id, function(error, result) { - if (error != null) { return callback(error); } - if (result !== 0) { // doc should already be in set - logger.error({project_id, doc_id, doc_project_id}, "doc missing from docsInProject set"); - } - return callback(null, docLines, version, ranges, pathname, projectHistoryId, unflushedTime, lastUpdatedAt, lastUpdatedBy); - }); - }); - }, + // doc should be in project set, check if missing (workaround for missing docs from putDoc) + return rclient.sadd(keys.docsInProject({ project_id }), doc_id, function ( + error, + result + ) { + if (error != null) { + return callback(error) + } + if (result !== 0) { + // doc should already be in set + logger.error( + { project_id, doc_id, doc_project_id }, + 'doc missing from docsInProject set' + ) + } + return callback( + null, + docLines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy + ) + }) + }) + }, - getDocVersion(doc_id, callback) { - if (callback == null) { callback = function(error, version, projectHistoryType) {}; } - return rclient.mget(keys.docVersion({doc_id}), keys.projectHistoryType({doc_id}), function(error, result) { - if (error != null) { return callback(error); } - let [version, projectHistoryType] = Array.from(result || []); - version = parseInt(version, 10); - return 
callback(null, version, projectHistoryType); - }); - }, + getDocVersion(doc_id, callback) { + if (callback == null) { + callback = function (error, version, projectHistoryType) {} + } + return rclient.mget( + keys.docVersion({ doc_id }), + keys.projectHistoryType({ doc_id }), + function (error, result) { + if (error != null) { + return callback(error) + } + let [version, projectHistoryType] = Array.from(result || []) + version = parseInt(version, 10) + return callback(null, version, projectHistoryType) + } + ) + }, - getDocLines(doc_id, callback) { - if (callback == null) { callback = function(error, version) {}; } - return rclient.get(keys.docLines({doc_id}), function(error, docLines) { - if (error != null) { return callback(error); } - return callback(null, docLines); - }); - }, + getDocLines(doc_id, callback) { + if (callback == null) { + callback = function (error, version) {} + } + return rclient.get(keys.docLines({ doc_id }), function (error, docLines) { + if (error != null) { + return callback(error) + } + return callback(null, docLines) + }) + }, - getPreviousDocOps(doc_id, start, end, callback) { - if (callback == null) { callback = function(error, jsonOps) {}; } - const timer = new metrics.Timer("redis.get-prev-docops"); - return rclient.llen(keys.docOps({doc_id}), function(error, length) { - if (error != null) { return callback(error); } - return rclient.get(keys.docVersion({doc_id}), function(error, version) { - if (error != null) { return callback(error); } - version = parseInt(version, 10); - const first_version_in_redis = version - length; + getPreviousDocOps(doc_id, start, end, callback) { + if (callback == null) { + callback = function (error, jsonOps) {} + } + const timer = new metrics.Timer('redis.get-prev-docops') + return rclient.llen(keys.docOps({ doc_id }), function (error, length) { + if (error != null) { + return callback(error) + } + return rclient.get(keys.docVersion({ doc_id }), function ( + error, + version + ) { + if (error != null) { + return callback(error) + } + version = parseInt(version, 10) + const first_version_in_redis = version - length - if ((start < first_version_in_redis) || (end > version)) { - error = new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis"); - logger.warn({err: error, doc_id, length, version, start, end}, "doc ops range is not loaded in redis"); - return callback(error); - } + if (start < first_version_in_redis || end > version) { + error = new Errors.OpRangeNotAvailableError( + 'doc ops range is not loaded in redis' + ) + logger.warn( + { err: error, doc_id, length, version, start, end }, + 'doc ops range is not loaded in redis' + ) + return callback(error) + } - start = start - first_version_in_redis; - if (end > -1) { - end = end - first_version_in_redis; - } + start = start - first_version_in_redis + if (end > -1) { + end = end - first_version_in_redis + } - if (isNaN(start) || isNaN(end)) { - error = new Error("inconsistent version or lengths"); - logger.error({err: error, doc_id, length, version, start, end}, "inconsistent version or length"); - return callback(error); - } + if (isNaN(start) || isNaN(end)) { + error = new Error('inconsistent version or lengths') + logger.error( + { err: error, doc_id, length, version, start, end }, + 'inconsistent version or length' + ) + return callback(error) + } - return rclient.lrange(keys.docOps({doc_id}), start, end, function(error, jsonOps) { - let ops; - if (error != null) { return callback(error); } - try { - ops = jsonOps.map(jsonOp => JSON.parse(jsonOp)); - } 
catch (e) { - return callback(e); - } - const timeSpan = timer.done(); - if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { - error = new Error("redis getPreviousDocOps exceeded timeout"); - return callback(error); - } - return callback(null, ops); - }); - }); - }); - }, + return rclient.lrange(keys.docOps({ doc_id }), start, end, function ( + error, + jsonOps + ) { + let ops + if (error != null) { + return callback(error) + } + try { + ops = jsonOps.map((jsonOp) => JSON.parse(jsonOp)) + } catch (e) { + return callback(e) + } + const timeSpan = timer.done() + if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { + error = new Error('redis getPreviousDocOps exceeded timeout') + return callback(error) + } + return callback(null, ops) + }) + }) + }) + }, - getHistoryType(doc_id, callback) { - if (callback == null) { callback = function(error, projectHistoryType) {}; } - return rclient.get(keys.projectHistoryType({doc_id}), function(error, projectHistoryType) { - if (error != null) { return callback(error); } - return callback(null, projectHistoryType); - }); - }, + getHistoryType(doc_id, callback) { + if (callback == null) { + callback = function (error, projectHistoryType) {} + } + return rclient.get(keys.projectHistoryType({ doc_id }), function ( + error, + projectHistoryType + ) { + if (error != null) { + return callback(error) + } + return callback(null, projectHistoryType) + }) + }, - setHistoryType(doc_id, projectHistoryType, callback) { - if (callback == null) { callback = function(error) {}; } - return rclient.set(keys.projectHistoryType({doc_id}), projectHistoryType, callback); - }, + setHistoryType(doc_id, projectHistoryType, callback) { + if (callback == null) { + callback = function (error) {} + } + return rclient.set( + keys.projectHistoryType({ doc_id }), + projectHistoryType, + callback + ) + }, - DOC_OPS_TTL: 60 * minutes, - DOC_OPS_MAX_LENGTH: 100, - updateDocument(project_id, doc_id, docLines, newVersion, appliedOps, ranges, updateMeta, callback){ - if (appliedOps == null) { appliedOps = []; } - if (callback == null) { callback = function(error) {}; } - return RedisManager.getDocVersion(doc_id, function(error, currentVersion, projectHistoryType) { - if (error != null) { return callback(error); } - if ((currentVersion + appliedOps.length) !== newVersion) { - error = new Error(`Version mismatch. '${doc_id}' is corrupted.`); - logger.error({err: error, doc_id, currentVersion, newVersion, opsLength: appliedOps.length}, "version mismatch"); - return callback(error); - } + DOC_OPS_TTL: 60 * minutes, + DOC_OPS_MAX_LENGTH: 100, + updateDocument( + project_id, + doc_id, + docLines, + newVersion, + appliedOps, + ranges, + updateMeta, + callback + ) { + if (appliedOps == null) { + appliedOps = [] + } + if (callback == null) { + callback = function (error) {} + } + return RedisManager.getDocVersion(doc_id, function ( + error, + currentVersion, + projectHistoryType + ) { + if (error != null) { + return callback(error) + } + if (currentVersion + appliedOps.length !== newVersion) { + error = new Error(`Version mismatch. 
'${doc_id}' is corrupted.`) + logger.error( + { + err: error, + doc_id, + currentVersion, + newVersion, + opsLength: appliedOps.length + }, + 'version mismatch' + ) + return callback(error) + } - const jsonOps = appliedOps.map(op => JSON.stringify(op)); - for (const op of Array.from(jsonOps)) { - if (op.indexOf("\u0000") !== -1) { - error = new Error("null bytes found in jsonOps"); - // this check was added to catch memory corruption in JSON.stringify - logger.error({err: error, doc_id, jsonOps}, error.message); - return callback(error); - } - } + const jsonOps = appliedOps.map((op) => JSON.stringify(op)) + for (const op of Array.from(jsonOps)) { + if (op.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in jsonOps') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, doc_id, jsonOps }, error.message) + return callback(error) + } + } - const newDocLines = JSON.stringify(docLines); - if (newDocLines.indexOf("\u0000") !== -1) { - error = new Error("null bytes found in doc lines"); - // this check was added to catch memory corruption in JSON.stringify - logger.error({err: error, doc_id, newDocLines}, error.message); - return callback(error); - } - const newHash = RedisManager._computeHash(newDocLines); + const newDocLines = JSON.stringify(docLines) + if (newDocLines.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in doc lines') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, doc_id, newDocLines }, error.message) + return callback(error) + } + const newHash = RedisManager._computeHash(newDocLines) - const opVersions = appliedOps.map(op => op != null ? op.v : undefined); - logger.log({doc_id, version: newVersion, hash: newHash, op_versions: opVersions}, "updating doc in redis"); - // record bytes sent to redis in update - metrics.summary("redis.docLines", newDocLines.length, {status: "update"}); - return RedisManager._serializeRanges(ranges, function(error, ranges) { - if (error != null) { - logger.error({err: error, doc_id}, error.message); - return callback(error); - } - if ((ranges != null) && (ranges.indexOf("\u0000") !== -1)) { - error = new Error("null bytes found in ranges"); - // this check was added to catch memory corruption in JSON.stringify - logger.error({err: error, doc_id, ranges}, error.message); - return callback(error); - } - const multi = rclient.multi(); - multi.set(keys.docLines({doc_id}), newDocLines); // index 0 - multi.set(keys.docVersion({doc_id}), newVersion); // index 1 - multi.set(keys.docHash({doc_id}), newHash); // index 2 - multi.ltrim(keys.docOps({doc_id}), -RedisManager.DOC_OPS_MAX_LENGTH, -1); // index 3 - if (ranges != null) { - multi.set(keys.ranges({doc_id}), ranges); // index 4 - } else { - multi.del(keys.ranges({doc_id})); // also index 4 - } - // push the ops last so we can get the lengths at fixed index position 7 - if (jsonOps.length > 0) { - multi.rpush(keys.docOps({doc_id}), ...Array.from(jsonOps)); // index 5 - // expire must come after rpush since before it will be a no-op if the list is empty - multi.expire(keys.docOps({doc_id}), RedisManager.DOC_OPS_TTL); // index 6 - if (projectHistoryType === "project-history") { - metrics.inc('history-queue', 1, {status: 'skip-track-changes'}); - logger.log({doc_id}, "skipping push of uncompressed ops for project using project-history"); - } else { - // project is using old track-changes history service - metrics.inc('history-queue', 1, {status: 'track-changes'}); - 
multi.rpush(historyKeys.uncompressedHistoryOps({doc_id}), ...Array.from(jsonOps)); // index 7 - } - // Set the unflushed timestamp to the current time if the doc - // hasn't been modified before (the content in mongo has been - // valid up to this point). Otherwise leave it alone ("NX" flag). - multi.set(keys.unflushedTime({doc_id}), Date.now(), "NX"); - multi.set(keys.lastUpdatedAt({doc_id}), Date.now()); // index 8 - if ((updateMeta != null ? updateMeta.user_id : undefined)) { - multi.set(keys.lastUpdatedBy({doc_id}), updateMeta.user_id); // index 9 - } else { - multi.del(keys.lastUpdatedBy({doc_id})); // index 9 - } - } - return multi.exec(function(error, result) { - let docUpdateCount; - if (error != null) { return callback(error); } + const opVersions = appliedOps.map((op) => (op != null ? op.v : undefined)) + logger.log( + { doc_id, version: newVersion, hash: newHash, op_versions: opVersions }, + 'updating doc in redis' + ) + // record bytes sent to redis in update + metrics.summary('redis.docLines', newDocLines.length, { + status: 'update' + }) + return RedisManager._serializeRanges(ranges, function (error, ranges) { + if (error != null) { + logger.error({ err: error, doc_id }, error.message) + return callback(error) + } + if (ranges != null && ranges.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in ranges') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, doc_id, ranges }, error.message) + return callback(error) + } + const multi = rclient.multi() + multi.set(keys.docLines({ doc_id }), newDocLines) // index 0 + multi.set(keys.docVersion({ doc_id }), newVersion) // index 1 + multi.set(keys.docHash({ doc_id }), newHash) // index 2 + multi.ltrim( + keys.docOps({ doc_id }), + -RedisManager.DOC_OPS_MAX_LENGTH, + -1 + ) // index 3 + if (ranges != null) { + multi.set(keys.ranges({ doc_id }), ranges) // index 4 + } else { + multi.del(keys.ranges({ doc_id })) // also index 4 + } + // push the ops last so we can get the lengths at fixed index position 7 + if (jsonOps.length > 0) { + multi.rpush(keys.docOps({ doc_id }), ...Array.from(jsonOps)) // index 5 + // expire must come after rpush since before it will be a no-op if the list is empty + multi.expire(keys.docOps({ doc_id }), RedisManager.DOC_OPS_TTL) // index 6 + if (projectHistoryType === 'project-history') { + metrics.inc('history-queue', 1, { status: 'skip-track-changes' }) + logger.log( + { doc_id }, + 'skipping push of uncompressed ops for project using project-history' + ) + } else { + // project is using old track-changes history service + metrics.inc('history-queue', 1, { status: 'track-changes' }) + multi.rpush( + historyKeys.uncompressedHistoryOps({ doc_id }), + ...Array.from(jsonOps) + ) // index 7 + } + // Set the unflushed timestamp to the current time if the doc + // hasn't been modified before (the content in mongo has been + // valid up to this point). Otherwise leave it alone ("NX" flag). + multi.set(keys.unflushedTime({ doc_id }), Date.now(), 'NX') + multi.set(keys.lastUpdatedAt({ doc_id }), Date.now()) // index 8 + if (updateMeta != null ? 
updateMeta.user_id : undefined) { + multi.set(keys.lastUpdatedBy({ doc_id }), updateMeta.user_id) // index 9 + } else { + multi.del(keys.lastUpdatedBy({ doc_id })) // index 9 + } + } + return multi.exec(function (error, result) { + let docUpdateCount + if (error != null) { + return callback(error) + } - if (projectHistoryType === 'project-history') { - docUpdateCount = undefined; // only using project history, don't bother with track-changes - } else { - // project is using old track-changes history service - docUpdateCount = result[7]; // length of uncompressedHistoryOps queue (index 7) - } + if (projectHistoryType === 'project-history') { + docUpdateCount = undefined // only using project history, don't bother with track-changes + } else { + // project is using old track-changes history service + docUpdateCount = result[7] // length of uncompressedHistoryOps queue (index 7) + } - if ((jsonOps.length > 0) && __guard__(Settings.apis != null ? Settings.apis.project_history : undefined, x => x.enabled)) { - metrics.inc('history-queue', 1, {status: 'project-history'}); - return ProjectHistoryRedisManager.queueOps(project_id, ...Array.from(jsonOps), (error, projectUpdateCount) => callback(null, docUpdateCount, projectUpdateCount)); - } else { - return callback(null, docUpdateCount); - } - }); - }); - }); - }, + if ( + jsonOps.length > 0 && + __guard__( + Settings.apis != null ? Settings.apis.project_history : undefined, + (x) => x.enabled + ) + ) { + metrics.inc('history-queue', 1, { status: 'project-history' }) + return ProjectHistoryRedisManager.queueOps( + project_id, + ...Array.from(jsonOps), + (error, projectUpdateCount) => + callback(null, docUpdateCount, projectUpdateCount) + ) + } else { + return callback(null, docUpdateCount) + } + }) + }) + }) + }, - renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback) { - if (callback == null) { callback = function(error) {}; } - return RedisManager.getDoc(project_id, doc_id, function(error, lines, version) { - if (error != null) { return callback(error); } + renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback) { + if (callback == null) { + callback = function (error) {} + } + return RedisManager.getDoc(project_id, doc_id, function ( + error, + lines, + version + ) { + if (error != null) { + return callback(error) + } - if ((lines != null) && (version != null)) { - return rclient.set(keys.pathname({doc_id}), update.newPathname, function(error) { - if (error != null) { return callback(error); } - return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback); - }); - } else { - return ProjectHistoryRedisManager.queueRenameEntity(project_id, projectHistoryId, 'doc', doc_id, user_id, update, callback); - } - }); - }, + if (lines != null && version != null) { + return rclient.set( + keys.pathname({ doc_id }), + update.newPathname, + function (error) { + if (error != null) { + return callback(error) + } + return ProjectHistoryRedisManager.queueRenameEntity( + project_id, + projectHistoryId, + 'doc', + doc_id, + user_id, + update, + callback + ) + } + ) + } else { + return ProjectHistoryRedisManager.queueRenameEntity( + project_id, + projectHistoryId, + 'doc', + doc_id, + user_id, + update, + callback + ) + } + }) + }, - clearUnflushedTime(doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - return rclient.del(keys.unflushedTime({doc_id}), callback); - }, + clearUnflushedTime(doc_id, callback) { + if (callback == null) { + 
callback = function (error) {} + } + return rclient.del(keys.unflushedTime({ doc_id }), callback) + }, - getDocIdsInProject(project_id, callback) { - if (callback == null) { callback = function(error, doc_ids) {}; } - return rclient.smembers(keys.docsInProject({project_id}), callback); - }, + getDocIdsInProject(project_id, callback) { + if (callback == null) { + callback = function (error, doc_ids) {} + } + return rclient.smembers(keys.docsInProject({ project_id }), callback) + }, - getDocTimestamps(doc_ids, callback) { - // get lastupdatedat timestamps for an array of doc_ids - if (callback == null) { callback = function(error, result) {}; } - return async.mapSeries(doc_ids, (doc_id, cb) => rclient.get(keys.lastUpdatedAt({doc_id}), cb) - , callback); - }, + getDocTimestamps(doc_ids, callback) { + // get lastupdatedat timestamps for an array of doc_ids + if (callback == null) { + callback = function (error, result) {} + } + return async.mapSeries( + doc_ids, + (doc_id, cb) => rclient.get(keys.lastUpdatedAt({ doc_id }), cb), + callback + ) + }, - queueFlushAndDeleteProject(project_id, callback) { - // store the project id in a sorted set ordered by time with a random offset to smooth out spikes - const SMOOTHING_OFFSET = Settings.smoothingOffset > 0 ? Math.round(Settings.smoothingOffset * Math.random()) : 0; - return rclient.zadd(keys.flushAndDeleteQueue(), Date.now() + SMOOTHING_OFFSET, project_id, callback); - }, + queueFlushAndDeleteProject(project_id, callback) { + // store the project id in a sorted set ordered by time with a random offset to smooth out spikes + const SMOOTHING_OFFSET = + Settings.smoothingOffset > 0 + ? Math.round(Settings.smoothingOffset * Math.random()) + : 0 + return rclient.zadd( + keys.flushAndDeleteQueue(), + Date.now() + SMOOTHING_OFFSET, + project_id, + callback + ) + }, - getNextProjectToFlushAndDelete(cutoffTime, callback) { - // find the oldest queued flush that is before the cutoff time - if (callback == null) { callback = function(error, key, timestamp){}; } - return rclient.zrangebyscore(keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, function(err, reply) { - if (err != null) { return callback(err); } - if (!(reply != null ? reply.length : undefined)) { return callback(); } // return if no projects ready to be processed - // pop the oldest entry (get and remove in a multi) - const multi = rclient.multi(); - // Poor man's version of ZPOPMIN, which is only available in Redis 5. - multi.zrange(keys.flushAndDeleteQueue(), 0, 0, "WITHSCORES"); - multi.zremrangebyrank(keys.flushAndDeleteQueue(), 0, 0); - multi.zcard(keys.flushAndDeleteQueue()); // the total length of the queue (for metrics) - return multi.exec(function(err, reply) { - if (err != null) { return callback(err); } - if (!(reply != null ? reply.length : undefined)) { return callback(); } - const [key, timestamp] = Array.from(reply[0]); - const queueLength = reply[2]; - return callback(null, key, timestamp, queueLength); - }); - }); - }, + getNextProjectToFlushAndDelete(cutoffTime, callback) { + // find the oldest queued flush that is before the cutoff time + if (callback == null) { + callback = function (error, key, timestamp) {} + } + return rclient.zrangebyscore( + keys.flushAndDeleteQueue(), + 0, + cutoffTime, + 'WITHSCORES', + 'LIMIT', + 0, + 1, + function (err, reply) { + if (err != null) { + return callback(err) + } + if (!(reply != null ? 
reply.length : undefined)) {
+ return callback()
+ } // return if no projects ready to be processed
+ // pop the oldest entry (get and remove in a multi)
+ const multi = rclient.multi()
+ // Poor man's version of ZPOPMIN, which is only available in Redis 5.
+ multi.zrange(keys.flushAndDeleteQueue(), 0, 0, 'WITHSCORES')
+ multi.zremrangebyrank(keys.flushAndDeleteQueue(), 0, 0)
+ multi.zcard(keys.flushAndDeleteQueue()) // the total length of the queue (for metrics)
+ return multi.exec(function (err, reply) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (!(reply != null ? reply.length : undefined)) {
+ return callback()
+ }
+ const [key, timestamp] = Array.from(reply[0])
+ const queueLength = reply[2]
+ return callback(null, key, timestamp, queueLength)
+ })
+ }
+ )
+ },
- _serializeRanges(ranges, callback) {
- if (callback == null) { callback = function(error, serializedRanges) {}; }
- let jsonRanges = JSON.stringify(ranges);
- if ((jsonRanges != null) && (jsonRanges.length > MAX_RANGES_SIZE)) {
- return callback(new Error("ranges are too large"));
- }
- if (jsonRanges === '{}') {
- // Most doc will have empty ranges so don't fill redis with lots of '{}' keys
- jsonRanges = null;
- }
- return callback(null, jsonRanges);
- },
+ _serializeRanges(ranges, callback) {
+ if (callback == null) {
+ callback = function (error, serializedRanges) {}
+ }
+ let jsonRanges = JSON.stringify(ranges)
+ if (jsonRanges != null && jsonRanges.length > MAX_RANGES_SIZE) {
+ return callback(new Error('ranges are too large'))
+ }
+ if (jsonRanges === '{}') {
+ // Most docs will have empty ranges so don't fill redis with lots of '{}' keys
+ jsonRanges = null
+ }
+ return callback(null, jsonRanges)
+ },
- _deserializeRanges(ranges) {
- if ((ranges == null) || (ranges === "")) {
- return {};
- } else {
- return JSON.parse(ranges);
- }
- },
+ _deserializeRanges(ranges) {
+ if (ranges == null || ranges === '') {
+ return {}
+ } else {
+ return JSON.parse(ranges)
+ }
+ },
- _computeHash(docLines) {
- // use sha1 checksum of doclines to detect data corruption.
- //
- // note: must specify 'utf8' encoding explicitly, as the default is
- // binary in node < v5
- return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex');
- }
-});
+ _computeHash(docLines) {
+ // use sha1 checksum of doclines to detect data corruption.
+ //
+ // note: must specify 'utf8' encoding explicitly, as the default is
+ // binary in node < v5
+ return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex')
+ }
+}
 function __guard__(value, transform) {
- return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
-}
\ No newline at end of file
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/document-updater/app/js/ShareJsDB.js b/services/document-updater/app/js/ShareJsDB.js
index 20bf42919f..2339eefab6 100644
--- a/services/document-updater/app/js/ShareJsDB.js
+++ b/services/document-updater/app/js/ShareJsDB.js
@@ -11,60 +11,75 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-let ShareJsDB;
-const Keys = require('./UpdateKeys');
-const RedisManager = require("./RedisManager");
-const Errors = require("./Errors");
+let ShareJsDB
+const Keys = require('./UpdateKeys')
+const RedisManager = require('./RedisManager')
+const Errors = require('./Errors')
-module.exports = (ShareJsDB = class ShareJsDB {
- constructor(project_id, doc_id, lines, version) {
- this.project_id = project_id;
- this.doc_id = doc_id;
- this.lines = lines;
- this.version = version;
- this.appliedOps = {};
- // ShareJS calls this detacted from the instance, so we need
- // bind it to keep our context that can access @appliedOps
- this.writeOp = this._writeOp.bind(this);
- }
-
- getOps(doc_key, start, end, callback) {
- if (start === end) {
- return callback(null, []);
- }
+module.exports = ShareJsDB = class ShareJsDB {
+ constructor(project_id, doc_id, lines, version) {
+ this.project_id = project_id
+ this.doc_id = doc_id
+ this.lines = lines
+ this.version = version
+ this.appliedOps = {}
+ // ShareJS calls this detached from the instance, so we need to
+ // bind it to keep our context that can access @appliedOps
+ this.writeOp = this._writeOp.bind(this)
+ }
- // In redis, lrange values are inclusive.
- if (end != null) {
- end--;
- } else {
- end = -1;
- }
+ getOps(doc_key, start, end, callback) {
+ if (start === end) {
+ return callback(null, [])
+ }
- const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key));
- return RedisManager.getPreviousDocOps(doc_id, start, end, callback);
- }
-
- _writeOp(doc_key, opData, callback) {
- if (this.appliedOps[doc_key] == null) { this.appliedOps[doc_key] = []; }
- this.appliedOps[doc_key].push(opData);
- return callback();
- }
+ // In redis, lrange values are inclusive.
+ if (end != null) {
+ end--
+ } else {
+ end = -1
+ }
- getSnapshot(doc_key, callback) {
- if (doc_key !== Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)) {
- return callback(new Errors.NotFoundError(`unexpected doc_key ${doc_key}, expected ${Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)}`));
- } else {
- return callback(null, {
- snapshot: this.lines.join("\n"),
- v: parseInt(this.version, 10),
- type: "text"
- });
- }
- }
+ const [project_id, doc_id] = Array.from(
+ Keys.splitProjectIdAndDocId(doc_key)
+ )
+ return RedisManager.getPreviousDocOps(doc_id, start, end, callback)
+ }
- // To be able to remove a doc from the ShareJS memory
- // we need to called Model::delete, which calls this
- // method on the database. However, we will handle removing
However, we will handle removing - // it from Redis ourselves - delete(docName, dbMeta, callback) { return callback(); } -}); + _writeOp(doc_key, opData, callback) { + if (this.appliedOps[doc_key] == null) { + this.appliedOps[doc_key] = [] + } + this.appliedOps[doc_key].push(opData) + return callback() + } + + getSnapshot(doc_key, callback) { + if ( + doc_key !== Keys.combineProjectIdAndDocId(this.project_id, this.doc_id) + ) { + return callback( + new Errors.NotFoundError( + `unexpected doc_key ${doc_key}, expected ${Keys.combineProjectIdAndDocId( + this.project_id, + this.doc_id + )}` + ) + ) + } else { + return callback(null, { + snapshot: this.lines.join('\n'), + v: parseInt(this.version, 10), + type: 'text' + }) + } + } + + // To be able to remove a doc from the ShareJS memory + // we need to called Model::delete, which calls this + // method on the database. However, we will handle removing + // it from Redis ourselves + delete(docName, dbMeta, callback) { + return callback() + } +} diff --git a/services/document-updater/app/js/ShareJsUpdateManager.js b/services/document-updater/app/js/ShareJsUpdateManager.js index 574a5127fa..607ae2d9fa 100644 --- a/services/document-updater/app/js/ShareJsUpdateManager.js +++ b/services/document-updater/app/js/ShareJsUpdateManager.js @@ -12,98 +12,120 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let ShareJsUpdateManager; -const ShareJsModel = require("./sharejs/server/model"); -const ShareJsDB = require("./ShareJsDB"); -const logger = require("logger-sharelatex"); -const Settings = require('settings-sharelatex'); -const Keys = require("./UpdateKeys"); -const {EventEmitter} = require("events"); -const util = require("util"); -const RealTimeRedisManager = require("./RealTimeRedisManager"); -const crypto = require("crypto"); -const metrics = require('./Metrics'); -const Errors = require("./Errors"); +let ShareJsUpdateManager +const ShareJsModel = require('./sharejs/server/model') +const ShareJsDB = require('./ShareJsDB') +const logger = require('logger-sharelatex') +const Settings = require('settings-sharelatex') +const Keys = require('./UpdateKeys') +const { EventEmitter } = require('events') +const util = require('util') +const RealTimeRedisManager = require('./RealTimeRedisManager') +const crypto = require('crypto') +const metrics = require('./Metrics') +const Errors = require('./Errors') -ShareJsModel.prototype = {}; -util.inherits(ShareJsModel, EventEmitter); +ShareJsModel.prototype = {} +util.inherits(ShareJsModel, EventEmitter) -const MAX_AGE_OF_OP = 80; +const MAX_AGE_OF_OP = 80 -module.exports = (ShareJsUpdateManager = { - getNewShareJsModel(project_id, doc_id, lines, version) { - const db = new ShareJsDB(project_id, doc_id, lines, version); - const model = new ShareJsModel(db, {maxDocLength: Settings.max_doc_length, maximumAge: MAX_AGE_OF_OP}); - model.db = db; - return model; - }, +module.exports = ShareJsUpdateManager = { + getNewShareJsModel(project_id, doc_id, lines, version) { + const db = new ShareJsDB(project_id, doc_id, lines, version) + const model = new ShareJsModel(db, { + maxDocLength: Settings.max_doc_length, + maximumAge: MAX_AGE_OF_OP + }) + model.db = db + return model + }, - applyUpdate(project_id, doc_id, update, lines, version, callback) { - if (callback == null) { callback = function(error, updatedDocLines) {}; } - logger.log({project_id, doc_id, update}, "applying sharejs updates"); - const jobs = []; - // record the 
update version before it is modified - const incomingUpdateVersion = update.v; - // We could use a global model for all docs, but we're hitting issues with the - // internal state of ShareJS not being accessible for clearing caches, and - // getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee) - // This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on - // my 2009 MBP). - const model = this.getNewShareJsModel(project_id, doc_id, lines, version); - this._listenForOps(model); - const doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id); - return model.applyOp(doc_key, update, function(error) { - if (error != null) { - if (error === "Op already submitted") { - metrics.inc("sharejs.already-submitted"); - logger.warn({project_id, doc_id, update}, "op has already been submitted"); - update.dup = true; - ShareJsUpdateManager._sendOp(project_id, doc_id, update); - } else if (/^Delete component/.test(error)) { - metrics.inc("sharejs.delete-mismatch"); - logger.warn({project_id, doc_id, update, shareJsErr: error}, "sharejs delete does not match"); - error = new Errors.DeleteMismatchError("Delete component does not match"); - return callback(error); - } else { - metrics.inc("sharejs.other-error"); - return callback(error); - } - } - logger.log({project_id, doc_id, error}, "applied update"); - return model.getSnapshot(doc_key, (error, data) => { - if (error != null) { return callback(error); } - // only check hash when present and no other updates have been applied - if ((update.hash != null) && (incomingUpdateVersion === version)) { - const ourHash = ShareJsUpdateManager._computeHash(data.snapshot); - if (ourHash !== update.hash) { - metrics.inc("sharejs.hash-fail"); - return callback(new Error("Invalid hash")); - } else { - metrics.inc("sharejs.hash-pass", 0.001); - } - } - const docLines = data.snapshot.split(/\r\n|\n|\r/); - return callback(null, docLines, data.v, model.db.appliedOps[doc_key] || []); - }); - }); - }, + applyUpdate(project_id, doc_id, update, lines, version, callback) { + if (callback == null) { + callback = function (error, updatedDocLines) {} + } + logger.log({ project_id, doc_id, update }, 'applying sharejs updates') + const jobs = [] + // record the update version before it is modified + const incomingUpdateVersion = update.v + // We could use a global model for all docs, but we're hitting issues with the + // internal state of ShareJS not being accessible for clearing caches, and + // getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee) + // This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on + // my 2009 MBP). 
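// A minimal, self-contained sketch (not part of this patch) of the snapshot
// hash check performed after applyOp, matching _computeHash further down this
// hunk. Assumes only Node's built-in crypto; names here are invented.
const crypto = require('crypto')

// Git-style blob header followed by the content, as in _computeHash below.
function computeDocHash(content) {
  return crypto
    .createHash('sha1')
    .update('blob ' + content.length + '\x00')
    .update(content, 'utf8')
    .digest('hex')
}

// The client attaches update.hash to its op; when no other updates have been
// applied (incomingUpdateVersion === version), the server recomputes the hash
// of the resulting snapshot and rejects the update if the two diverge.
const serverSnapshot = 'Hello\nworld'
const clientHash = computeDocHash('Hello\nworld')
console.log(computeDocHash(serverSnapshot) === clientHash) // true -> accept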
+ const model = this.getNewShareJsModel(project_id, doc_id, lines, version) + this._listenForOps(model) + const doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id) + return model.applyOp(doc_key, update, function (error) { + if (error != null) { + if (error === 'Op already submitted') { + metrics.inc('sharejs.already-submitted') + logger.warn( + { project_id, doc_id, update }, + 'op has already been submitted' + ) + update.dup = true + ShareJsUpdateManager._sendOp(project_id, doc_id, update) + } else if (/^Delete component/.test(error)) { + metrics.inc('sharejs.delete-mismatch') + logger.warn( + { project_id, doc_id, update, shareJsErr: error }, + 'sharejs delete does not match' + ) + error = new Errors.DeleteMismatchError( + 'Delete component does not match' + ) + return callback(error) + } else { + metrics.inc('sharejs.other-error') + return callback(error) + } + } + logger.log({ project_id, doc_id, error }, 'applied update') + return model.getSnapshot(doc_key, (error, data) => { + if (error != null) { + return callback(error) + } + // only check hash when present and no other updates have been applied + if (update.hash != null && incomingUpdateVersion === version) { + const ourHash = ShareJsUpdateManager._computeHash(data.snapshot) + if (ourHash !== update.hash) { + metrics.inc('sharejs.hash-fail') + return callback(new Error('Invalid hash')) + } else { + metrics.inc('sharejs.hash-pass', 0.001) + } + } + const docLines = data.snapshot.split(/\r\n|\n|\r/) + return callback( + null, + docLines, + data.v, + model.db.appliedOps[doc_key] || [] + ) + }) + }) + }, - _listenForOps(model) { - return model.on("applyOp", function(doc_key, opData) { - const [project_id, doc_id] = Array.from(Keys.splitProjectIdAndDocId(doc_key)); - return ShareJsUpdateManager._sendOp(project_id, doc_id, opData); - }); - }, - - _sendOp(project_id, doc_id, op) { - return RealTimeRedisManager.sendData({project_id, doc_id, op}); - }, + _listenForOps(model) { + return model.on('applyOp', function (doc_key, opData) { + const [project_id, doc_id] = Array.from( + Keys.splitProjectIdAndDocId(doc_key) + ) + return ShareJsUpdateManager._sendOp(project_id, doc_id, opData) + }) + }, - _computeHash(content) { - return crypto.createHash('sha1') - .update("blob " + content.length + "\x00") - .update(content, 'utf8') - .digest('hex'); - } -}); + _sendOp(project_id, doc_id, op) { + return RealTimeRedisManager.sendData({ project_id, doc_id, op }) + }, + _computeHash(content) { + return crypto + .createHash('sha1') + .update('blob ' + content.length + '\x00') + .update(content, 'utf8') + .digest('hex') + } +} diff --git a/services/document-updater/app/js/SnapshotManager.js b/services/document-updater/app/js/SnapshotManager.js index ca03be85d6..728ee73626 100644 --- a/services/document-updater/app/js/SnapshotManager.js +++ b/services/document-updater/app/js/SnapshotManager.js @@ -11,58 +11,77 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let SnapshotManager; -const {db, ObjectId} = require("./mongojs"); +let SnapshotManager +const { db, ObjectId } = require('./mongojs') -module.exports = (SnapshotManager = { - recordSnapshot(project_id, doc_id, version, pathname, lines, ranges, callback) { +module.exports = SnapshotManager = { + recordSnapshot( + project_id, + doc_id, + version, + pathname, + lines, + ranges, + callback + ) { try { - project_id = ObjectId(project_id); - doc_id = ObjectId(doc_id); + project_id = 
ObjectId(project_id) + doc_id = ObjectId(doc_id) } catch (error) { - return callback(error); + return callback(error) } - return db.docSnapshots.insert({ - project_id, doc_id, version, lines, pathname, - ranges: SnapshotManager.jsonRangesToMongo(ranges), - ts: new Date() - }, callback); + return db.docSnapshots.insert( + { + project_id, + doc_id, + version, + lines, + pathname, + ranges: SnapshotManager.jsonRangesToMongo(ranges), + ts: new Date() + }, + callback + ) }, - // Suggested indexes: - // db.docSnapshots.createIndex({project_id:1, doc_id:1}) - // db.docSnapshots.createIndex({ts:1},{expiresAfterSeconds: 30*24*3600)) # expires after 30 days + // Suggested indexes: + // db.docSnapshots.createIndex({project_id:1, doc_id:1}) + // db.docSnapshots.createIndex({ts:1},{expiresAfterSeconds: 30*24*3600)) # expires after 30 days jsonRangesToMongo(ranges) { - if ((ranges == null)) { return null; } - - const updateMetadata = function(metadata) { + if (ranges == null) { + return null + } + + const updateMetadata = function (metadata) { if ((metadata != null ? metadata.ts : undefined) != null) { - metadata.ts = new Date(metadata.ts); + metadata.ts = new Date(metadata.ts) } if ((metadata != null ? metadata.user_id : undefined) != null) { - return metadata.user_id = SnapshotManager._safeObjectId(metadata.user_id); + return (metadata.user_id = SnapshotManager._safeObjectId( + metadata.user_id + )) } - }; - + } + for (const change of Array.from(ranges.changes || [])) { - change.id = SnapshotManager._safeObjectId(change.id); - updateMetadata(change.metadata); + change.id = SnapshotManager._safeObjectId(change.id) + updateMetadata(change.metadata) } for (const comment of Array.from(ranges.comments || [])) { - comment.id = SnapshotManager._safeObjectId(comment.id); + comment.id = SnapshotManager._safeObjectId(comment.id) if ((comment.op != null ? comment.op.t : undefined) != null) { - comment.op.t = SnapshotManager._safeObjectId(comment.op.t); + comment.op.t = SnapshotManager._safeObjectId(comment.op.t) } - updateMetadata(comment.metadata); + updateMetadata(comment.metadata) } - return ranges; + return ranges }, _safeObjectId(data) { try { - return ObjectId(data); + return ObjectId(data) } catch (error) { - return data; + return data } } -}); +} diff --git a/services/document-updater/app/js/UpdateKeys.js b/services/document-updater/app/js/UpdateKeys.js index bcafb807dc..8710272cfb 100644 --- a/services/document-updater/app/js/UpdateKeys.js +++ b/services/document-updater/app/js/UpdateKeys.js @@ -4,6 +4,10 @@ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. 
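// An illustrative note (not part of this patch): the suggested indexes quoted
// above in SnapshotManager.js contain two slips if pasted into a mongo shell:
// the options object closes with ')' instead of '}', and MongoDB's TTL option
// is spelled expireAfterSeconds, not expiresAfterSeconds. A corrected sketch,
// keeping the collection name and 30-day expiry from the original comment:
// db.docSnapshots.createIndex({ project_id: 1, doc_id: 1 })
// db.docSnapshots.createIndex({ ts: 1 }, { expireAfterSeconds: 30 * 24 * 3600 })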
module.exports = { - combineProjectIdAndDocId(project_id, doc_id) { return `${project_id}:${doc_id}`; }, - splitProjectIdAndDocId(project_and_doc_id) { return project_and_doc_id.split(":"); } -}; + combineProjectIdAndDocId(project_id, doc_id) { + return `${project_id}:${doc_id}` + }, + splitProjectIdAndDocId(project_and_doc_id) { + return project_and_doc_id.split(':') + } +} diff --git a/services/document-updater/app/js/UpdateManager.js b/services/document-updater/app/js/UpdateManager.js index de1656a336..e92e40f44c 100644 --- a/services/document-updater/app/js/UpdateManager.js +++ b/services/document-updater/app/js/UpdateManager.js @@ -14,226 +14,399 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let UpdateManager; -const LockManager = require("./LockManager"); -const RedisManager = require("./RedisManager"); -const RealTimeRedisManager = require("./RealTimeRedisManager"); -const ShareJsUpdateManager = require("./ShareJsUpdateManager"); -const HistoryManager = require("./HistoryManager"); -const Settings = require('settings-sharelatex'); -const _ = require("lodash"); -const async = require("async"); -const logger = require('logger-sharelatex'); -const Metrics = require("./Metrics"); -const Errors = require("./Errors"); -const DocumentManager = require("./DocumentManager"); -const RangesManager = require("./RangesManager"); -const SnapshotManager = require("./SnapshotManager"); -const Profiler = require("./Profiler"); +let UpdateManager +const LockManager = require('./LockManager') +const RedisManager = require('./RedisManager') +const RealTimeRedisManager = require('./RealTimeRedisManager') +const ShareJsUpdateManager = require('./ShareJsUpdateManager') +const HistoryManager = require('./HistoryManager') +const Settings = require('settings-sharelatex') +const _ = require('lodash') +const async = require('async') +const logger = require('logger-sharelatex') +const Metrics = require('./Metrics') +const Errors = require('./Errors') +const DocumentManager = require('./DocumentManager') +const RangesManager = require('./RangesManager') +const SnapshotManager = require('./SnapshotManager') +const Profiler = require('./Profiler') -module.exports = (UpdateManager = { - processOutstandingUpdates(project_id, doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - const timer = new Metrics.Timer("updateManager.processOutstandingUpdates"); - return UpdateManager.fetchAndApplyUpdates(project_id, doc_id, function(error) { - timer.done(); - if (error != null) { return callback(error); } - return callback(); - }); - }, +module.exports = UpdateManager = { + processOutstandingUpdates(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + const timer = new Metrics.Timer('updateManager.processOutstandingUpdates') + return UpdateManager.fetchAndApplyUpdates(project_id, doc_id, function ( + error + ) { + timer.done() + if (error != null) { + return callback(error) + } + return callback() + }) + }, - processOutstandingUpdatesWithLock(project_id, doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - const profile = new Profiler("processOutstandingUpdatesWithLock", {project_id, doc_id}); - return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => { - if (error != null) { return callback(error); } - if (!gotLock) { return callback(); } - profile.log("tryLock"); - return 
UpdateManager.processOutstandingUpdates(project_id, doc_id, function(error) { - if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); } - profile.log("processOutstandingUpdates"); - return LockManager.releaseLock(doc_id, lockValue, error => { - if (error != null) { return callback(error); } - profile.log("releaseLock").end(); - return UpdateManager.continueProcessingUpdatesWithLock(project_id, doc_id, callback); - }); - }); - }); - }, + processOutstandingUpdatesWithLock(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + const profile = new Profiler('processOutstandingUpdatesWithLock', { + project_id, + doc_id + }) + return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => { + if (error != null) { + return callback(error) + } + if (!gotLock) { + return callback() + } + profile.log('tryLock') + return UpdateManager.processOutstandingUpdates( + project_id, + doc_id, + function (error) { + if (error != null) { + return UpdateManager._handleErrorInsideLock( + doc_id, + lockValue, + error, + callback + ) + } + profile.log('processOutstandingUpdates') + return LockManager.releaseLock(doc_id, lockValue, (error) => { + if (error != null) { + return callback(error) + } + profile.log('releaseLock').end() + return UpdateManager.continueProcessingUpdatesWithLock( + project_id, + doc_id, + callback + ) + }) + } + ) + }) + }, - continueProcessingUpdatesWithLock(project_id, doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - return RealTimeRedisManager.getUpdatesLength(doc_id, (error, length) => { - if (error != null) { return callback(error); } - if (length > 0) { - return UpdateManager.processOutstandingUpdatesWithLock(project_id, doc_id, callback); - } else { - return callback(); - } - }); - }, + continueProcessingUpdatesWithLock(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return RealTimeRedisManager.getUpdatesLength(doc_id, (error, length) => { + if (error != null) { + return callback(error) + } + if (length > 0) { + return UpdateManager.processOutstandingUpdatesWithLock( + project_id, + doc_id, + callback + ) + } else { + return callback() + } + }) + }, - fetchAndApplyUpdates(project_id, doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - const profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id}); - return RealTimeRedisManager.getPendingUpdatesForDoc(doc_id, (error, updates) => { - if (error != null) { return callback(error); } - logger.log({project_id, doc_id, count: updates.length}, "processing updates"); - if (updates.length === 0) { - return callback(); - } - profile.log("getPendingUpdatesForDoc"); - const doUpdate = (update, cb) => UpdateManager.applyUpdate(project_id, doc_id, update, function(err) { - profile.log("applyUpdate"); - return cb(err); - }); - const finalCallback = function(err) { - profile.log("async done").end(); - return callback(err); - }; - return async.eachSeries(updates, doUpdate, finalCallback); - }); - }, + fetchAndApplyUpdates(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + const profile = new Profiler('fetchAndApplyUpdates', { project_id, doc_id }) + return RealTimeRedisManager.getPendingUpdatesForDoc( + doc_id, + (error, updates) => { + if (error != null) { + return callback(error) + } + logger.log( + { project_id, doc_id, count: updates.length }, + 'processing updates' + ) + if (updates.length === 
0) { + return callback() + } + profile.log('getPendingUpdatesForDoc') + const doUpdate = (update, cb) => + UpdateManager.applyUpdate(project_id, doc_id, update, function (err) { + profile.log('applyUpdate') + return cb(err) + }) + const finalCallback = function (err) { + profile.log('async done').end() + return callback(err) + } + return async.eachSeries(updates, doUpdate, finalCallback) + } + ) + }, - applyUpdate(project_id, doc_id, update, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const callback = function(error) { - if (error != null) { - RealTimeRedisManager.sendData({project_id, doc_id, error: error.message || error}); - profile.log("sendData"); - } - profile.end(); - return _callback(error); - }; + applyUpdate(project_id, doc_id, update, _callback) { + if (_callback == null) { + _callback = function (error) {} + } + const callback = function (error) { + if (error != null) { + RealTimeRedisManager.sendData({ + project_id, + doc_id, + error: error.message || error + }) + profile.log('sendData') + } + profile.end() + return _callback(error) + } - var profile = new Profiler("applyUpdate", {project_id, doc_id}); - UpdateManager._sanitizeUpdate(update); - profile.log("sanitizeUpdate"); - return DocumentManager.getDoc(project_id, doc_id, function(error, lines, version, ranges, pathname, projectHistoryId) { - profile.log("getDoc"); - if (error != null) { return callback(error); } - if ((lines == null) || (version == null)) { - return callback(new Errors.NotFoundError(`document not found: ${doc_id}`)); - } - const previousVersion = version; - return ShareJsUpdateManager.applyUpdate(project_id, doc_id, update, lines, version, function(error, updatedDocLines, version, appliedOps) { - profile.log("sharejs.applyUpdate"); - if (error != null) { return callback(error); } - return RangesManager.applyUpdate(project_id, doc_id, ranges, appliedOps, updatedDocLines, function(error, new_ranges, ranges_were_collapsed) { - UpdateManager._addProjectHistoryMetadataToOps(appliedOps, pathname, projectHistoryId, lines); - profile.log("RangesManager.applyUpdate"); - if (error != null) { return callback(error); } - return RedisManager.updateDocument(project_id, doc_id, updatedDocLines, version, appliedOps, new_ranges, update.meta, function(error, doc_ops_length, project_ops_length) { - profile.log("RedisManager.updateDocument"); - if (error != null) { return callback(error); } - return HistoryManager.recordAndFlushHistoryOps(project_id, doc_id, appliedOps, doc_ops_length, project_ops_length, function(error) { - profile.log("recordAndFlushHistoryOps"); - if (error != null) { return callback(error); } - if (ranges_were_collapsed) { - logger.log({project_id, doc_id, previousVersion, lines, ranges, update}, "update collapsed some ranges, snapshotting previous content"); - // Do this last, since it's a mongo call, and so potentially longest running - // If it overruns the lock, it's ok, since all of our redis work is done - return SnapshotManager.recordSnapshot(project_id, doc_id, previousVersion, pathname, lines, ranges, function(error) { - if (error != null) { - logger.error({err: error, project_id, doc_id, version, lines, ranges}, "error recording snapshot"); - return callback(error); - } else { - return callback(); - } - }); - } else { - return callback(); - } - }); - }); - }); - }); - }); - }, + var profile = new Profiler('applyUpdate', { project_id, doc_id }) + UpdateManager._sanitizeUpdate(update) + profile.log('sanitizeUpdate') + return DocumentManager.getDoc(project_id, 
doc_id, function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId + ) { + profile.log('getDoc') + if (error != null) { + return callback(error) + } + if (lines == null || version == null) { + return callback( + new Errors.NotFoundError(`document not found: ${doc_id}`) + ) + } + const previousVersion = version + return ShareJsUpdateManager.applyUpdate( + project_id, + doc_id, + update, + lines, + version, + function (error, updatedDocLines, version, appliedOps) { + profile.log('sharejs.applyUpdate') + if (error != null) { + return callback(error) + } + return RangesManager.applyUpdate( + project_id, + doc_id, + ranges, + appliedOps, + updatedDocLines, + function (error, new_ranges, ranges_were_collapsed) { + UpdateManager._addProjectHistoryMetadataToOps( + appliedOps, + pathname, + projectHistoryId, + lines + ) + profile.log('RangesManager.applyUpdate') + if (error != null) { + return callback(error) + } + return RedisManager.updateDocument( + project_id, + doc_id, + updatedDocLines, + version, + appliedOps, + new_ranges, + update.meta, + function (error, doc_ops_length, project_ops_length) { + profile.log('RedisManager.updateDocument') + if (error != null) { + return callback(error) + } + return HistoryManager.recordAndFlushHistoryOps( + project_id, + doc_id, + appliedOps, + doc_ops_length, + project_ops_length, + function (error) { + profile.log('recordAndFlushHistoryOps') + if (error != null) { + return callback(error) + } + if (ranges_were_collapsed) { + logger.log( + { + project_id, + doc_id, + previousVersion, + lines, + ranges, + update + }, + 'update collapsed some ranges, snapshotting previous content' + ) + // Do this last, since it's a mongo call, and so potentially longest running + // If it overruns the lock, it's ok, since all of our redis work is done + return SnapshotManager.recordSnapshot( + project_id, + doc_id, + previousVersion, + pathname, + lines, + ranges, + function (error) { + if (error != null) { + logger.error( + { + err: error, + project_id, + doc_id, + version, + lines, + ranges + }, + 'error recording snapshot' + ) + return callback(error) + } else { + return callback() + } + } + ) + } else { + return callback() + } + } + ) + } + ) + } + ) + } + ) + }) + }, - lockUpdatesAndDo(method, project_id, doc_id, ...rest) { - const adjustedLength = Math.max(rest.length, 1); const args = rest.slice(0, adjustedLength - 1); const callback = rest[adjustedLength - 1]; - const profile = new Profiler("lockUpdatesAndDo", {project_id, doc_id}); - return LockManager.getLock(doc_id, function(error, lockValue) { - profile.log("getLock"); - if (error != null) { return callback(error); } - return UpdateManager.processOutstandingUpdates(project_id, doc_id, function(error) { - if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); } - profile.log("processOutstandingUpdates"); - return method(project_id, doc_id, ...Array.from(args), function(error, ...response_args) { - if (error != null) { return UpdateManager._handleErrorInsideLock(doc_id, lockValue, error, callback); } - profile.log("method"); - return LockManager.releaseLock(doc_id, lockValue, function(error) { - if (error != null) { return callback(error); } - profile.log("releaseLock").end(); - callback(null, ...Array.from(response_args)); - // We held the lock for a while so updates might have queued up - return UpdateManager.continueProcessingUpdatesWithLock(project_id, doc_id); - }); - }); - }); - }); - }, + lockUpdatesAndDo(method, project_id, doc_id, 
...rest) { + const adjustedLength = Math.max(rest.length, 1) + const args = rest.slice(0, adjustedLength - 1) + const callback = rest[adjustedLength - 1] + const profile = new Profiler('lockUpdatesAndDo', { project_id, doc_id }) + return LockManager.getLock(doc_id, function (error, lockValue) { + profile.log('getLock') + if (error != null) { + return callback(error) + } + return UpdateManager.processOutstandingUpdates( + project_id, + doc_id, + function (error) { + if (error != null) { + return UpdateManager._handleErrorInsideLock( + doc_id, + lockValue, + error, + callback + ) + } + profile.log('processOutstandingUpdates') + return method(project_id, doc_id, ...Array.from(args), function ( + error, + ...response_args + ) { + if (error != null) { + return UpdateManager._handleErrorInsideLock( + doc_id, + lockValue, + error, + callback + ) + } + profile.log('method') + return LockManager.releaseLock(doc_id, lockValue, function (error) { + if (error != null) { + return callback(error) + } + profile.log('releaseLock').end() + callback(null, ...Array.from(response_args)) + // We held the lock for a while so updates might have queued up + return UpdateManager.continueProcessingUpdatesWithLock( + project_id, + doc_id + ) + }) + }) + } + ) + }) + }, - _handleErrorInsideLock(doc_id, lockValue, original_error, callback) { - if (callback == null) { callback = function(error) {}; } - return LockManager.releaseLock(doc_id, lockValue, lock_error => callback(original_error)); - }, + _handleErrorInsideLock(doc_id, lockValue, original_error, callback) { + if (callback == null) { + callback = function (error) {} + } + return LockManager.releaseLock(doc_id, lockValue, (lock_error) => + callback(original_error) + ) + }, - _sanitizeUpdate(update) { - // In Javascript, characters are 16-bits wide. It does not understand surrogates as characters. - // - // From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane): - // "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved - // for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate - // and one Low Surrogate. A single surrogate code point will never be assigned a character."" - // - // The main offender seems to be \uD835 as a stand alone character, which would be the first - // 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). - // Something must be going on client side that is screwing up the encoding and splitting the - // two 16-bit characters so that \uD835 is standalone. - for (const op of Array.from(update.op || [])) { - if (op.i != null) { - // Replace high and low surrogate characters with 'replacement character' (\uFFFD) - op.i = op.i.replace(/[\uD800-\uDFFF]/g, "\uFFFD"); - } - } - return update; - }, + _sanitizeUpdate(update) { + // In Javascript, characters are 16-bits wide. It does not understand surrogates as characters. + // + // From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane): + // "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved + // for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate + // and one Low Surrogate. 
A single surrogate code point will never be assigned a character."
+    //
+    // The main offender seems to be \uD835 as a stand alone character, which would be the first
+    // 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm).
+    // Something must be going on client side that is screwing up the encoding and splitting the
+    // two 16-bit characters so that \uD835 is standalone.
+    for (const op of Array.from(update.op || [])) {
+      if (op.i != null) {
+        // Replace high and low surrogate characters with 'replacement character' (\uFFFD)
+        op.i = op.i.replace(/[\uD800-\uDFFF]/g, '\uFFFD')
+      }
+    }
+    return update
+  },

-  _addProjectHistoryMetadataToOps(updates, pathname, projectHistoryId, lines) {
-    let doc_length = _.reduce(lines,
-      (chars, line) => chars + line.length,
-      0);
-    doc_length += lines.length - 1; // count newline characters
-    return updates.forEach(function(update) {
-      update.projectHistoryId = projectHistoryId;
-      if (!update.meta) { update.meta = {}; }
-      update.meta.pathname = pathname;
-      update.meta.doc_length = doc_length;
-      // Each update may contain multiple ops, i.e.
-      // [{
-      //   ops: [{i: "foo", p: 4}, {d: "bar", p:8}]
-      // }, {
-      //   ops: [{d: "baz", p: 40}, {i: "qux", p:8}]
-      // }]
-      // We want to include the doc_length at the start of each update,
-      // before it's ops are applied. However, we need to track any
-      // changes to it for the next update.
-      return (() => {
-        const result = [];
-        for (const op of Array.from(update.op)) {
-          if (op.i != null) {
-            doc_length += op.i.length;
-          }
-          if (op.d != null) {
-            result.push(doc_length -= op.d.length);
-          } else {
-            result.push(undefined);
-          }
-        }
-        return result;
-      })();
-    });
-  }
-});
+  _addProjectHistoryMetadataToOps(updates, pathname, projectHistoryId, lines) {
+    let doc_length = _.reduce(lines, (chars, line) => chars + line.length, 0)
+    doc_length += lines.length - 1 // count newline characters
+    return updates.forEach(function (update) {
+      update.projectHistoryId = projectHistoryId
+      if (!update.meta) {
+        update.meta = {}
+      }
+      update.meta.pathname = pathname
+      update.meta.doc_length = doc_length
+      // Each update may contain multiple ops, i.e.
+      // [{
+      //   ops: [{i: "foo", p: 4}, {d: "bar", p:8}]
+      // }, {
+      //   ops: [{d: "baz", p: 40}, {i: "qux", p:8}]
+      // }]
+      // We want to include the doc_length at the start of each update,
+      // before its ops are applied. However, we need to track any
+      // changes to it for the next update.
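// A minimal worked example (not part of this patch) of the doc_length
// bookkeeping implemented in _addProjectHistoryMetadataToOps; values invented.
const exampleLines = ['hello', 'world'] // joined form "hello\nworld" has length 11
let exampleLength = exampleLines.reduce((chars, line) => chars + line.length, 0)
exampleLength += exampleLines.length - 1 // 10 chars + 1 newline = 11

// Each op then shifts the running length used by the *next* update:
for (const op of [{ i: 'foo', p: 4 }, { d: 'bar', p: 8 }]) {
  if (op.i != null) exampleLength += op.i.length // insert grows the doc
  if (op.d != null) exampleLength -= op.d.length // delete shrinks it
}
console.log(exampleLength) // 11 + 3 - 3 = 11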
+ return (() => { + const result = [] + for (const op of Array.from(update.op)) { + if (op.i != null) { + doc_length += op.i.length + } + if (op.d != null) { + result.push((doc_length -= op.d.length)) + } else { + result.push(undefined) + } + } + return result + })() + }) + } +} diff --git a/services/document-updater/app/js/mongojs.js b/services/document-updater/app/js/mongojs.js index 61092e1009..f88f08d15e 100644 --- a/services/document-updater/app/js/mongojs.js +++ b/services/document-updater/app/js/mongojs.js @@ -6,18 +6,22 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Settings = require("settings-sharelatex"); -const mongojs = require("mongojs"); -const db = mongojs(Settings.mongo.url, ["docSnapshots"]); +const Settings = require('settings-sharelatex') +const mongojs = require('mongojs') +const db = mongojs(Settings.mongo.url, ['docSnapshots']) module.exports = { - db, - ObjectId: mongojs.ObjectId, - healthCheck(callback) { - return db.runCommand({ping: 1}, function(err, res) { - if (err != null) { return callback(err); } - if (!res.ok) { return callback(new Error("failed mongo ping")); } - return callback(); - }); - } -}; + db, + ObjectId: mongojs.ObjectId, + healthCheck(callback) { + return db.runCommand({ ping: 1 }, function (err, res) { + if (err != null) { + return callback(err) + } + if (!res.ok) { + return callback(new Error('failed mongo ping')) + } + return callback() + }) + } +} diff --git a/services/document-updater/app/js/sharejs/count.js b/services/document-updater/app/js/sharejs/count.js index c77b76b098..8d8477caf4 100644 --- a/services/document-updater/app/js/sharejs/count.js +++ b/services/document-updater/app/js/sharejs/count.js @@ -8,25 +8,30 @@ */ // This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment] -exports.name = 'count'; -exports.create = () => 1; +exports.name = 'count' +exports.create = () => 1 -exports.apply = function(snapshot, op) { - const [v, inc] = Array.from(op); - if (snapshot !== v) { throw new Error(`Op ${v} != snapshot ${snapshot}`); } - return snapshot + inc; -}; +exports.apply = function (snapshot, op) { + const [v, inc] = Array.from(op) + if (snapshot !== v) { + throw new Error(`Op ${v} != snapshot ${snapshot}`) + } + return snapshot + inc +} // transform op1 by op2. Return transformed version of op1. 
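// A quick illustration (not part of this patch) of the toy 'count' type
// defined in this file; each op is [expectedSnapshot, increment].
const count = require('./count')

let snapshot = count.create() // 1
snapshot = count.apply(snapshot, [1, 10]) // expected snapshot 1, add 10 -> 11

// Transforming op1 by a concurrent op2 shifts op1's expected snapshot:
const op1 = [11, 2]
const op2 = [11, 5]
const op1b = count.transform(op1, op2) // [16, 2]
console.log(count.apply(count.apply(snapshot, op2), op1b)) // 18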
-exports.transform = function(op1, op2) { - if (op1[0] !== op2[0]) { throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`); } - return [op1[0] + op2[1], op1[1]]; -}; +exports.transform = function (op1, op2) { + if (op1[0] !== op2[0]) { + throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`) + } + return [op1[0] + op2[1], op1[1]] +} -exports.compose = function(op1, op2) { - if ((op1[0] + op1[1]) !== op2[0]) { throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`); } - return [op1[0], op1[1] + op2[1]]; -}; - -exports.generateRandomOp = doc => [[doc, 1], doc + 1]; +exports.compose = function (op1, op2) { + if (op1[0] + op1[1] !== op2[0]) { + throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`) + } + return [op1[0], op1[1] + op2[1]] +} +exports.generateRandomOp = (doc) => [[doc, 1], doc + 1] diff --git a/services/document-updater/app/js/sharejs/helpers.js b/services/document-updater/app/js/sharejs/helpers.js index b4500a3214..1d7b268e17 100644 --- a/services/document-updater/app/js/sharejs/helpers.js +++ b/services/document-updater/app/js/sharejs/helpers.js @@ -17,77 +17,96 @@ // Add transform and transformX functions for an OT type which has transformComponent defined. // transformComponent(destination array, component, other component, side) -let bootstrapTransform; -exports._bt = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) { - let transformX; - const transformComponentX = function(left, right, destLeft, destRight) { - transformComponent(destLeft, left, right, 'left'); - return transformComponent(destRight, right, left, 'right'); - }; +let bootstrapTransform +exports._bt = bootstrapTransform = function ( + type, + transformComponent, + checkValidOp, + append +) { + let transformX + const transformComponentX = function (left, right, destLeft, destRight) { + transformComponent(destLeft, left, right, 'left') + return transformComponent(destRight, right, left, 'right') + } // Transforms rightOp by leftOp. Returns ['rightOp', clientOp'] - type.transformX = (type.transformX = (transformX = function(leftOp, rightOp) { - checkValidOp(leftOp); - checkValidOp(rightOp); + type.transformX = type.transformX = transformX = function (leftOp, rightOp) { + checkValidOp(leftOp) + checkValidOp(rightOp) - const newRightOp = []; + const newRightOp = [] for (let rightComponent of Array.from(rightOp)) { // Generate newLeftOp by composing leftOp by rightComponent - const newLeftOp = []; + const newLeftOp = [] - let k = 0; + let k = 0 while (k < leftOp.length) { - var l; - const nextC = []; - transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC); - k++; + var l + const nextC = [] + transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC) + k++ if (nextC.length === 1) { - rightComponent = nextC[0]; + rightComponent = nextC[0] } else if (nextC.length === 0) { - for (l of Array.from(leftOp.slice(k))) { append(newLeftOp, l); } - rightComponent = null; - break; + for (l of Array.from(leftOp.slice(k))) { + append(newLeftOp, l) + } + rightComponent = null + break } else { // Recurse. 
- const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)); - for (l of Array.from(l_)) { append(newLeftOp, l); } - for (const r of Array.from(r_)) { append(newRightOp, r); } - rightComponent = null; - break; + const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)) + for (l of Array.from(l_)) { + append(newLeftOp, l) + } + for (const r of Array.from(r_)) { + append(newRightOp, r) + } + rightComponent = null + break } } - - if (rightComponent != null) { append(newRightOp, rightComponent); } - leftOp = newLeftOp; + + if (rightComponent != null) { + append(newRightOp, rightComponent) + } + leftOp = newLeftOp } - - return [leftOp, newRightOp]; - })); + + return [leftOp, newRightOp] + } // Transforms op with specified type ('left' or 'right') by otherOp. - return type.transform = (type.transform = function(op, otherOp, type) { - let _; - if ((type !== 'left') && (type !== 'right')) { throw new Error("type must be 'left' or 'right'"); } + return (type.transform = type.transform = function (op, otherOp, type) { + let _ + if (type !== 'left' && type !== 'right') { + throw new Error("type must be 'left' or 'right'") + } - if (otherOp.length === 0) { return op; } + if (otherOp.length === 0) { + return op + } // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? - if ((op.length === 1) && (otherOp.length === 1)) { return transformComponent([], op[0], otherOp[0], type); } + if (op.length === 1 && otherOp.length === 1) { + return transformComponent([], op[0], otherOp[0], type) + } if (type === 'left') { - let left; - [left, _] = Array.from(transformX(op, otherOp)); - return left; + let left + ;[left, _] = Array.from(transformX(op, otherOp)) + return left } else { - let right; - [_, right] = Array.from(transformX(otherOp, op)); - return right; + let right + ;[_, right] = Array.from(transformX(otherOp, op)) + return right } - }); -}); + }) +} if (typeof WEB === 'undefined') { - exports.bootstrapTransform = bootstrapTransform; + exports.bootstrapTransform = bootstrapTransform } diff --git a/services/document-updater/app/js/sharejs/index.js b/services/document-updater/app/js/sharejs/index.js index a322063e83..7e3d6bbf26 100644 --- a/services/document-updater/app/js/sharejs/index.js +++ b/services/document-updater/app/js/sharejs/index.js @@ -6,18 +6,20 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const register = function(file) { - const type = require(file); - exports[type.name] = type; - try { return require(`${file}-api`); } catch (error) {} -}; +const register = function (file) { + const type = require(file) + exports[type.name] = type + try { + return require(`${file}-api`) + } catch (error) {} +} // Import all the built-in types. 
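// A sketch (not part of this patch) of the transform entry points this helper
// generates, shown with the text type, which bootstraps itself through
// bootstrapTransform; 'left' wins position ties, transformX returns both sides.
const text = require('./text')

const opA = [{ i: 'A', p: 0 }] // two clients insert at position 0
const opB = [{ i: 'B', p: 0 }]

const aAfterB = text.transform(opA, opB, 'left') // stays at p: 0
const bAfterA = text.transform(opB, opA, 'right') // shifted to p: 1

// Both application orders converge on the same document:
console.log(text.apply(text.apply('', opA), bAfterA)) // 'AB'
console.log(text.apply(text.apply('', opB), aAfterB)) // 'AB'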
-register('./simple'); -register('./count'); +register('./simple') +register('./count') -register('./text'); -register('./text-composable'); -register('./text-tp2'); +register('./text') +register('./text-composable') +register('./text-tp2') -register('./json'); +register('./json') diff --git a/services/document-updater/app/js/sharejs/json-api.js b/services/document-updater/app/js/sharejs/json-api.js index 67e54f5334..a8db564fdf 100644 --- a/services/document-updater/app/js/sharejs/json-api.js +++ b/services/document-updater/app/js/sharejs/json-api.js @@ -14,267 +14,344 @@ */ // API for JSON OT -let json; -if (typeof WEB === 'undefined') { json = require('./json'); } - -if (typeof WEB !== 'undefined' && WEB !== null) { - const { - extendDoc - } = exports; - exports.extendDoc = function(name, fn) { - SubDoc.prototype[name] = fn; - return extendDoc(name, fn); - }; +let json +if (typeof WEB === 'undefined') { + json = require('./json') } -const depath = function(path) { - if ((path.length === 1) && (path[0].constructor === Array)) { - return path[0]; - } else { return path; } -}; +if (typeof WEB !== 'undefined' && WEB !== null) { + const { extendDoc } = exports + exports.extendDoc = function (name, fn) { + SubDoc.prototype[name] = fn + return extendDoc(name, fn) + } +} + +const depath = function (path) { + if (path.length === 1 && path[0].constructor === Array) { + return path[0] + } else { + return path + } +} class SubDoc { constructor(doc, path) { - this.doc = doc; - this.path = path; + this.doc = doc + this.path = path + } + + at(...path) { + return this.doc.at(this.path.concat(depath(path))) + } + + get() { + return this.doc.getAt(this.path) } - at(...path) { return this.doc.at(this.path.concat(depath(path))); } - get() { return this.doc.getAt(this.path); } // for objects and lists - set(value, cb) { return this.doc.setAt(this.path, value, cb); } + set(value, cb) { + return this.doc.setAt(this.path, value, cb) + } + // for strings and lists. 
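// Illustrative use (not part of this patch) of the SubDoc cursor defined in
// this class, against a hypothetical ShareJS document `doc` opened with the
// json type; none of these names or values come from the patch itself.
const todos = doc.at('todos') // SubDoc wrapping snapshot.todos
todos.push({ title: 'write tests' }, function (err) {
  if (err) throw err
})
// Paths compose, so this reads snapshot.todos[0].title:
console.log(doc.at('todos', 0, 'title').get())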
- insert(pos, value, cb) { return this.doc.insertAt(this.path, pos, value, cb); } + insert(pos, value, cb) { + return this.doc.insertAt(this.path, pos, value, cb) + } + // for strings - del(pos, length, cb) { return this.doc.deleteTextAt(this.path, length, pos, cb); } + del(pos, length, cb) { + return this.doc.deleteTextAt(this.path, length, pos, cb) + } + // for objects and lists - remove(cb) { return this.doc.removeAt(this.path, cb); } - push(value, cb) { return this.insert(this.get().length, value, cb); } - move(from, to, cb) { return this.doc.moveAt(this.path, from, to, cb); } - add(amount, cb) { return this.doc.addAt(this.path, amount, cb); } - on(event, cb) { return this.doc.addListener(this.path, event, cb); } - removeListener(l) { return this.doc.removeListener(l); } + remove(cb) { + return this.doc.removeAt(this.path, cb) + } + + push(value, cb) { + return this.insert(this.get().length, value, cb) + } + + move(from, to, cb) { + return this.doc.moveAt(this.path, from, to, cb) + } + + add(amount, cb) { + return this.doc.addAt(this.path, amount, cb) + } + + on(event, cb) { + return this.doc.addListener(this.path, event, cb) + } + + removeListener(l) { + return this.doc.removeListener(l) + } // text API compatibility - getLength() { return this.get().length; } - getText() { return this.get(); } + getLength() { + return this.get().length + } + + getText() { + return this.get() + } } -const traverse = function(snapshot, path) { - const container = {data:snapshot}; - let key = 'data'; - let elem = container; +const traverse = function (snapshot, path) { + const container = { data: snapshot } + let key = 'data' + let elem = container for (const p of Array.from(path)) { - elem = elem[key]; - key = p; - if (typeof elem === 'undefined') { throw new Error('bad path'); } + elem = elem[key] + key = p + if (typeof elem === 'undefined') { + throw new Error('bad path') + } } - return {elem, key}; -}; + return { elem, key } +} -const pathEquals = function(p1, p2) { - if (p1.length !== p2.length) { return false; } - for (let i = 0; i < p1.length; i++) { - const e = p1[i]; - if (e !== p2[i]) { return false; } +const pathEquals = function (p1, p2) { + if (p1.length !== p2.length) { + return false } - return true; -}; + for (let i = 0; i < p1.length; i++) { + const e = p1[i] + if (e !== p2[i]) { + return false + } + } + return true +} json.api = { - provides: {json:true}, + provides: { json: true }, - at(...path) { return new SubDoc(this, depath(path)); }, + at(...path) { + return new SubDoc(this, depath(path)) + }, - get() { return this.snapshot; }, - set(value, cb) { return this.setAt([], value, cb); }, + get() { + return this.snapshot + }, + set(value, cb) { + return this.setAt([], value, cb) + }, getAt(path) { - const {elem, key} = traverse(this.snapshot, path); - return elem[key]; + const { elem, key } = traverse(this.snapshot, path) + return elem[key] }, setAt(path, value, cb) { - const {elem, key} = traverse(this.snapshot, path); - const op = {p:path}; + const { elem, key } = traverse(this.snapshot, path) + const op = { p: path } if (elem.constructor === Array) { - op.li = value; - if (typeof elem[key] !== 'undefined') { op.ld = elem[key]; } + op.li = value + if (typeof elem[key] !== 'undefined') { + op.ld = elem[key] + } } else if (typeof elem === 'object') { - op.oi = value; - if (typeof elem[key] !== 'undefined') { op.od = elem[key]; } - } else { throw new Error('bad path'); } - return this.submitOp([op], cb); + op.oi = value + if (typeof elem[key] !== 'undefined') { + op.od = elem[key] + } + 
} else { + throw new Error('bad path') + } + return this.submitOp([op], cb) }, removeAt(path, cb) { - const {elem, key} = traverse(this.snapshot, path); - if (typeof elem[key] === 'undefined') { throw new Error('no element at that path'); } - const op = {p:path}; + const { elem, key } = traverse(this.snapshot, path) + if (typeof elem[key] === 'undefined') { + throw new Error('no element at that path') + } + const op = { p: path } if (elem.constructor === Array) { - op.ld = elem[key]; + op.ld = elem[key] } else if (typeof elem === 'object') { - op.od = elem[key]; - } else { throw new Error('bad path'); } - return this.submitOp([op], cb); + op.od = elem[key] + } else { + throw new Error('bad path') + } + return this.submitOp([op], cb) }, insertAt(path, pos, value, cb) { - const {elem, key} = traverse(this.snapshot, path); - const op = {p:path.concat(pos)}; + const { elem, key } = traverse(this.snapshot, path) + const op = { p: path.concat(pos) } if (elem[key].constructor === Array) { - op.li = value; + op.li = value } else if (typeof elem[key] === 'string') { - op.si = value; + op.si = value } - return this.submitOp([op], cb); + return this.submitOp([op], cb) }, moveAt(path, from, to, cb) { - const op = [{p:path.concat(from), lm:to}]; - return this.submitOp(op, cb); + const op = [{ p: path.concat(from), lm: to }] + return this.submitOp(op, cb) }, addAt(path, amount, cb) { - const op = [{p:path, na:amount}]; - return this.submitOp(op, cb); + const op = [{ p: path, na: amount }] + return this.submitOp(op, cb) }, deleteTextAt(path, length, pos, cb) { - const {elem, key} = traverse(this.snapshot, path); - const op = [{p:path.concat(pos), sd:elem[key].slice(pos, (pos + length))}]; - return this.submitOp(op, cb); + const { elem, key } = traverse(this.snapshot, path) + const op = [{ p: path.concat(pos), sd: elem[key].slice(pos, pos + length) }] + return this.submitOp(op, cb) }, addListener(path, event, cb) { - const l = {path, event, cb}; - this._listeners.push(l); - return l; + const l = { path, event, cb } + this._listeners.push(l) + return l }, removeListener(l) { - const i = this._listeners.indexOf(l); - if (i < 0) { return false; } - this._listeners.splice(i, 1); - return true; + const i = this._listeners.indexOf(l) + if (i < 0) { + return false + } + this._listeners.splice(i, 1) + return true }, _register() { - this._listeners = []; - this.on('change', function(op) { + this._listeners = [] + this.on('change', function (op) { return (() => { - const result = []; + const result = [] for (const c of Array.from(op)) { - var i; - if ((c.na !== undefined) || (c.si !== undefined) || (c.sd !== undefined)) { + var i + if (c.na !== undefined || c.si !== undefined || c.sd !== undefined) { // no change to structure - continue; + continue } - var to_remove = []; + var to_remove = [] for (i = 0; i < this._listeners.length; i++) { // Transform a dummy op by the incoming op to work out what // should happen to the listener. - const l = this._listeners[i]; - const dummy = {p:l.path, na:0}; - const xformed = this.type.transformComponent([], dummy, c, 'left'); + const l = this._listeners[i] + const dummy = { p: l.path, na: 0 } + const xformed = this.type.transformComponent([], dummy, c, 'left') if (xformed.length === 0) { // The op was transformed to noop, so we should delete the listener. - to_remove.push(i); + to_remove.push(i) } else if (xformed.length === 1) { // The op remained, so grab its new path into the listener. 
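// A sketch (not part of this patch) of the dummy-op trick used above to keep
// listener paths current; assumes this module's exported json type object.
const json = require('./json')

const dummy = { p: ['todos', 1], na: 0 } // stand-in op at the listener's path
const incoming = { p: ['todos', 0], li: 'new item' } // list insert above it
const xformed = json.transformComponent([], dummy, incoming, 'left')
console.log(xformed[0].p) // ['todos', 2] -- the listener's relocated path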
- l.path = xformed[0].p; + l.path = xformed[0].p } else { - throw new Error("Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components."); + throw new Error( + "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components." + ) } } - to_remove.sort((a, b) => b - a); - result.push((() => { - const result1 = []; - for (i of Array.from(to_remove)) { - result1.push(this._listeners.splice(i, 1)); - } - return result1; - })()); - } - return result; - })(); - }); - return this.on('remoteop', function(op) { - return (() => { - const result = []; - for (var c of Array.from(op)) { - var match_path = c.na === undefined ? c.p.slice(0, c.p.length-1) : c.p; - result.push((() => { - const result1 = []; - for (const {path, event, cb} of Array.from(this._listeners)) { - var common; - if (pathEquals(path, match_path)) { - switch (event) { - case 'insert': - if ((c.li !== undefined) && (c.ld === undefined)) { - result1.push(cb(c.p[c.p.length-1], c.li)); - } else if ((c.oi !== undefined) && (c.od === undefined)) { - result1.push(cb(c.p[c.p.length-1], c.oi)); - } else if (c.si !== undefined) { - result1.push(cb(c.p[c.p.length-1], c.si)); - } else { - result1.push(undefined); - } - break; - case 'delete': - if ((c.li === undefined) && (c.ld !== undefined)) { - result1.push(cb(c.p[c.p.length-1], c.ld)); - } else if ((c.oi === undefined) && (c.od !== undefined)) { - result1.push(cb(c.p[c.p.length-1], c.od)); - } else if (c.sd !== undefined) { - result1.push(cb(c.p[c.p.length-1], c.sd)); - } else { - result1.push(undefined); - } - break; - case 'replace': - if ((c.li !== undefined) && (c.ld !== undefined)) { - result1.push(cb(c.p[c.p.length-1], c.ld, c.li)); - } else if ((c.oi !== undefined) && (c.od !== undefined)) { - result1.push(cb(c.p[c.p.length-1], c.od, c.oi)); - } else { - result1.push(undefined); - } - break; - case 'move': - if (c.lm !== undefined) { - result1.push(cb(c.p[c.p.length-1], c.lm)); - } else { - result1.push(undefined); - } - break; - case 'add': - if (c.na !== undefined) { - result1.push(cb(c.na)); - } else { - result1.push(undefined); - } - break; - default: - result1.push(undefined); - } - } else if ((common = this.type.commonPath(match_path, path)) != null) { - if (event === 'child op') { - if (match_path.length === path.length && path.length === common) { - throw new Error("paths match length and have commonality, but aren't equal?"); - } - const child_path = c.p.slice(common+1); - result1.push(cb(child_path, c)); - } else { - result1.push(undefined); - } - } else { - result1.push(undefined); + to_remove.sort((a, b) => b - a) + result.push( + (() => { + const result1 = [] + for (i of Array.from(to_remove)) { + result1.push(this._listeners.splice(i, 1)) } - } - return result1; - })()); + return result1 + })() + ) } - return result; - })(); - }); + return result + })() + }) + return this.on('remoteop', function (op) { + return (() => { + const result = [] + for (var c of Array.from(op)) { + var match_path = + c.na === undefined ? 
c.p.slice(0, c.p.length - 1) : c.p + result.push( + (() => { + const result1 = [] + for (const { path, event, cb } of Array.from(this._listeners)) { + var common + if (pathEquals(path, match_path)) { + switch (event) { + case 'insert': + if (c.li !== undefined && c.ld === undefined) { + result1.push(cb(c.p[c.p.length - 1], c.li)) + } else if (c.oi !== undefined && c.od === undefined) { + result1.push(cb(c.p[c.p.length - 1], c.oi)) + } else if (c.si !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.si)) + } else { + result1.push(undefined) + } + break + case 'delete': + if (c.li === undefined && c.ld !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.ld)) + } else if (c.oi === undefined && c.od !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.od)) + } else if (c.sd !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.sd)) + } else { + result1.push(undefined) + } + break + case 'replace': + if (c.li !== undefined && c.ld !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.ld, c.li)) + } else if (c.oi !== undefined && c.od !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.od, c.oi)) + } else { + result1.push(undefined) + } + break + case 'move': + if (c.lm !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.lm)) + } else { + result1.push(undefined) + } + break + case 'add': + if (c.na !== undefined) { + result1.push(cb(c.na)) + } else { + result1.push(undefined) + } + break + default: + result1.push(undefined) + } + } else if ( + (common = this.type.commonPath(match_path, path)) != null + ) { + if (event === 'child op') { + if ( + match_path.length === path.length && + path.length === common + ) { + throw new Error( + "paths match length and have commonality, but aren't equal?" + ) + } + const child_path = c.p.slice(common + 1) + result1.push(cb(child_path, c)) + } else { + result1.push(undefined) + } + } else { + result1.push(undefined) + } + } + return result1 + })() + ) + } + return result + })() + }) } -}; +} diff --git a/services/document-updater/app/js/sharejs/json.js b/services/document-updater/app/js/sharejs/json.js index 5619c09be1..fa9b030a82 100644 --- a/services/document-updater/app/js/sharejs/json.js +++ b/services/document-updater/app/js/sharejs/json.js @@ -16,382 +16,459 @@ // // Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations -let text; +let text if (typeof WEB !== 'undefined' && WEB !== null) { - ({ - text - } = exports.types); + ;({ text } = exports.types) } else { - text = require('./text'); + text = require('./text') } -const json = {}; +const json = {} -json.name = 'json'; +json.name = 'json' -json.create = () => null; +json.create = () => null -json.invertComponent = function(c) { - const c_ = {p: c.p}; - if (c.si !== undefined) { c_.sd = c.si; } - if (c.sd !== undefined) { c_.si = c.sd; } - if (c.oi !== undefined) { c_.od = c.oi; } - if (c.od !== undefined) { c_.oi = c.od; } - if (c.li !== undefined) { c_.ld = c.li; } - if (c.ld !== undefined) { c_.li = c.ld; } - if (c.na !== undefined) { c_.na = -c.na; } - if (c.lm !== undefined) { - c_.lm = c.p[c.p.length-1]; - c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]); +json.invertComponent = function (c) { + const c_ = { p: c.p } + if (c.si !== undefined) { + c_.sd = c.si } - return c_; -}; + if (c.sd !== undefined) { + c_.si = c.sd + } + if (c.oi !== undefined) { + c_.od = c.oi + } + if (c.od !== undefined) { + c_.oi = c.od + } + if (c.li !== undefined) { + c_.ld = c.li + } + if (c.ld !== undefined) { + c_.li = c.ld + } + if (c.na !== undefined) { + 
c_.na = -c.na + } + if (c.lm !== undefined) { + c_.lm = c.p[c.p.length - 1] + c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]) + } + return c_ +} -json.invert = op => Array.from(op.slice().reverse()).map((c) => json.invertComponent(c)); +json.invert = (op) => + Array.from(op.slice().reverse()).map((c) => json.invertComponent(c)) -json.checkValidOp = function(op) {}; +json.checkValidOp = function (op) {} -const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; -json.checkList = function(elem) { - if (!isArray(elem)) { throw new Error('Referenced element not a list'); } -}; +const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' +json.checkList = function (elem) { + if (!isArray(elem)) { + throw new Error('Referenced element not a list') + } +} -json.checkObj = function(elem) { - if (elem.constructor !== Object) { throw new Error(`Referenced element not an object (it was ${JSON.stringify(elem)})`); } -}; +json.checkObj = function (elem) { + if (elem.constructor !== Object) { + throw new Error( + `Referenced element not an object (it was ${JSON.stringify(elem)})` + ) + } +} -json.apply = function(snapshot, op) { - json.checkValidOp(op); - op = clone(op); +json.apply = function (snapshot, op) { + json.checkValidOp(op) + op = clone(op) - const container = {data: clone(snapshot)}; + const container = { data: clone(snapshot) } try { for (let i = 0; i < op.length; i++) { - const c = op[i]; - let parent = null; - let parentkey = null; - let elem = container; - let key = 'data'; + const c = op[i] + let parent = null + let parentkey = null + let elem = container + let key = 'data' for (const p of Array.from(c.p)) { - parent = elem; - parentkey = key; - elem = elem[key]; - key = p; + parent = elem + parentkey = key + elem = elem[key] + key = p - if (parent == null) { throw new Error('Path invalid'); } + if (parent == null) { + throw new Error('Path invalid') + } } if (c.na !== undefined) { // Number add - if (typeof elem[key] !== 'number') { throw new Error('Referenced element not a number'); } - elem[key] += c.na; - + if (typeof elem[key] !== 'number') { + throw new Error('Referenced element not a number') + } + elem[key] += c.na } else if (c.si !== undefined) { // String insert - if (typeof elem !== 'string') { throw new Error(`Referenced element not a string (it was ${JSON.stringify(elem)})`); } - parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key); + if (typeof elem !== 'string') { + throw new Error( + `Referenced element not a string (it was ${JSON.stringify(elem)})` + ) + } + parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key) } else if (c.sd !== undefined) { // String delete - if (typeof elem !== 'string') { throw new Error('Referenced element not a string'); } - if (elem.slice(key, key + c.sd.length) !== c.sd) { throw new Error('Deleted string does not match'); } - parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length); - - } else if ((c.li !== undefined) && (c.ld !== undefined)) { + if (typeof elem !== 'string') { + throw new Error('Referenced element not a string') + } + if (elem.slice(key, key + c.sd.length) !== c.sd) { + throw new Error('Deleted string does not match') + } + parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length) + } else if (c.li !== undefined && c.ld !== undefined) { // List replace - json.checkList(elem); + json.checkList(elem) // Should check the list element matches c.ld - elem[key] = c.li; + elem[key] = c.li } else if (c.li !== undefined) { // List insert - 
json.checkList(elem); + json.checkList(elem) - elem.splice(key, 0, c.li); + elem.splice(key, 0, c.li) } else if (c.ld !== undefined) { // List delete - json.checkList(elem); + json.checkList(elem) // Should check the list element matches c.ld here too. - elem.splice(key, 1); + elem.splice(key, 1) } else if (c.lm !== undefined) { // List move - json.checkList(elem); + json.checkList(elem) if (c.lm !== key) { - const e = elem[key]; + const e = elem[key] // Remove it... - elem.splice(key, 1); + elem.splice(key, 1) // And insert it back. - elem.splice(c.lm, 0, e); + elem.splice(c.lm, 0, e) } - } else if (c.oi !== undefined) { // Object insert / replace - json.checkObj(elem); - - // Should check that elem[key] == c.od - elem[key] = c.oi; - } else if (c.od !== undefined) { - // Object delete - json.checkObj(elem); + json.checkObj(elem) // Should check that elem[key] == c.od - delete elem[key]; + elem[key] = c.oi + } else if (c.od !== undefined) { + // Object delete + json.checkObj(elem) + + // Should check that elem[key] == c.od + delete elem[key] } else { - throw new Error('invalid / missing instruction in op'); + throw new Error('invalid / missing instruction in op') } } } catch (error) { // TODO: Roll back all already applied changes. Write tests before implementing this code. - throw error; + throw error } - return container.data; -}; + return container.data +} // Checks if two paths, p1 and p2 match. -json.pathMatches = function(p1, p2, ignoreLast) { - if (p1.length !== p2.length) { return false; } +json.pathMatches = function (p1, p2, ignoreLast) { + if (p1.length !== p2.length) { + return false + } for (let i = 0; i < p1.length; i++) { - const p = p1[i]; - if ((p !== p2[i]) && (!ignoreLast || (i !== (p1.length - 1)))) { return false; } + const p = p1[i] + if (p !== p2[i] && (!ignoreLast || i !== p1.length - 1)) { + return false + } } - - return true; -}; -json.append = function(dest, c) { - let last; - c = clone(c); - if ((dest.length !== 0) && json.pathMatches(c.p, (last = dest[dest.length - 1]).p)) { - if ((last.na !== undefined) && (c.na !== undefined)) { - return dest[dest.length - 1] = { p: last.p, na: last.na + c.na }; - } else if ((last.li !== undefined) && (c.li === undefined) && (c.ld === last.li)) { + return true +} + +json.append = function (dest, c) { + let last + c = clone(c) + if ( + dest.length !== 0 && + json.pathMatches(c.p, (last = dest[dest.length - 1]).p) + ) { + if (last.na !== undefined && c.na !== undefined) { + return (dest[dest.length - 1] = { p: last.p, na: last.na + c.na }) + } else if ( + last.li !== undefined && + c.li === undefined && + c.ld === last.li + ) { // insert immediately followed by delete becomes a noop. 
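// What the append() fast paths above mean for compose (not part of this
// patch; values invented, json is this module's export).
const json = require('./json')

// Adjacent number-adds at the same path merge into one component...
console.log(json.compose([{ p: ['n'], na: 2 }], [{ p: ['n'], na: 3 }]))
// => [{ p: ['n'], na: 5 }]

// ...and a list insert immediately followed by its own delete cancels out.
console.log(
  json.compose([{ p: ['list', 0], li: 'x' }], [{ p: ['list', 0], ld: 'x' }])
)
// => []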
if (last.ld !== undefined) { // leave the delete part of the replace - return delete last.li; + return delete last.li } else { - return dest.pop(); + return dest.pop() } - } else if ((last.od !== undefined) && (last.oi === undefined) && - (c.oi !== undefined) && (c.od === undefined)) { - return last.oi = c.oi; - } else if ((c.lm !== undefined) && (c.p[c.p.length-1] === c.lm)) { - return null; // don't do anything + } else if ( + last.od !== undefined && + last.oi === undefined && + c.oi !== undefined && + c.od === undefined + ) { + return (last.oi = c.oi) + } else if (c.lm !== undefined && c.p[c.p.length - 1] === c.lm) { + return null // don't do anything } else { - return dest.push(c); + return dest.push(c) } } else { - return dest.push(c); + return dest.push(c) } -}; +} -json.compose = function(op1, op2) { - json.checkValidOp(op1); - json.checkValidOp(op2); +json.compose = function (op1, op2) { + json.checkValidOp(op1) + json.checkValidOp(op2) - const newOp = clone(op1); - for (const c of Array.from(op2)) { json.append(newOp, c); } + const newOp = clone(op1) + for (const c of Array.from(op2)) { + json.append(newOp, c) + } - return newOp; -}; + return newOp +} -json.normalize = function(op) { - const newOp = []; - - if (!isArray(op)) { op = [op]; } +json.normalize = function (op) { + const newOp = [] + + if (!isArray(op)) { + op = [op] + } for (const c of Array.from(op)) { - if (c.p == null) { c.p = []; } - json.append(newOp, c); + if (c.p == null) { + c.p = [] + } + json.append(newOp, c) } - - return newOp; -}; + + return newOp +} // hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming // we have browser support for JSON. // http://jsperf.com/cloning-an-object/12 -var clone = o => JSON.parse(JSON.stringify(o)); +var clone = (o) => JSON.parse(JSON.stringify(o)) -json.commonPath = function(p1, p2) { - p1 = p1.slice(); - p2 = p2.slice(); - p1.unshift('data'); - p2.unshift('data'); - p1 = p1.slice(0, p1.length-1); - p2 = p2.slice(0, p2.length-1); - if (p2.length === 0) { return -1; } - let i = 0; - while ((p1[i] === p2[i]) && (i < p1.length)) { - i++; +json.commonPath = function (p1, p2) { + p1 = p1.slice() + p2 = p2.slice() + p1.unshift('data') + p2.unshift('data') + p1 = p1.slice(0, p1.length - 1) + p2 = p2.slice(0, p2.length - 1) + if (p2.length === 0) { + return -1 + } + let i = 0 + while (p1[i] === p2[i] && i < p1.length) { + i++ if (i === p2.length) { - return i-1; + return i - 1 } } -}; +} // transform c so it applies to a document with otherC applied. 
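// Two worked examples of the append()/compose() coalescing rules above, under
// the same illustrative require() assumption as the previous sketch.
const json = require('./json')

// Consecutive number-adds on the same path merge into a single component:
json.compose([{ p: ['n'], na: 1 }], [{ p: ['n'], na: 2 }])
// => [{ p: ['n'], na: 3 }]

// A list insert immediately followed by a delete of the same value cancels:
json.compose([{ p: ['xs', 0], li: 'tmp' }], [{ p: ['xs', 0], ld: 'tmp' }])
// => []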
-json.transformComponent = function(dest, c, otherC, type) { - let oc; - c = clone(c); - if (c.na !== undefined) { c.p.push(0); } - if (otherC.na !== undefined) { otherC.p.push(0); } +json.transformComponent = function (dest, c, otherC, type) { + let oc + c = clone(c) + if (c.na !== undefined) { + c.p.push(0) + } + if (otherC.na !== undefined) { + otherC.p.push(0) + } - const common = json.commonPath(c.p, otherC.p); - const common2 = json.commonPath(otherC.p, c.p); + const common = json.commonPath(c.p, otherC.p) + const common2 = json.commonPath(otherC.p, c.p) - const cplength = c.p.length; - const otherCplength = otherC.p.length; + const cplength = c.p.length + const otherCplength = otherC.p.length - if (c.na !== undefined) { c.p.pop(); } // hax - if (otherC.na !== undefined) { otherC.p.pop(); } + if (c.na !== undefined) { + c.p.pop() + } // hax + if (otherC.na !== undefined) { + otherC.p.pop() + } if (otherC.na) { - if ((common2 != null) && (otherCplength >= cplength) && (otherC.p[common2] === c.p[common2])) { + if ( + common2 != null && + otherCplength >= cplength && + otherC.p[common2] === c.p[common2] + ) { if (c.ld !== undefined) { - oc = clone(otherC); - oc.p = oc.p.slice(cplength); - c.ld = json.apply(clone(c.ld), [oc]); + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.ld = json.apply(clone(c.ld), [oc]) } else if (c.od !== undefined) { - oc = clone(otherC); - oc.p = oc.p.slice(cplength); - c.od = json.apply(clone(c.od), [oc]); + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.od = json.apply(clone(c.od), [oc]) } } - json.append(dest, c); - return dest; + json.append(dest, c) + return dest } - if ((common2 != null) && (otherCplength > cplength) && (c.p[common2] === otherC.p[common2])) { + if ( + common2 != null && + otherCplength > cplength && + c.p[common2] === otherC.p[common2] + ) { // transform based on c if (c.ld !== undefined) { - oc = clone(otherC); - oc.p = oc.p.slice(cplength); - c.ld = json.apply(clone(c.ld), [oc]); + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.ld = json.apply(clone(c.ld), [oc]) } else if (c.od !== undefined) { - oc = clone(otherC); - oc.p = oc.p.slice(cplength); - c.od = json.apply(clone(c.od), [oc]); + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.od = json.apply(clone(c.od), [oc]) } } - if (common != null) { - let from, p, to; - const commonOperand = cplength === otherCplength; + let from, p, to + const commonOperand = cplength === otherCplength // transform based on otherC if (otherC.na !== undefined) { // this case is handled above due to icky path hax - } else if ((otherC.si !== undefined) || (otherC.sd !== undefined)) { + } else if (otherC.si !== undefined || otherC.sd !== undefined) { // String op vs string op - pass through to text type - if ((c.si !== undefined) || (c.sd !== undefined)) { - if (!commonOperand) { throw new Error("must be a string?"); } + if (c.si !== undefined || c.sd !== undefined) { + if (!commonOperand) { + throw new Error('must be a string?') + } // Convert an op component to a text op component - const convert = function(component) { - const newC = {p:component.p[component.p.length - 1]}; + const convert = function (component) { + const newC = { p: component.p[component.p.length - 1] } if (component.si) { - newC.i = component.si; + newC.i = component.si } else { - newC.d = component.sd; + newC.d = component.sd } - return newC; - }; - - const tc1 = convert(c); - const tc2 = convert(otherC); - - const res = []; - text._tc(res, tc1, tc2, type); - for (const tc of Array.from(res)) { - const jc = { p: 
c.p.slice(0, common) }; - jc.p.push(tc.p); - if (tc.i != null) { jc.si = tc.i; } - if (tc.d != null) { jc.sd = tc.d; } - json.append(dest, jc); + return newC } - return dest; + + const tc1 = convert(c) + const tc2 = convert(otherC) + + const res = [] + text._tc(res, tc1, tc2, type) + for (const tc of Array.from(res)) { + const jc = { p: c.p.slice(0, common) } + jc.p.push(tc.p) + if (tc.i != null) { + jc.si = tc.i + } + if (tc.d != null) { + jc.sd = tc.d + } + json.append(dest, jc) + } + return dest } - } else if ((otherC.li !== undefined) && (otherC.ld !== undefined)) { + } else if (otherC.li !== undefined && otherC.ld !== undefined) { if (otherC.p[common] === c.p[common]) { // noop if (!commonOperand) { // we're below the deleted element, so -> noop - return dest; + return dest } else if (c.ld !== undefined) { // we're trying to delete the same element, -> noop - if ((c.li !== undefined) && (type === 'left')) { + if (c.li !== undefined && type === 'left') { // we're both replacing one element with another. only one can // survive! - c.ld = clone(otherC.li); + c.ld = clone(otherC.li) } else { - return dest; + return dest } } } } else if (otherC.li !== undefined) { - if ((c.li !== undefined) && (c.ld === undefined) && commonOperand && (c.p[common] === otherC.p[common])) { + if ( + c.li !== undefined && + c.ld === undefined && + commonOperand && + c.p[common] === otherC.p[common] + ) { // in li vs. li, left wins. if (type === 'right') { - c.p[common]++; + c.p[common]++ } } else if (otherC.p[common] <= c.p[common]) { - c.p[common]++; + c.p[common]++ } if (c.lm !== undefined) { if (commonOperand) { // otherC edits the same list we edit if (otherC.p[common] <= c.lm) { - c.lm++; + c.lm++ } } } - // changing c.from is handled above. + // changing c.from is handled above. } else if (otherC.ld !== undefined) { if (c.lm !== undefined) { if (commonOperand) { if (otherC.p[common] === c.p[common]) { // they deleted the thing we're trying to move - return dest; + return dest } // otherC edits the same list we edit - p = otherC.p[common]; - from = c.p[common]; - to = c.lm; - if ((p < to) || ((p === to) && (from < to))) { - c.lm--; + p = otherC.p[common] + from = c.p[common] + to = c.lm + if (p < to || (p === to && from < to)) { + c.lm-- } } } if (otherC.p[common] < c.p[common]) { - c.p[common]--; + c.p[common]-- } else if (otherC.p[common] === c.p[common]) { if (otherCplength < cplength) { // we're below the deleted element, so -> noop - return dest; + return dest } else if (c.ld !== undefined) { if (c.li !== undefined) { // we're replacing, they're deleting. we become an insert. - delete c.ld; + delete c.ld } else { // we're trying to delete the same element, -> noop - return dest; + return dest } } } } else if (otherC.lm !== undefined) { - if ((c.lm !== undefined) && (cplength === otherCplength)) { + if (c.lm !== undefined && cplength === otherCplength) { // lm vs lm, here we go! - from = c.p[common]; - to = c.lm; - const otherFrom = otherC.p[common]; - const otherTo = otherC.lm; + from = c.p[common] + to = c.lm + const otherFrom = otherC.p[common] + const otherTo = otherC.lm if (otherFrom !== otherTo) { // if otherFrom == otherTo, we don't need to change our op. @@ -399,143 +476,155 @@ json.transformComponent = function(dest, c, otherC, type) { if (from === otherFrom) { // they moved it! tie break. 
if (type === 'left') { - c.p[common] = otherTo; - if (from === to) { // ugh - c.lm = otherTo; + c.p[common] = otherTo + if (from === to) { + // ugh + c.lm = otherTo } } else { - return dest; + return dest } } else { // they moved around it if (from > otherFrom) { - c.p[common]--; + c.p[common]-- } if (from > otherTo) { - c.p[common]++; + c.p[common]++ } else if (from === otherTo) { if (otherFrom > otherTo) { - c.p[common]++; - if (from === to) { // ugh, again - c.lm++; + c.p[common]++ + if (from === to) { + // ugh, again + c.lm++ } } } // step 2: where am i going to put it? if (to > otherFrom) { - c.lm--; + c.lm-- } else if (to === otherFrom) { if (to > from) { - c.lm--; + c.lm-- } } if (to > otherTo) { - c.lm++; + c.lm++ } else if (to === otherTo) { // if we're both moving in the same direction, tie break - if (((otherTo > otherFrom) && (to > from)) || - ((otherTo < otherFrom) && (to < from))) { + if ( + (otherTo > otherFrom && to > from) || + (otherTo < otherFrom && to < from) + ) { if (type === 'right') { - c.lm++; + c.lm++ } } else { if (to > from) { - c.lm++; + c.lm++ } else if (to === otherFrom) { - c.lm--; + c.lm-- } } } } } - } else if ((c.li !== undefined) && (c.ld === undefined) && commonOperand) { + } else if (c.li !== undefined && c.ld === undefined && commonOperand) { // li - from = otherC.p[common]; - to = otherC.lm; - p = c.p[common]; + from = otherC.p[common] + to = otherC.lm + p = c.p[common] if (p > from) { - c.p[common]--; + c.p[common]-- } if (p > to) { - c.p[common]++; + c.p[common]++ } } else { // ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath // the lm // // i.e. things care about where their item is after the move. - from = otherC.p[common]; - to = otherC.lm; - p = c.p[common]; + from = otherC.p[common] + to = otherC.lm + p = c.p[common] if (p === from) { - c.p[common] = to; + c.p[common] = to } else { if (p > from) { - c.p[common]--; + c.p[common]-- } if (p > to) { - c.p[common]++; + c.p[common]++ } else if (p === to) { if (from > to) { - c.p[common]++; + c.p[common]++ } } } } - } else if ((otherC.oi !== undefined) && (otherC.od !== undefined)) { + } else if (otherC.oi !== undefined && otherC.od !== undefined) { if (c.p[common] === otherC.p[common]) { - if ((c.oi !== undefined) && commonOperand) { + if (c.oi !== undefined && commonOperand) { // we inserted where someone else replaced if (type === 'right') { // left wins - return dest; + return dest } else { // we win, make our op replace what they inserted - c.od = otherC.oi; + c.od = otherC.oi } } else { // -> noop if the other component is deleting the same object (or any // parent) - return dest; + return dest } } } else if (otherC.oi !== undefined) { - if ((c.oi !== undefined) && (c.p[common] === otherC.p[common])) { + if (c.oi !== undefined && c.p[common] === otherC.p[common]) { // left wins if we try to insert at the same place if (type === 'left') { - json.append(dest, {p:c.p, od:otherC.oi}); + json.append(dest, { p: c.p, od: otherC.oi }) } else { - return dest; + return dest } } } else if (otherC.od !== undefined) { if (c.p[common] === otherC.p[common]) { - if (!commonOperand) { return dest; } + if (!commonOperand) { + return dest + } if (c.oi !== undefined) { - delete c.od; + delete c.od } else { - return dest; + return dest } } } } - - json.append(dest, c); - return dest; -}; -if (typeof WEB !== 'undefined' && WEB !== null) { - if (!exports.types) { exports.types = {}; } - - // This is kind of awful - come up with a better way to hook this helper code up. 
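// Once bootstrapTransform() has decorated this type (see the
// require('./helpers') call just below), it gains the standard sharejs
// transform(op, otherOp, side) helper. A sketch of the oi-vs-oi tie-break
// implemented above, assuming that API; the paths and values are made up.
const json = require('./json')

const mine = [{ p: ['k'], oi: 'mine' }]
const theirs = [{ p: ['k'], oi: 'theirs' }]

json.transform(mine, theirs, 'left')
// => [{ p: ['k'], od: 'theirs', oi: 'mine' }] (left wins and replaces)
json.transform(theirs, mine, 'right')
// => [] (the right-hand insert is dropped)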
- exports._bt(json, json.transformComponent, json.checkValidOp, json.append); - - // [] is used to prevent closure from renaming types.text - exports.types.json = json; -} else { - module.exports = json; - - require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append); + json.append(dest, c) + return dest } +if (typeof WEB !== 'undefined' && WEB !== null) { + if (!exports.types) { + exports.types = {} + } + + // This is kind of awful - come up with a better way to hook this helper code up. + exports._bt(json, json.transformComponent, json.checkValidOp, json.append) + + // [] is used to prevent closure from renaming types.text + exports.types.json = json +} else { + module.exports = json + + require('./helpers').bootstrapTransform( + json, + json.transformComponent, + json.checkValidOp, + json.append + ) +} diff --git a/services/document-updater/app/js/sharejs/model.js b/services/document-updater/app/js/sharejs/model.js index 68f68f2e7d..69d258738e 100644 --- a/services/document-updater/app/js/sharejs/model.js +++ b/services/document-updater/app/js/sharejs/model.js @@ -21,13 +21,13 @@ // // Actual storage is handled by the database wrappers in db/*, wrapped by DocCache -let Model; -const {EventEmitter} = require('events'); +let Model +const { EventEmitter } = require('events') -const queue = require('./syncqueue'); -const types = require('../types'); +const queue = require('./syncqueue') +const types = require('../types') -const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; +const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' // This constructor creates a new Model object. There will be one model object // per server context. @@ -41,15 +41,19 @@ const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; // The model is an event emitter. It emits the following events: // // create(docName, data): A document has been created with the specified name & data -module.exports = (Model = function(db, options) { +module.exports = Model = function (db, options) { // db can be null if the user doesn't want persistance. - let getOps; - if (!(this instanceof Model)) { return new Model(db, options); } + let getOps + if (!(this instanceof Model)) { + return new Model(db, options) + } - const model = this; + const model = this - if (options == null) { options = {}; } + if (options == null) { + options = {} + } // This is a cache of 'live' documents. // @@ -78,164 +82,210 @@ module.exports = (Model = function(db, options) { // // In any case, the API to model is designed such that if we want to change that later // it should be pretty easy to do so without any external-to-the-model code changes. - const docs = {}; + const docs = {} // This is a map from docName -> [callback]. It is used when a document hasn't been // cached and multiple getSnapshot() / getVersion() requests come in. All requests // are added to the callback list and called when db.getSnapshot() returns. // // callback(error, snapshot data) - const awaitingGetSnapshot = {}; + const awaitingGetSnapshot = {} // The time that documents which no clients have open will stay in the cache. // Should be > 0. 
- if (options.reapTime == null) { options.reapTime = 3000; } + if (options.reapTime == null) { + options.reapTime = 3000 + } // The number of operations the cache holds before reusing the space - if (options.numCachedOps == null) { options.numCachedOps = 10; } + if (options.numCachedOps == null) { + options.numCachedOps = 10 + } // This option forces documents to be reaped, even when there's no database backend. // This is useful when you don't care about persistance and don't want to gradually // fill memory. // // You might want to set reapTime to a day or something. - if (options.forceReaping == null) { options.forceReaping = false; } + if (options.forceReaping == null) { + options.forceReaping = false + } // Until I come up with a better strategy, we'll save a copy of the document snapshot // to the database every ~20 submitted ops. - if (options.opsBeforeCommit == null) { options.opsBeforeCommit = 20; } + if (options.opsBeforeCommit == null) { + options.opsBeforeCommit = 20 + } // It takes some processing time to transform client ops. The server will punt ops back to the // client to transform if they're too old. - if (options.maximumAge == null) { options.maximumAge = 40; } + if (options.maximumAge == null) { + options.maximumAge = 40 + } // **** Cache API methods // Its important that all ops are applied in order. This helper method creates the op submission queue // for a single document. This contains the logic for transforming & applying ops. - const makeOpQueue = (docName, doc) => queue(function(opData, callback) { - if (!(opData.v >= 0)) { return callback('Version missing'); } - if (opData.v > doc.v) { return callback('Op at future version'); } - - // Punt the transforming work back to the client if the op is too old. - if ((opData.v + options.maximumAge) < doc.v) { return callback('Op too old'); } - - if (!opData.meta) { opData.meta = {}; } - opData.meta.ts = Date.now(); - - // We'll need to transform the op to the current version of the document. This - // calls the callback immediately if opVersion == doc.v. - return getOps(docName, opData.v, doc.v, function(error, ops) { - let snapshot; - if (error) { return callback(error); } - - if ((doc.v - opData.v) !== ops.length) { - // This should never happen. It indicates that we didn't get all the ops we - // asked for. Its important that the submitted op is correctly transformed. - console.error(`Could not get old ops in model for document ${docName}`); - console.error(`Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`); - return callback('Internal error'); + const makeOpQueue = (docName, doc) => + queue(function (opData, callback) { + if (!(opData.v >= 0)) { + return callback('Version missing') + } + if (opData.v > doc.v) { + return callback('Op at future version') } - if (ops.length > 0) { - try { - // If there's enough ops, it might be worth spinning this out into a webworker thread. - for (const oldOp of Array.from(ops)) { - // Dup detection works by sending the id(s) the op has been submitted with previously. - // If the id matches, we reject it. The client can also detect the op has been submitted - // already if it sees its own previous id in the ops it sees when it does catchup. 
- if (oldOp.meta.source && opData.dupIfSource && Array.from(opData.dupIfSource).includes(oldOp.meta.source)) { - return callback('Op already submitted'); - } - - opData.op = doc.type.transform(opData.op, oldOp.op, 'left'); - opData.v++; - } - } catch (error1) { - error = error1; - console.error(error.stack); - return callback(error.message); - } + // Punt the transforming work back to the client if the op is too old. + if (opData.v + options.maximumAge < doc.v) { + return callback('Op too old') } - try { - snapshot = doc.type.apply(doc.snapshot, opData.op); - } catch (error2) { - error = error2; - console.error(error.stack); - return callback(error.message); + if (!opData.meta) { + opData.meta = {} } + opData.meta.ts = Date.now() - // The op data should be at the current version, and the new document data should be at - // the next version. - // - // This should never happen in practice, but its a nice little check to make sure everything - // is hunky-dory. - if (opData.v !== doc.v) { - // This should never happen. - console.error("Version mismatch detected in model. File a ticket - this is a bug."); - console.error(`Expecting ${opData.v} == ${doc.v}`); - return callback('Internal error'); - } - - // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} - const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); - - return writeOp(docName, opData, function(error) { + // We'll need to transform the op to the current version of the document. This + // calls the callback immediately if opVersion == doc.v. + return getOps(docName, opData.v, doc.v, function (error, ops) { + let snapshot if (error) { - // The user should probably know about this. - console.warn(`Error writing ops to database: ${error}`); - return callback(error); + return callback(error) } - __guardMethod__(options.stats, 'writeOp', o => o.writeOp()); + if (doc.v - opData.v !== ops.length) { + // This should never happen. It indicates that we didn't get all the ops we + // asked for. Its important that the submitted op is correctly transformed. + console.error( + `Could not get old ops in model for document ${docName}` + ) + console.error( + `Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops` + ) + return callback('Internal error') + } - // This is needed when we emit the 'change' event, below. - const oldSnapshot = doc.snapshot; + if (ops.length > 0) { + try { + // If there's enough ops, it might be worth spinning this out into a webworker thread. + for (const oldOp of Array.from(ops)) { + // Dup detection works by sending the id(s) the op has been submitted with previously. + // If the id matches, we reject it. The client can also detect the op has been submitted + // already if it sees its own previous id in the ops it sees when it does catchup. + if ( + oldOp.meta.source && + opData.dupIfSource && + Array.from(opData.dupIfSource).includes(oldOp.meta.source) + ) { + return callback('Op already submitted') + } - // All the heavy lifting is now done. Finally, we'll update the cache with the new data - // and (maybe!) save a new document snapshot to the database. 
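// A sketch of the duplicate-detection contract enforced in the queue above: a
// client tags its ops with meta.source and, on retry, lists the source ids it
// may already have submitted in dupIfSource. Names and values are made up.
const retriedOp = {
  v: 4,
  op: [{ i: 'x', p: 0 }],
  meta: { source: 'client-42' },
  dupIfSource: ['client-42']
}
// If any op in the transform window [4, doc.v) carries
// meta.source === 'client-42', the queue answers 'Op already submitted'
// instead of applying retriedOp a second time.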
+ opData.op = doc.type.transform(opData.op, oldOp.op, 'left') + opData.v++ + } + } catch (error1) { + error = error1 + console.error(error.stack) + return callback(error.message) + } + } - doc.v = opData.v + 1; - doc.snapshot = snapshot; + try { + snapshot = doc.type.apply(doc.snapshot, opData.op) + } catch (error2) { + error = error2 + console.error(error.stack) + return callback(error.message) + } - doc.ops.push(opData); - if (db && (doc.ops.length > options.numCachedOps)) { doc.ops.shift(); } - - model.emit('applyOp', docName, opData, snapshot, oldSnapshot); - doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot); - - // The callback is called with the version of the document at which the op was applied. - // This is the op.v after transformation, and its doc.v - 1. - callback(null, opData.v); - - // I need a decent strategy here for deciding whether or not to save the snapshot. + // The op data should be at the current version, and the new document data should be at + // the next version. // - // The 'right' strategy looks something like "Store the snapshot whenever the snapshot - // is smaller than the accumulated op data". For now, I'll just store it every 20 - // ops or something. (Configurable with doc.committedVersion) - if (!doc.snapshotWriteLock && ((doc.committedVersion + options.opsBeforeCommit) <= doc.v)) { - return tryWriteSnapshot(docName, function(error) { - if (error) { return console.warn(`Error writing snapshot ${error}. This is nonfatal`); } - }); + // This should never happen in practice, but its a nice little check to make sure everything + // is hunky-dory. + if (opData.v !== doc.v) { + // This should never happen. + console.error( + 'Version mismatch detected in model. File a ticket - this is a bug.' + ) + console.error(`Expecting ${opData.v} == ${doc.v}`) + return callback('Internal error') } - }); - }); - }); + + // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + const writeOp = + (db != null ? db.writeOp : undefined) || + ((docName, newOpData, callback) => callback()) + + return writeOp(docName, opData, function (error) { + if (error) { + // The user should probably know about this. + console.warn(`Error writing ops to database: ${error}`) + return callback(error) + } + + __guardMethod__(options.stats, 'writeOp', (o) => o.writeOp()) + + // This is needed when we emit the 'change' event, below. + const oldSnapshot = doc.snapshot + + // All the heavy lifting is now done. Finally, we'll update the cache with the new data + // and (maybe!) save a new document snapshot to the database. + + doc.v = opData.v + 1 + doc.snapshot = snapshot + + doc.ops.push(opData) + if (db && doc.ops.length > options.numCachedOps) { + doc.ops.shift() + } + + model.emit('applyOp', docName, opData, snapshot, oldSnapshot) + doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot) + + // The callback is called with the version of the document at which the op was applied. + // This is the op.v after transformation, and its doc.v - 1. + callback(null, opData.v) + + // I need a decent strategy here for deciding whether or not to save the snapshot. + // + // The 'right' strategy looks something like "Store the snapshot whenever the snapshot + // is smaller than the accumulated op data". For now, I'll just store it every 20 + // ops or something. 
(Configurable with doc.committedVersion) + if ( + !doc.snapshotWriteLock && + doc.committedVersion + options.opsBeforeCommit <= doc.v + ) { + return tryWriteSnapshot(docName, function (error) { + if (error) { + return console.warn( + `Error writing snapshot ${error}. This is nonfatal` + ) + } + }) + } + }) + }) + }) // Add the data for the given docName to the cache. The named document shouldn't already // exist in the doc set. // // Returns the new doc. - const add = function(docName, error, data, committedVersion, ops, dbMeta) { - let callback, doc; - const callbacks = awaitingGetSnapshot[docName]; - delete awaitingGetSnapshot[docName]; + const add = function (docName, error, data, committedVersion, ops, dbMeta) { + let callback, doc + const callbacks = awaitingGetSnapshot[docName] + delete awaitingGetSnapshot[docName] if (error) { - if (callbacks) { for (callback of Array.from(callbacks)) { callback(error); } } + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(error) + } + } } else { - doc = (docs[docName] = { + doc = docs[docName] = { snapshot: data.snapshot, v: data.v, type: data.type, @@ -244,7 +294,7 @@ module.exports = (Model = function(db, options) { // Cache of ops ops: ops || [], - eventEmitter: new EventEmitter, + eventEmitter: new EventEmitter(), // Timer before the document will be invalidated from the cache (if the document has no // listeners) @@ -254,97 +304,121 @@ module.exports = (Model = function(db, options) { committedVersion: committedVersion != null ? committedVersion : data.v, snapshotWriteLock: false, dbMeta - }); + } - doc.opQueue = makeOpQueue(docName, doc); - - refreshReapingTimeout(docName); - model.emit('add', docName, data); - if (callbacks) { for (callback of Array.from(callbacks)) { callback(null, doc); } } + doc.opQueue = makeOpQueue(docName, doc) + + refreshReapingTimeout(docName) + model.emit('add', docName, data) + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(null, doc) + } + } } - return doc; - }; - + return doc + } + // This is a little helper wrapper around db.getOps. It does two things: // // - If there's no database set, it returns an error to the callback // - It adds version numbers to each op returned from the database // (These can be inferred from context so the DB doesn't store them, but its useful to have them). - const getOpsInternal = function(docName, start, end, callback) { - if (!db) { return (typeof callback === 'function' ? callback('Document does not exist') : undefined); } + const getOpsInternal = function (docName, start, end, callback) { + if (!db) { + return typeof callback === 'function' + ? callback('Document does not exist') + : undefined + } - return db.getOps(docName, start, end, function(error, ops) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return db.getOps(docName, start, end, function (error, ops) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - let v = start; - for (const op of Array.from(ops)) { op.v = v++; } + let v = start + for (const op of Array.from(ops)) { + op.v = v++ + } - return (typeof callback === 'function' ? callback(null, ops) : undefined); - }); - }; + return typeof callback === 'function' ? callback(null, ops) : undefined + }) + } // Load the named document into the cache. This function is re-entrant. 
// // The callback is called with (error, doc) - const load = function(docName, callback) { + const load = function (docName, callback) { if (docs[docName]) { // The document is already loaded. Return immediately. - __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')); - return callback(null, docs[docName]); + __guardMethod__(options.stats, 'cacheHit', (o) => + o.cacheHit('getSnapshot') + ) + return callback(null, docs[docName]) } // We're a memory store. If we don't have it, nobody does. - if (!db) { return callback('Document does not exist'); } + if (!db) { + return callback('Document does not exist') + } - const callbacks = awaitingGetSnapshot[docName]; + const callbacks = awaitingGetSnapshot[docName] // The document is being loaded already. Add ourselves as a callback. - if (callbacks) { return callbacks.push(callback); } + if (callbacks) { + return callbacks.push(callback) + } - __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot')); + __guardMethod__(options.stats, 'cacheMiss', (o1) => + o1.cacheMiss('getSnapshot') + ) // The document isn't loaded and isn't being loaded. Load it. - awaitingGetSnapshot[docName] = [callback]; - return db.getSnapshot(docName, function(error, data, dbMeta) { - if (error) { return add(docName, error); } - - const type = types[data.type]; - if (!type) { - console.warn(`Type '${data.type}' missing`); - return callback("Type not found"); + awaitingGetSnapshot[docName] = [callback] + return db.getSnapshot(docName, function (error, data, dbMeta) { + if (error) { + return add(docName, error) } - data.type = type; - const committedVersion = data.v; + const type = types[data.type] + if (!type) { + console.warn(`Type '${data.type}' missing`) + return callback('Type not found') + } + data.type = type + + const committedVersion = data.v // The server can close without saving the most recent document snapshot. // In this case, there are extra ops which need to be applied before // returning the snapshot. - return getOpsInternal(docName, data.v, null, function(error, ops) { - if (error) { return callback(error); } + return getOpsInternal(docName, data.v, null, function (error, ops) { + if (error) { + return callback(error) + } if (ops.length > 0) { - console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); + console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`) try { for (const op of Array.from(ops)) { - data.snapshot = type.apply(data.snapshot, op.op); - data.v++; + data.snapshot = type.apply(data.snapshot, op.op) + data.v++ } } catch (e) { // This should never happen - it indicates that whats in the // database is invalid. - console.error(`Op data invalid for ${docName}: ${e.stack}`); - return callback('Op data invalid'); + console.error(`Op data invalid for ${docName}: ${e.stack}`) + return callback('Op data invalid') } } - model.emit('load', docName, data); - return add(docName, error, data, committedVersion, ops, dbMeta); - }); - }); - }; + model.emit('load', docName, data) + return add(docName, error, data, committedVersion, ops, dbMeta) + }) + }) + } // This makes sure the cache contains a document. If the doc cache doesn't contain // a document, it is loaded from the database and stored. 
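// An end-to-end sketch of the Model API this file defines. Passing db = null
// keeps everything in memory; 'text' assumes the text OT type is registered
// in ../types, and the option values shown are illustrative.
const Model = require('./model')
const model = new Model(null, { reapTime: 3000, opsBeforeCommit: 20 })

model.create('doc1', 'text', function (error) {
  if (error) { throw new Error(error) }
  // Submit a text-type op at version 0: insert 'hi' at offset 0.
  model.applyOp('doc1', { v: 0, op: [{ i: 'hi', p: 0 }] }, function (error, v) {
    if (error) { throw new Error(error) }
    // v is the version the op was applied at (0); the doc is now at version 1.
    model.getSnapshot('doc1', function (error, data) {
      if (error) { throw new Error(error) }
      console.log(data.v, data.snapshot) // => 1 hi
    })
  })
})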
@@ -352,52 +426,75 @@ module.exports = (Model = function(db, options) { // Documents are stored so long as either: // - They have been accessed within the past #{PERIOD} // - At least one client has the document open - var refreshReapingTimeout = function(docName) { - const doc = docs[docName]; - if (!doc) { return; } + var refreshReapingTimeout = function (docName) { + const doc = docs[docName] + if (!doc) { + return + } // I want to let the clients list be updated before this is called. - return process.nextTick(function() { + return process.nextTick(function () { // This is an awkward way to find out the number of clients on a document. If this // causes performance issues, add a numClients field to the document. // // The first check is because its possible that between refreshReapingTimeout being called and this // event being fired, someone called delete() on the document and hence the doc is something else now. - if ((doc === docs[docName]) && - (doc.eventEmitter.listeners('op').length === 0) && - (db || options.forceReaping) && - (doc.opQueue.busy === false)) { - - let reapTimer; - clearTimeout(doc.reapTimer); - return doc.reapTimer = (reapTimer = setTimeout(() => tryWriteSnapshot(docName, function() { - // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - // in the middle of applying an operation, don't reap. - if ((docs[docName].reapTimer === reapTimer) && (doc.opQueue.busy === false)) { return delete docs[docName]; } - }) - , options.reapTime)); + if ( + doc === docs[docName] && + doc.eventEmitter.listeners('op').length === 0 && + (db || options.forceReaping) && + doc.opQueue.busy === false + ) { + let reapTimer + clearTimeout(doc.reapTimer) + return (doc.reapTimer = reapTimer = setTimeout( + () => + tryWriteSnapshot(docName, function () { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ( + docs[docName].reapTimer === reapTimer && + doc.opQueue.busy === false + ) { + return delete docs[docName] + } + }), + options.reapTime + )) } - }); - }; + }) + } - var tryWriteSnapshot = function(docName, callback) { - if (!db) { return (typeof callback === 'function' ? callback() : undefined); } + var tryWriteSnapshot = function (docName, callback) { + if (!db) { + return typeof callback === 'function' ? callback() : undefined + } - const doc = docs[docName]; + const doc = docs[docName] // The doc is closed - if (!doc) { return (typeof callback === 'function' ? callback() : undefined); } + if (!doc) { + return typeof callback === 'function' ? callback() : undefined + } // The document is already saved. - if (doc.committedVersion === doc.v) { return (typeof callback === 'function' ? callback() : undefined); } + if (doc.committedVersion === doc.v) { + return typeof callback === 'function' ? callback() : undefined + } - if (doc.snapshotWriteLock) { return (typeof callback === 'function' ? callback('Another snapshot write is in progress') : undefined); } + if (doc.snapshotWriteLock) { + return typeof callback === 'function' + ? callback('Another snapshot write is in progress') + : undefined + } - doc.snapshotWriteLock = true; + doc.snapshotWriteLock = true - __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()); + __guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot()) - const writeSnapshot = (db != null ? 
db.writeSnapshot : undefined) || ((docName, docData, dbMeta, callback) => callback()); + const writeSnapshot = + (db != null ? db.writeSnapshot : undefined) || + ((docName, docData, dbMeta, callback) => callback()) const data = { v: doc.v, @@ -405,87 +502,107 @@ module.exports = (Model = function(db, options) { snapshot: doc.snapshot, // The database doesn't know about object types. type: doc.type.name - }; + } // Commit snapshot. - return writeSnapshot(docName, data, doc.dbMeta, function(error, dbMeta) { - doc.snapshotWriteLock = false; + return writeSnapshot(docName, data, doc.dbMeta, function (error, dbMeta) { + doc.snapshotWriteLock = false // We have to use data.v here because the version in the doc could // have been updated between the call to writeSnapshot() and now. - doc.committedVersion = data.v; - doc.dbMeta = dbMeta; + doc.committedVersion = data.v + doc.dbMeta = dbMeta - return (typeof callback === 'function' ? callback(error) : undefined); - }); - }; + return typeof callback === 'function' ? callback(error) : undefined + }) + } // *** Model interface methods // Create a new document. // // data should be {snapshot, type, [meta]}. The version of a new document is 0. - this.create = function(docName, type, meta, callback) { - if (typeof meta === 'function') { [meta, callback] = Array.from([{}, meta]); } + this.create = function (docName, type, meta, callback) { + if (typeof meta === 'function') { + ;[meta, callback] = Array.from([{}, meta]) + } - if (docName.match(/\//)) { return (typeof callback === 'function' ? callback('Invalid document name') : undefined); } - if (docs[docName]) { return (typeof callback === 'function' ? callback('Document already exists') : undefined); } + if (docName.match(/\//)) { + return typeof callback === 'function' + ? callback('Invalid document name') + : undefined + } + if (docs[docName]) { + return typeof callback === 'function' + ? callback('Document already exists') + : undefined + } - if (typeof type === 'string') { type = types[type]; } - if (!type) { return (typeof callback === 'function' ? callback('Type not found') : undefined); } + if (typeof type === 'string') { + type = types[type] + } + if (!type) { + return typeof callback === 'function' + ? callback('Type not found') + : undefined + } const data = { - snapshot:type.create(), - type:type.name, - meta:meta || {}, - v:0 - }; + snapshot: type.create(), + type: type.name, + meta: meta || {}, + v: 0 + } - const done = function(error, dbMeta) { + const done = function (error, dbMeta) { // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something. - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } // From here on we'll store the object version of the type name. - data.type = type; - add(docName, null, data, 0, [], dbMeta); - model.emit('create', docName, data); - return (typeof callback === 'function' ? callback() : undefined); - }; + data.type = type + add(docName, null, data, 0, [], dbMeta) + model.emit('create', docName, data) + return typeof callback === 'function' ? callback() : undefined + } if (db) { - return db.create(docName, data, done); + return db.create(docName, data, done) } else { - return done(); + return done() } - }; + } // Perminantly deletes the specified document. // If listeners are attached, they are removed. - // + // // The callback is called with (error) if there was an error. 
If error is null / undefined, the // document was deleted. // // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the // deletion. Subsequent op submissions will fail). - this.delete = function(docName, callback) { - const doc = docs[docName]; + this.delete = function (docName, callback) { + const doc = docs[docName] if (doc) { - clearTimeout(doc.reapTimer); - delete docs[docName]; + clearTimeout(doc.reapTimer) + delete docs[docName] } - const done = function(error) { - if (!error) { model.emit('delete', docName); } - return (typeof callback === 'function' ? callback(error) : undefined); - }; + const done = function (error) { + if (!error) { + model.emit('delete', docName) + } + return typeof callback === 'function' ? callback(error) : undefined + } if (db) { - return db.delete(docName, doc != null ? doc.dbMeta : undefined, done); + return db.delete(docName, doc != null ? doc.dbMeta : undefined, done) } else { - return done((!doc ? 'Document does not exist' : undefined)); + return done(!doc ? 'Document does not exist' : undefined) } - }; + } // This gets all operations from [start...end]. (That is, its not inclusive.) // @@ -502,102 +619,139 @@ module.exports = (Model = function(db, options) { // // Use getVersion() to determine if a document actually exists, if thats what you're // after. - this.getOps = (getOps = function(docName, start, end, callback) { + this.getOps = getOps = function (docName, start, end, callback) { // getOps will only use the op cache if its there. It won't fill the op cache in. - if (!(start >= 0)) { throw new Error('start must be 0+'); } + if (!(start >= 0)) { + throw new Error('start must be 0+') + } - if (typeof end === 'function') { [end, callback] = Array.from([null, end]); } + if (typeof end === 'function') { + ;[end, callback] = Array.from([null, end]) + } - const ops = docs[docName] != null ? docs[docName].ops : undefined; + const ops = docs[docName] != null ? docs[docName].ops : undefined if (ops) { - const version = docs[docName].v; + const version = docs[docName].v // Ops contains an array of ops. The last op in the list is the last op applied - if (end == null) { end = version; } - start = Math.min(start, end); + if (end == null) { + end = version + } + start = Math.min(start, end) - if (start === end) { return callback(null, []); } + if (start === end) { + return callback(null, []) + } // Base is the version number of the oldest op we have cached - const base = version - ops.length; + const base = version - ops.length // If the database is null, we'll trim to the ops we do have and hope thats enough. - if ((start >= base) || (db === null)) { - refreshReapingTimeout(docName); + if (start >= base || db === null) { + refreshReapingTimeout(docName) if (options.stats != null) { - options.stats.cacheHit('getOps'); + options.stats.cacheHit('getOps') } - return callback(null, ops.slice((start - base), (end - base))); + return callback(null, ops.slice(start - base, end - base)) } } if (options.stats != null) { - options.stats.cacheMiss('getOps'); + options.stats.cacheMiss('getOps') } - return getOpsInternal(docName, start, end, callback); - }); + return getOpsInternal(docName, start, end, callback) + } // Gets the snapshot data for the specified document. // getSnapshot(docName, callback) // Callback is called with (error, {v: , type: , snapshot: , meta: }) - this.getSnapshot = (docName, callback) => load(docName, (error, doc) => callback(error, doc ? 
{v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} : undefined)); + this.getSnapshot = (docName, callback) => + load(docName, (error, doc) => + callback( + error, + doc + ? { v: doc.v, type: doc.type, snapshot: doc.snapshot, meta: doc.meta } + : undefined + ) + ) // Gets the latest version # of the document. // getVersion(docName, callback) // callback is called with (error, version). - this.getVersion = (docName, callback) => load(docName, (error, doc) => callback(error, doc != null ? doc.v : undefined)); + this.getVersion = (docName, callback) => + load(docName, (error, doc) => + callback(error, doc != null ? doc.v : undefined) + ) // Apply an op to the specified document. // The callback is passed (error, applied version #) // opData = {op:op, v:v, meta:metadata} - // + // // Ops are queued before being applied so that the following code applies op C before op B: // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB // model.applyOp 'doc', OPC - this.applyOp = (docName, opData, callback) => // All the logic for this is in makeOpQueue, above. - load(docName, function(error, doc) { - if (error) { return callback(error); } + this.applyOp = ( + docName, + opData, + callback // All the logic for this is in makeOpQueue, above. + ) => + load(docName, function (error, doc) { + if (error) { + return callback(error) + } - return process.nextTick(() => doc.opQueue(opData, function(error, newVersion) { - refreshReapingTimeout(docName); - return (typeof callback === 'function' ? callback(error, newVersion) : undefined); - })); - }); + return process.nextTick(() => + doc.opQueue(opData, function (error, newVersion) { + refreshReapingTimeout(docName) + return typeof callback === 'function' + ? callback(error, newVersion) + : undefined + }) + ) + }) // TODO: store (some) metadata in DB // TODO: op and meta should be combineable in the op that gets sent - this.applyMetaOp = function(docName, metaOpData, callback) { - const {path, value} = metaOpData.meta; - - if (!isArray(path)) { return (typeof callback === 'function' ? callback("path should be an array") : undefined); } + this.applyMetaOp = function (docName, metaOpData, callback) { + const { path, value } = metaOpData.meta - return load(docName, function(error, doc) { + if (!isArray(path)) { + return typeof callback === 'function' + ? callback('path should be an array') + : undefined + } + + return load(docName, function (error, doc) { if (error != null) { - return (typeof callback === 'function' ? callback(error) : undefined); + return typeof callback === 'function' ? callback(error) : undefined } else { - let applied = false; + let applied = false switch (path[0]) { case 'shout': - doc.eventEmitter.emit('op', metaOpData); - applied = true; - break; + doc.eventEmitter.emit('op', metaOpData) + applied = true + break } - if (applied) { model.emit('applyMetaOp', docName, path, value); } - return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + if (applied) { + model.emit('applyMetaOp', docName, path, value) + } + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined } - }); - }; + }) + } // Listen to all ops from the specified version. If version is in the past, all // ops since that version are sent immediately to the listener. // // The callback is called once the listener is attached, but before any ops have been passed // to the listener. - // + // // This will _not_ edit the document metadata. // // If there are any listeners, we don't purge the document from the cache. 
But be aware, this behaviour @@ -609,98 +763,123 @@ module.exports = (Model = function(db, options) { // listener is called with (opData) each time an op is applied. // // callback(error, openedVersion) - this.listen = function(docName, version, listener, callback) { - if (typeof version === 'function') { [version, listener, callback] = Array.from([null, version, listener]); } + this.listen = function (docName, version, listener, callback) { + if (typeof version === 'function') { + ;[version, listener, callback] = Array.from([null, version, listener]) + } - return load(docName, function(error, doc) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return load(docName, function (error, doc) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - clearTimeout(doc.reapTimer); + clearTimeout(doc.reapTimer) if (version != null) { - return getOps(docName, version, null, function(error, data) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return getOps(docName, version, null, function (error, data) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - doc.eventEmitter.on('op', listener); + doc.eventEmitter.on('op', listener) if (typeof callback === 'function') { - callback(null, version); + callback(null, version) } return (() => { - const result = []; + const result = [] for (const op of Array.from(data)) { - var needle; - listener(op); + var needle + listener(op) // The listener may well remove itself during the catchup phase. If this happens, break early. // This is done in a quite inefficient way. (O(n) where n = #listeners on doc) - if ((needle = listener, !Array.from(doc.eventEmitter.listeners('op')).includes(needle))) { break; } else { - result.push(undefined); + if ( + ((needle = listener), + !Array.from(doc.eventEmitter.listeners('op')).includes(needle)) + ) { + break + } else { + result.push(undefined) } } - return result; - })(); - }); - - } else { // Version is null / undefined. Just add the listener. - doc.eventEmitter.on('op', listener); - return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + return result + })() + }) + } else { + // Version is null / undefined. Just add the listener. + doc.eventEmitter.on('op', listener) + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined } - }); - }; + }) + } // Remove a listener for a particular document. // // removeListener(docName, listener) // // This is synchronous. - this.removeListener = function(docName, listener) { + this.removeListener = function (docName, listener) { // The document should already be loaded. - const doc = docs[docName]; - if (!doc) { throw new Error('removeListener called but document not loaded'); } + const doc = docs[docName] + if (!doc) { + throw new Error('removeListener called but document not loaded') + } - doc.eventEmitter.removeListener('op', listener); - return refreshReapingTimeout(docName); - }; + doc.eventEmitter.removeListener('op', listener) + return refreshReapingTimeout(docName) + } // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - // sharejs will happily replay uncommitted ops when documents are re-opened anyway. - this.flush = function(callback) { - if (!db) { return (typeof callback === 'function' ? callback() : undefined); } + this.flush = function (callback) { + if (!db) { + return typeof callback === 'function' ? 
callback() : undefined + } - let pendingWrites = 0; + let pendingWrites = 0 for (const docName in docs) { - const doc = docs[docName]; + const doc = docs[docName] if (doc.committedVersion < doc.v) { - pendingWrites++; + pendingWrites++ // I'm hoping writeSnapshot will always happen in another thread. - tryWriteSnapshot(docName, () => process.nextTick(function() { - pendingWrites--; - if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } - })); + tryWriteSnapshot(docName, () => + process.nextTick(function () { + pendingWrites-- + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + }) + ) } } // If nothing was queued, terminate immediately. - if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } - }; + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + } // Close the database connection. This is needed so nodejs can shut down cleanly. - this.closeDb = function() { - __guardMethod__(db, 'close', o => o.close()); - return db = null; - }; - -}); + this.closeDb = function () { + __guardMethod__(db, 'close', (o) => o.close()) + return (db = null) + } +} // Model inherits from EventEmitter. -Model.prototype = new EventEmitter; - +Model.prototype = new EventEmitter() function __guardMethod__(obj, methodName, transform) { - if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') { - return transform(obj, methodName); + if ( + typeof obj !== 'undefined' && + obj !== null && + typeof obj[methodName] === 'function' + ) { + return transform(obj, methodName) } else { - return undefined; + return undefined } -} \ No newline at end of file +} diff --git a/services/document-updater/app/js/sharejs/server/model.js b/services/document-updater/app/js/sharejs/server/model.js index 485420c040..5bed90faa3 100644 --- a/services/document-updater/app/js/sharejs/server/model.js +++ b/services/document-updater/app/js/sharejs/server/model.js @@ -21,13 +21,13 @@ // // Actual storage is handled by the database wrappers in db/*, wrapped by DocCache -let Model; -const {EventEmitter} = require('events'); +let Model +const { EventEmitter } = require('events') -const queue = require('./syncqueue'); -const types = require('../types'); +const queue = require('./syncqueue') +const types = require('../types') -const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; +const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' // This constructor creates a new Model object. There will be one model object // per server context. @@ -41,15 +41,19 @@ const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; // The model is an event emitter. It emits the following events: // // create(docName, data): A document has been created with the specified name & data -module.exports = (Model = function(db, options) { +module.exports = Model = function (db, options) { // db can be null if the user doesn't want persistance. - let getOps; - if (!(this instanceof Model)) { return new Model(db, options); } + let getOps + if (!(this instanceof Model)) { + return new Model(db, options) + } - const model = this; + const model = this - if (options == null) { options = {}; } + if (options == null) { + options = {} + } // This is a cache of 'live' documents. 
// @@ -78,168 +82,217 @@ module.exports = (Model = function(db, options) { // // In any case, the API to model is designed such that if we want to change that later // it should be pretty easy to do so without any external-to-the-model code changes. - const docs = {}; + const docs = {} // This is a map from docName -> [callback]. It is used when a document hasn't been // cached and multiple getSnapshot() / getVersion() requests come in. All requests // are added to the callback list and called when db.getSnapshot() returns. // // callback(error, snapshot data) - const awaitingGetSnapshot = {}; + const awaitingGetSnapshot = {} // The time that documents which no clients have open will stay in the cache. // Should be > 0. - if (options.reapTime == null) { options.reapTime = 3000; } + if (options.reapTime == null) { + options.reapTime = 3000 + } // The number of operations the cache holds before reusing the space - if (options.numCachedOps == null) { options.numCachedOps = 10; } + if (options.numCachedOps == null) { + options.numCachedOps = 10 + } // This option forces documents to be reaped, even when there's no database backend. // This is useful when you don't care about persistance and don't want to gradually // fill memory. // // You might want to set reapTime to a day or something. - if (options.forceReaping == null) { options.forceReaping = false; } + if (options.forceReaping == null) { + options.forceReaping = false + } // Until I come up with a better strategy, we'll save a copy of the document snapshot // to the database every ~20 submitted ops. - if (options.opsBeforeCommit == null) { options.opsBeforeCommit = 20; } + if (options.opsBeforeCommit == null) { + options.opsBeforeCommit = 20 + } // It takes some processing time to transform client ops. The server will punt ops back to the // client to transform if they're too old. - if (options.maximumAge == null) { options.maximumAge = 40; } + if (options.maximumAge == null) { + options.maximumAge = 40 + } // **** Cache API methods // Its important that all ops are applied in order. This helper method creates the op submission queue // for a single document. This contains the logic for transforming & applying ops. - const makeOpQueue = (docName, doc) => queue(function(opData, callback) { - if (!(opData.v >= 0)) { return callback('Version missing'); } - if (opData.v > doc.v) { return callback('Op at future version'); } - - // Punt the transforming work back to the client if the op is too old. - if ((opData.v + options.maximumAge) < doc.v) { return callback('Op too old'); } - - if (!opData.meta) { opData.meta = {}; } - opData.meta.ts = Date.now(); - - // We'll need to transform the op to the current version of the document. This - // calls the callback immediately if opVersion == doc.v. - return getOps(docName, opData.v, doc.v, function(error, ops) { - let snapshot; - if (error) { return callback(error); } - - if ((doc.v - opData.v) !== ops.length) { - // This should never happen. It indicates that we didn't get all the ops we - // asked for. Its important that the submitted op is correctly transformed. 
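// Illustrative arithmetic for the invariant checked here: an op submitted at
// version opData.v must be transformed against every op in [opData.v, doc.v),
// so getOps() has to return exactly doc.v - opData.v ops. Values made up:
const opVersion = 7
const docVersion = 10
const needed = docVersion - opVersion // => 3: the ops at versions 7, 8 and 9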
- console.error(`Could not get old ops in model for document ${docName}`); - console.error(`Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`); - return callback('Internal error'); + const makeOpQueue = (docName, doc) => + queue(function (opData, callback) { + if (!(opData.v >= 0)) { + return callback('Version missing') + } + if (opData.v > doc.v) { + return callback('Op at future version') } - if (ops.length > 0) { - try { - // If there's enough ops, it might be worth spinning this out into a webworker thread. - for (const oldOp of Array.from(ops)) { - // Dup detection works by sending the id(s) the op has been submitted with previously. - // If the id matches, we reject it. The client can also detect the op has been submitted - // already if it sees its own previous id in the ops it sees when it does catchup. - if (oldOp.meta.source && opData.dupIfSource && Array.from(opData.dupIfSource).includes(oldOp.meta.source)) { - return callback('Op already submitted'); - } - - opData.op = doc.type.transform(opData.op, oldOp.op, 'left'); - opData.v++; - } - } catch (error1) { - error = error1; - console.error(error.stack); - return callback(error.message); - } + // Punt the transforming work back to the client if the op is too old. + if (opData.v + options.maximumAge < doc.v) { + return callback('Op too old') } - try { - snapshot = doc.type.apply(doc.snapshot, opData.op); - } catch (error2) { - error = error2; - console.error(error.stack); - return callback(error.message); - } - - if ((options.maxDocLength != null) && (doc.snapshot.length > options.maxDocLength)) { - return callback("Update takes doc over max doc size"); + if (!opData.meta) { + opData.meta = {} } + opData.meta.ts = Date.now() - // The op data should be at the current version, and the new document data should be at - // the next version. - // - // This should never happen in practice, but its a nice little check to make sure everything - // is hunky-dory. - if (opData.v !== doc.v) { - // This should never happen. - console.error("Version mismatch detected in model. File a ticket - this is a bug."); - console.error(`Expecting ${opData.v} == ${doc.v}`); - return callback('Internal error'); - } - - // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} - const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); - - return writeOp(docName, opData, function(error) { + // We'll need to transform the op to the current version of the document. This + // calls the callback immediately if opVersion == doc.v. + return getOps(docName, opData.v, doc.v, function (error, ops) { + let snapshot if (error) { - // The user should probably know about this. - console.warn(`Error writing ops to database: ${error}`); - return callback(error); + return callback(error) } - __guardMethod__(options.stats, 'writeOp', o => o.writeOp()); + if (doc.v - opData.v !== ops.length) { + // This should never happen. It indicates that we didn't get all the ops we + // asked for. Its important that the submitted op is correctly transformed. + console.error( + `Could not get old ops in model for document ${docName}` + ) + console.error( + `Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops` + ) + return callback('Internal error') + } - // This is needed when we emit the 'change' event, below. - const oldSnapshot = doc.snapshot; + if (ops.length > 0) { + try { + // If there's enough ops, it might be worth spinning this out into a webworker thread. 
+ for (const oldOp of Array.from(ops)) { + // Dup detection works by sending the id(s) the op has been submitted with previously. + // If the id matches, we reject it. The client can also detect the op has been submitted + // already if it sees its own previous id in the ops it sees when it does catchup. + if ( + oldOp.meta.source && + opData.dupIfSource && + Array.from(opData.dupIfSource).includes(oldOp.meta.source) + ) { + return callback('Op already submitted') + } - // All the heavy lifting is now done. Finally, we'll update the cache with the new data - // and (maybe!) save a new document snapshot to the database. + opData.op = doc.type.transform(opData.op, oldOp.op, 'left') + opData.v++ + } + } catch (error1) { + error = error1 + console.error(error.stack) + return callback(error.message) + } + } - doc.v = opData.v + 1; - doc.snapshot = snapshot; + try { + snapshot = doc.type.apply(doc.snapshot, opData.op) + } catch (error2) { + error = error2 + console.error(error.stack) + return callback(error.message) + } - doc.ops.push(opData); - if (db && (doc.ops.length > options.numCachedOps)) { doc.ops.shift(); } + if ( + options.maxDocLength != null && + doc.snapshot.length > options.maxDocLength + ) { + return callback('Update takes doc over max doc size') + } - model.emit('applyOp', docName, opData, snapshot, oldSnapshot); - doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot); - - // The callback is called with the version of the document at which the op was applied. - // This is the op.v after transformation, and its doc.v - 1. - callback(null, opData.v); - - // I need a decent strategy here for deciding whether or not to save the snapshot. + // The op data should be at the current version, and the new document data should be at + // the next version. // - // The 'right' strategy looks something like "Store the snapshot whenever the snapshot - // is smaller than the accumulated op data". For now, I'll just store it every 20 - // ops or something. (Configurable with doc.committedVersion) - if (!doc.snapshotWriteLock && ((doc.committedVersion + options.opsBeforeCommit) <= doc.v)) { - return tryWriteSnapshot(docName, function(error) { - if (error) { return console.warn(`Error writing snapshot ${error}. This is nonfatal`); } - }); + // This should never happen in practice, but its a nice little check to make sure everything + // is hunky-dory. + if (opData.v !== doc.v) { + // This should never happen. + console.error( + 'Version mismatch detected in model. File a ticket - this is a bug.' + ) + console.error(`Expecting ${opData.v} == ${doc.v}`) + return callback('Internal error') } - }); - }); - }); + + // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + const writeOp = + (db != null ? db.writeOp : undefined) || + ((docName, newOpData, callback) => callback()) + + return writeOp(docName, opData, function (error) { + if (error) { + // The user should probably know about this. + console.warn(`Error writing ops to database: ${error}`) + return callback(error) + } + + __guardMethod__(options.stats, 'writeOp', (o) => o.writeOp()) + + // This is needed when we emit the 'change' event, below. + const oldSnapshot = doc.snapshot + + // All the heavy lifting is now done. Finally, we'll update the cache with the new data + // and (maybe!) save a new document snapshot to the database. 
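The "(maybe!)" is the opsBeforeCommit policy visible a little further down: a snapshot write is only attempted once the in-memory version runs far enough ahead of the last committed one, and never while another snapshot write holds the lock. As a standalone predicate (field names taken from the surrounding code):

    // Sketch of the snapshot-commit decision used below.
    function shouldWriteSnapshot(doc, opsBeforeCommit) {
      // Skip if a write is already in flight; otherwise commit once the live
      // version is opsBeforeCommit past the last committed version.
      return !doc.snapshotWriteLock &&
             doc.committedVersion + opsBeforeCommit <= doc.v
    }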
+ + doc.v = opData.v + 1 + doc.snapshot = snapshot + + doc.ops.push(opData) + if (db && doc.ops.length > options.numCachedOps) { + doc.ops.shift() + } + + model.emit('applyOp', docName, opData, snapshot, oldSnapshot) + doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot) + + // The callback is called with the version of the document at which the op was applied. + // This is the op.v after transformation, and its doc.v - 1. + callback(null, opData.v) + + // I need a decent strategy here for deciding whether or not to save the snapshot. + // + // The 'right' strategy looks something like "Store the snapshot whenever the snapshot + // is smaller than the accumulated op data". For now, I'll just store it every 20 + // ops or something. (Configurable with doc.committedVersion) + if ( + !doc.snapshotWriteLock && + doc.committedVersion + options.opsBeforeCommit <= doc.v + ) { + return tryWriteSnapshot(docName, function (error) { + if (error) { + return console.warn( + `Error writing snapshot ${error}. This is nonfatal` + ) + } + }) + } + }) + }) + }) // Add the data for the given docName to the cache. The named document shouldn't already // exist in the doc set. // // Returns the new doc. - const add = function(docName, error, data, committedVersion, ops, dbMeta) { - let callback, doc; - const callbacks = awaitingGetSnapshot[docName]; - delete awaitingGetSnapshot[docName]; + const add = function (docName, error, data, committedVersion, ops, dbMeta) { + let callback, doc + const callbacks = awaitingGetSnapshot[docName] + delete awaitingGetSnapshot[docName] if (error) { - if (callbacks) { for (callback of Array.from(callbacks)) { callback(error); } } + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(error) + } + } } else { - doc = (docs[docName] = { + doc = docs[docName] = { snapshot: data.snapshot, v: data.v, type: data.type, @@ -248,7 +301,7 @@ module.exports = (Model = function(db, options) { // Cache of ops ops: ops || [], - eventEmitter: new EventEmitter, + eventEmitter: new EventEmitter(), // Timer before the document will be invalidated from the cache (if the document has no // listeners) @@ -258,97 +311,121 @@ module.exports = (Model = function(db, options) { committedVersion: committedVersion != null ? committedVersion : data.v, snapshotWriteLock: false, dbMeta - }); + } - doc.opQueue = makeOpQueue(docName, doc); - - refreshReapingTimeout(docName); - model.emit('add', docName, data); - if (callbacks) { for (callback of Array.from(callbacks)) { callback(null, doc); } } + doc.opQueue = makeOpQueue(docName, doc) + + refreshReapingTimeout(docName) + model.emit('add', docName, data) + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(null, doc) + } + } } - return doc; - }; - + return doc + } + // This is a little helper wrapper around db.getOps. It does two things: // // - If there's no database set, it returns an error to the callback // - It adds version numbers to each op returned from the database // (These can be inferred from context so the DB doesn't store them, but its useful to have them). - const getOpsInternal = function(docName, start, end, callback) { - if (!db) { return (typeof callback === 'function' ? callback('Document does not exist') : undefined); } + const getOpsInternal = function (docName, start, end, callback) { + if (!db) { + return typeof callback === 'function' + ? 
callback('Document does not exist') + : undefined + } - return db.getOps(docName, start, end, function(error, ops) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return db.getOps(docName, start, end, function (error, ops) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - let v = start; - for (const op of Array.from(ops)) { op.v = v++; } + let v = start + for (const op of Array.from(ops)) { + op.v = v++ + } - return (typeof callback === 'function' ? callback(null, ops) : undefined); - }); - }; + return typeof callback === 'function' ? callback(null, ops) : undefined + }) + } // Load the named document into the cache. This function is re-entrant. // // The callback is called with (error, doc) - const load = function(docName, callback) { + const load = function (docName, callback) { if (docs[docName]) { // The document is already loaded. Return immediately. - __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')); - return callback(null, docs[docName]); + __guardMethod__(options.stats, 'cacheHit', (o) => + o.cacheHit('getSnapshot') + ) + return callback(null, docs[docName]) } // We're a memory store. If we don't have it, nobody does. - if (!db) { return callback('Document does not exist'); } + if (!db) { + return callback('Document does not exist') + } - const callbacks = awaitingGetSnapshot[docName]; + const callbacks = awaitingGetSnapshot[docName] // The document is being loaded already. Add ourselves as a callback. - if (callbacks) { return callbacks.push(callback); } + if (callbacks) { + return callbacks.push(callback) + } - __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot')); + __guardMethod__(options.stats, 'cacheMiss', (o1) => + o1.cacheMiss('getSnapshot') + ) // The document isn't loaded and isn't being loaded. Load it. - awaitingGetSnapshot[docName] = [callback]; - return db.getSnapshot(docName, function(error, data, dbMeta) { - if (error) { return add(docName, error); } - - const type = types[data.type]; - if (!type) { - console.warn(`Type '${data.type}' missing`); - return callback("Type not found"); + awaitingGetSnapshot[docName] = [callback] + return db.getSnapshot(docName, function (error, data, dbMeta) { + if (error) { + return add(docName, error) } - data.type = type; - const committedVersion = data.v; + const type = types[data.type] + if (!type) { + console.warn(`Type '${data.type}' missing`) + return callback('Type not found') + } + data.type = type + + const committedVersion = data.v // The server can close without saving the most recent document snapshot. // In this case, there are extra ops which need to be applied before // returning the snapshot. - return getOpsInternal(docName, data.v, null, function(error, ops) { - if (error) { return callback(error); } + return getOpsInternal(docName, data.v, null, function (error, ops) { + if (error) { + return callback(error) + } if (ops.length > 0) { - console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); + console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`) try { for (const op of Array.from(ops)) { - data.snapshot = type.apply(data.snapshot, op.op); - data.v++; + data.snapshot = type.apply(data.snapshot, op.op) + data.v++ } } catch (e) { // This should never happen - it indicates that whats in the // database is invalid. 
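load() tolerates a stale snapshot: any ops the database holds past data.v are replayed into the snapshot before the doc is cached, which is the catchup loop above. A distilled version of that replay (names from the surrounding code; the try/catch and logging are elided):

    // Sketch: the snapshot on disk may lag the op log; fold the extras in.
    function replayTrailingOps(type, data, ops) {
      for (const op of ops) {
        data.snapshot = type.apply(data.snapshot, op.op)
        data.v++
      }
      return data // now at version data.v == original v + ops.length
    }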
- console.error(`Op data invalid for ${docName}: ${e.stack}`); - return callback('Op data invalid'); + console.error(`Op data invalid for ${docName}: ${e.stack}`) + return callback('Op data invalid') } } - model.emit('load', docName, data); - return add(docName, error, data, committedVersion, ops, dbMeta); - }); - }); - }; + model.emit('load', docName, data) + return add(docName, error, data, committedVersion, ops, dbMeta) + }) + }) + } // This makes sure the cache contains a document. If the doc cache doesn't contain // a document, it is loaded from the database and stored. @@ -356,52 +433,75 @@ module.exports = (Model = function(db, options) { // Documents are stored so long as either: // - They have been accessed within the past #{PERIOD} // - At least one client has the document open - var refreshReapingTimeout = function(docName) { - const doc = docs[docName]; - if (!doc) { return; } + var refreshReapingTimeout = function (docName) { + const doc = docs[docName] + if (!doc) { + return + } // I want to let the clients list be updated before this is called. - return process.nextTick(function() { + return process.nextTick(function () { // This is an awkward way to find out the number of clients on a document. If this // causes performance issues, add a numClients field to the document. // // The first check is because its possible that between refreshReapingTimeout being called and this // event being fired, someone called delete() on the document and hence the doc is something else now. - if ((doc === docs[docName]) && - (doc.eventEmitter.listeners('op').length === 0) && - (db || options.forceReaping) && - (doc.opQueue.busy === false)) { - - let reapTimer; - clearTimeout(doc.reapTimer); - return doc.reapTimer = (reapTimer = setTimeout(() => tryWriteSnapshot(docName, function() { - // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - // in the middle of applying an operation, don't reap. - if ((docs[docName].reapTimer === reapTimer) && (doc.opQueue.busy === false)) { return delete docs[docName]; } - }) - , options.reapTime)); + if ( + doc === docs[docName] && + doc.eventEmitter.listeners('op').length === 0 && + (db || options.forceReaping) && + doc.opQueue.busy === false + ) { + let reapTimer + clearTimeout(doc.reapTimer) + return (doc.reapTimer = reapTimer = setTimeout( + () => + tryWriteSnapshot(docName, function () { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ( + docs[docName].reapTimer === reapTimer && + doc.opQueue.busy === false + ) { + return delete docs[docName] + } + }), + options.reapTime + )) } - }); - }; + }) + } - var tryWriteSnapshot = function(docName, callback) { - if (!db) { return (typeof callback === 'function' ? callback() : undefined); } + var tryWriteSnapshot = function (docName, callback) { + if (!db) { + return typeof callback === 'function' ? callback() : undefined + } - const doc = docs[docName]; + const doc = docs[docName] // The doc is closed - if (!doc) { return (typeof callback === 'function' ? callback() : undefined); } + if (!doc) { + return typeof callback === 'function' ? callback() : undefined + } // The document is already saved. - if (doc.committedVersion === doc.v) { return (typeof callback === 'function' ? callback() : undefined); } + if (doc.committedVersion === doc.v) { + return typeof callback === 'function' ? 
callback() : undefined + } - if (doc.snapshotWriteLock) { return (typeof callback === 'function' ? callback('Another snapshot write is in progress') : undefined); } + if (doc.snapshotWriteLock) { + return typeof callback === 'function' + ? callback('Another snapshot write is in progress') + : undefined + } - doc.snapshotWriteLock = true; + doc.snapshotWriteLock = true - __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()); + __guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot()) - const writeSnapshot = (db != null ? db.writeSnapshot : undefined) || ((docName, docData, dbMeta, callback) => callback()); + const writeSnapshot = + (db != null ? db.writeSnapshot : undefined) || + ((docName, docData, dbMeta, callback) => callback()) const data = { v: doc.v, @@ -409,87 +509,107 @@ module.exports = (Model = function(db, options) { snapshot: doc.snapshot, // The database doesn't know about object types. type: doc.type.name - }; + } // Commit snapshot. - return writeSnapshot(docName, data, doc.dbMeta, function(error, dbMeta) { - doc.snapshotWriteLock = false; + return writeSnapshot(docName, data, doc.dbMeta, function (error, dbMeta) { + doc.snapshotWriteLock = false // We have to use data.v here because the version in the doc could // have been updated between the call to writeSnapshot() and now. - doc.committedVersion = data.v; - doc.dbMeta = dbMeta; + doc.committedVersion = data.v + doc.dbMeta = dbMeta - return (typeof callback === 'function' ? callback(error) : undefined); - }); - }; + return typeof callback === 'function' ? callback(error) : undefined + }) + } // *** Model interface methods // Create a new document. // // data should be {snapshot, type, [meta]}. The version of a new document is 0. - this.create = function(docName, type, meta, callback) { - if (typeof meta === 'function') { [meta, callback] = Array.from([{}, meta]); } + this.create = function (docName, type, meta, callback) { + if (typeof meta === 'function') { + ;[meta, callback] = Array.from([{}, meta]) + } - if (docName.match(/\//)) { return (typeof callback === 'function' ? callback('Invalid document name') : undefined); } - if (docs[docName]) { return (typeof callback === 'function' ? callback('Document already exists') : undefined); } + if (docName.match(/\//)) { + return typeof callback === 'function' + ? callback('Invalid document name') + : undefined + } + if (docs[docName]) { + return typeof callback === 'function' + ? callback('Document already exists') + : undefined + } - if (typeof type === 'string') { type = types[type]; } - if (!type) { return (typeof callback === 'function' ? callback('Type not found') : undefined); } + if (typeof type === 'string') { + type = types[type] + } + if (!type) { + return typeof callback === 'function' + ? callback('Type not found') + : undefined + } const data = { - snapshot:type.create(), - type:type.name, - meta:meta || {}, - v:0 - }; + snapshot: type.create(), + type: type.name, + meta: meta || {}, + v: 0 + } - const done = function(error, dbMeta) { + const done = function (error, dbMeta) { // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something. - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } // From here on we'll store the object version of the type name. 
- data.type = type; - add(docName, null, data, 0, [], dbMeta); - model.emit('create', docName, data); - return (typeof callback === 'function' ? callback() : undefined); - }; + data.type = type + add(docName, null, data, 0, [], dbMeta) + model.emit('create', docName, data) + return typeof callback === 'function' ? callback() : undefined + } if (db) { - return db.create(docName, data, done); + return db.create(docName, data, done) } else { - return done(); + return done() } - }; + } // Perminantly deletes the specified document. // If listeners are attached, they are removed. - // + // // The callback is called with (error) if there was an error. If error is null / undefined, the // document was deleted. // // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the // deletion. Subsequent op submissions will fail). - this.delete = function(docName, callback) { - const doc = docs[docName]; + this.delete = function (docName, callback) { + const doc = docs[docName] if (doc) { - clearTimeout(doc.reapTimer); - delete docs[docName]; + clearTimeout(doc.reapTimer) + delete docs[docName] } - const done = function(error) { - if (!error) { model.emit('delete', docName); } - return (typeof callback === 'function' ? callback(error) : undefined); - }; + const done = function (error) { + if (!error) { + model.emit('delete', docName) + } + return typeof callback === 'function' ? callback(error) : undefined + } if (db) { - return db.delete(docName, doc != null ? doc.dbMeta : undefined, done); + return db.delete(docName, doc != null ? doc.dbMeta : undefined, done) } else { - return done((!doc ? 'Document does not exist' : undefined)); + return done(!doc ? 'Document does not exist' : undefined) } - }; + } // This gets all operations from [start...end]. (That is, its not inclusive.) // @@ -506,102 +626,139 @@ module.exports = (Model = function(db, options) { // // Use getVersion() to determine if a document actually exists, if thats what you're // after. - this.getOps = (getOps = function(docName, start, end, callback) { + this.getOps = getOps = function (docName, start, end, callback) { // getOps will only use the op cache if its there. It won't fill the op cache in. - if (!(start >= 0)) { throw new Error('start must be 0+'); } + if (!(start >= 0)) { + throw new Error('start must be 0+') + } - if (typeof end === 'function') { [end, callback] = Array.from([null, end]); } + if (typeof end === 'function') { + ;[end, callback] = Array.from([null, end]) + } - const ops = docs[docName] != null ? docs[docName].ops : undefined; + const ops = docs[docName] != null ? docs[docName].ops : undefined if (ops) { - const version = docs[docName].v; + const version = docs[docName].v // Ops contains an array of ops. The last op in the list is the last op applied - if (end == null) { end = version; } - start = Math.min(start, end); + if (end == null) { + end = version + } + start = Math.min(start, end) - if (start === end) { return callback(null, []); } + if (start === end) { + return callback(null, []) + } // Base is the version number of the oldest op we have cached - const base = version - ops.length; + const base = version - ops.length // If the database is null, we'll trim to the ops we do have and hope thats enough. 
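The arithmetic behind the cache hit in getOps(), just below, is worth spelling out: the cache holds only the newest ops, so entry 0 of doc.ops is op number `version - ops.length`. A self-contained sketch with concrete numbers (assumes start and end are already resolved, as the surrounding code ensures):

    // Sketch of the op-cache window used by getOps().
    function opsFromCache(ops, version, start, end) {
      const base = version - ops.length // number of the oldest cached op
      if (start < base) return null     // cache miss; fall back to the db
      // e.g. version = 100 with 10 cached ops => base = 90,
      // so a request for [95, 98) returns ops.slice(5, 8).
      return ops.slice(start - base, end - base)
    }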
- if ((start >= base) || (db === null)) { - refreshReapingTimeout(docName); + if (start >= base || db === null) { + refreshReapingTimeout(docName) if (options.stats != null) { - options.stats.cacheHit('getOps'); + options.stats.cacheHit('getOps') } - return callback(null, ops.slice((start - base), (end - base))); + return callback(null, ops.slice(start - base, end - base)) } } if (options.stats != null) { - options.stats.cacheMiss('getOps'); + options.stats.cacheMiss('getOps') } - return getOpsInternal(docName, start, end, callback); - }); + return getOpsInternal(docName, start, end, callback) + } // Gets the snapshot data for the specified document. // getSnapshot(docName, callback) // Callback is called with (error, {v: , type: , snapshot: , meta: }) - this.getSnapshot = (docName, callback) => load(docName, (error, doc) => callback(error, doc ? {v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} : undefined)); + this.getSnapshot = (docName, callback) => + load(docName, (error, doc) => + callback( + error, + doc + ? { v: doc.v, type: doc.type, snapshot: doc.snapshot, meta: doc.meta } + : undefined + ) + ) // Gets the latest version # of the document. // getVersion(docName, callback) // callback is called with (error, version). - this.getVersion = (docName, callback) => load(docName, (error, doc) => callback(error, doc != null ? doc.v : undefined)); + this.getVersion = (docName, callback) => + load(docName, (error, doc) => + callback(error, doc != null ? doc.v : undefined) + ) // Apply an op to the specified document. // The callback is passed (error, applied version #) // opData = {op:op, v:v, meta:metadata} - // + // // Ops are queued before being applied so that the following code applies op C before op B: // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB // model.applyOp 'doc', OPC - this.applyOp = (docName, opData, callback) => // All the logic for this is in makeOpQueue, above. - load(docName, function(error, doc) { - if (error) { return callback(error); } + this.applyOp = ( + docName, + opData, + callback // All the logic for this is in makeOpQueue, above. + ) => + load(docName, function (error, doc) { + if (error) { + return callback(error) + } - return process.nextTick(() => doc.opQueue(opData, function(error, newVersion) { - refreshReapingTimeout(docName); - return (typeof callback === 'function' ? callback(error, newVersion) : undefined); - })); - }); + return process.nextTick(() => + doc.opQueue(opData, function (error, newVersion) { + refreshReapingTimeout(docName) + return typeof callback === 'function' + ? callback(error, newVersion) + : undefined + }) + ) + }) // TODO: store (some) metadata in DB // TODO: op and meta should be combineable in the op that gets sent - this.applyMetaOp = function(docName, metaOpData, callback) { - const {path, value} = metaOpData.meta; - - if (!isArray(path)) { return (typeof callback === 'function' ? callback("path should be an array") : undefined); } + this.applyMetaOp = function (docName, metaOpData, callback) { + const { path, value } = metaOpData.meta - return load(docName, function(error, doc) { + if (!isArray(path)) { + return typeof callback === 'function' + ? callback('path should be an array') + : undefined + } + + return load(docName, function (error, doc) { if (error != null) { - return (typeof callback === 'function' ? callback(error) : undefined); + return typeof callback === 'function' ? 
callback(error) : undefined } else { - let applied = false; + let applied = false switch (path[0]) { case 'shout': - doc.eventEmitter.emit('op', metaOpData); - applied = true; - break; + doc.eventEmitter.emit('op', metaOpData) + applied = true + break } - if (applied) { model.emit('applyMetaOp', docName, path, value); } - return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + if (applied) { + model.emit('applyMetaOp', docName, path, value) + } + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined } - }); - }; + }) + } // Listen to all ops from the specified version. If version is in the past, all // ops since that version are sent immediately to the listener. // // The callback is called once the listener is attached, but before any ops have been passed // to the listener. - // + // // This will _not_ edit the document metadata. // // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour @@ -613,98 +770,123 @@ module.exports = (Model = function(db, options) { // listener is called with (opData) each time an op is applied. // // callback(error, openedVersion) - this.listen = function(docName, version, listener, callback) { - if (typeof version === 'function') { [version, listener, callback] = Array.from([null, version, listener]); } + this.listen = function (docName, version, listener, callback) { + if (typeof version === 'function') { + ;[version, listener, callback] = Array.from([null, version, listener]) + } - return load(docName, function(error, doc) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return load(docName, function (error, doc) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - clearTimeout(doc.reapTimer); + clearTimeout(doc.reapTimer) if (version != null) { - return getOps(docName, version, null, function(error, data) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return getOps(docName, version, null, function (error, data) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - doc.eventEmitter.on('op', listener); + doc.eventEmitter.on('op', listener) if (typeof callback === 'function') { - callback(null, version); + callback(null, version) } return (() => { - const result = []; + const result = [] for (const op of Array.from(data)) { - var needle; - listener(op); + var needle + listener(op) // The listener may well remove itself during the catchup phase. If this happens, break early. // This is done in a quite inefficient way. (O(n) where n = #listeners on doc) - if ((needle = listener, !Array.from(doc.eventEmitter.listeners('op')).includes(needle))) { break; } else { - result.push(undefined); + if ( + ((needle = listener), + !Array.from(doc.eventEmitter.listeners('op')).includes(needle)) + ) { + break + } else { + result.push(undefined) } } - return result; - })(); - }); - - } else { // Version is null / undefined. Just add the listener. - doc.eventEmitter.on('op', listener); - return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + return result + })() + }) + } else { + // Version is null / undefined. Just add the listener. + doc.eventEmitter.on('op', listener) + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined } - }); - }; + }) + } // Remove a listener for a particular document. // // removeListener(docName, listener) // // This is synchronous. 
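One subtlety of listen(), shown above, deserves restating: when a back-version is supplied, the listener is attached before the missed ops are replayed through it, and the replay loop bails out early if the listener unhooks itself mid-catch-up. A condensed caller-side sketch (the doc name, version, and a constructed `model` instance are hypothetical):

    // Sketch: subscribe from an old version; missed ops arrive through the
    // listener before any live ones do.
    model.listen('doc', 5, (opData) => {
      console.log('op at version', opData.v)
    }, (error, openedVersion) => {
      if (error) return console.error(error)
      // openedVersion === 5: ops 5..current replay through the listener first.
      console.log('listening from version', openedVersion)
    })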
- this.removeListener = function(docName, listener) { + this.removeListener = function (docName, listener) { // The document should already be loaded. - const doc = docs[docName]; - if (!doc) { throw new Error('removeListener called but document not loaded'); } + const doc = docs[docName] + if (!doc) { + throw new Error('removeListener called but document not loaded') + } - doc.eventEmitter.removeListener('op', listener); - return refreshReapingTimeout(docName); - }; + doc.eventEmitter.removeListener('op', listener) + return refreshReapingTimeout(docName) + } // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - // sharejs will happily replay uncommitted ops when documents are re-opened anyway. - this.flush = function(callback) { - if (!db) { return (typeof callback === 'function' ? callback() : undefined); } + this.flush = function (callback) { + if (!db) { + return typeof callback === 'function' ? callback() : undefined + } - let pendingWrites = 0; + let pendingWrites = 0 for (const docName in docs) { - const doc = docs[docName]; + const doc = docs[docName] if (doc.committedVersion < doc.v) { - pendingWrites++; + pendingWrites++ // I'm hoping writeSnapshot will always happen in another thread. - tryWriteSnapshot(docName, () => process.nextTick(function() { - pendingWrites--; - if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } - })); + tryWriteSnapshot(docName, () => + process.nextTick(function () { + pendingWrites-- + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + }) + ) } } // If nothing was queued, terminate immediately. - if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } - }; + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + } // Close the database connection. This is needed so nodejs can shut down cleanly. - this.closeDb = function() { - __guardMethod__(db, 'close', o => o.close()); - return db = null; - }; - -}); + this.closeDb = function () { + __guardMethod__(db, 'close', (o) => o.close()) + return (db = null) + } +} // Model inherits from EventEmitter. -Model.prototype = new EventEmitter; - +Model.prototype = new EventEmitter() function __guardMethod__(obj, methodName, transform) { - if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') { - return transform(obj, methodName); + if ( + typeof obj !== 'undefined' && + obj !== null && + typeof obj[methodName] === 'function' + ) { + return transform(obj, methodName) } else { - return undefined; + return undefined } -} \ No newline at end of file +} diff --git a/services/document-updater/app/js/sharejs/server/syncqueue.js b/services/document-updater/app/js/sharejs/server/syncqueue.js index 2eecb615e6..7b83c5b436 100644 --- a/services/document-updater/app/js/sharejs/server/syncqueue.js +++ b/services/document-updater/app/js/sharejs/server/syncqueue.js @@ -25,30 +25,36 @@ // // ^--- async thing will only be running once at any time. 
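A usage sketch for the queue defined below: process runs on one item at a time, later items wait their turn, and enqueue.busy can be inspected from the completion callback. The require path and the worker body are assumptions for illustration:

    const makeQueue = require('./syncqueue') // path assumed

    // One async worker runs at a time; enqueued items are strictly serialized.
    const enqueue = makeQueue(function (data, done) {
      setTimeout(() => done(null, data * 2), 10) // hypothetical async work
    })

    enqueue(1, (err, result) => console.log(result)) // 2
    enqueue(2, (err, result) => console.log(result)) // 4, only after the first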
-module.exports = function(process) { - if (typeof process !== 'function') { throw new Error('process is not a function'); } - const queue = []; - - const enqueue = function(data, callback) { - queue.push([data, callback]); - return flush(); - }; +module.exports = function (process) { + if (typeof process !== 'function') { + throw new Error('process is not a function') + } + const queue = [] - enqueue.busy = false; + const enqueue = function (data, callback) { + queue.push([data, callback]) + return flush() + } - var flush = function() { - if (enqueue.busy || (queue.length === 0)) { return; } + enqueue.busy = false - enqueue.busy = true; - const [data, callback] = Array.from(queue.shift()); - return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow. - enqueue.busy = false; + var flush = function () { + if (enqueue.busy || queue.length === 0) { + return + } + + enqueue.busy = true + const [data, callback] = Array.from(queue.shift()) + return process(data, function (...result) { + // TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false // This is called after busy = false so a user can check if enqueue.busy is set in the callback. - if (callback) { callback.apply(null, result); } - return flush(); - }); - }; - - return enqueue; -}; + if (callback) { + callback.apply(null, result) + } + return flush() + }) + } + return enqueue +} diff --git a/services/document-updater/app/js/sharejs/simple.js b/services/document-updater/app/js/sharejs/simple.js index c0e8e85394..781cdc0293 100644 --- a/services/document-updater/app/js/sharejs/simple.js +++ b/services/document-updater/app/js/sharejs/simple.js @@ -23,28 +23,32 @@ module.exports = { name: 'simple', // Create a new document snapshot - create() { return {str:""}; }, + create() { + return { str: '' } + }, // Apply the given op to the document snapshot. Returns the new snapshot. // // The original snapshot should not be modified. apply(snapshot, op) { - if (!(op.position >= 0 && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); } + if (!(op.position >= 0 && op.position <= snapshot.str.length)) { + throw new Error('Invalid position') + } - let { - str - } = snapshot; - str = str.slice(0, op.position) + op.text + str.slice(op.position); - return {str}; + let { str } = snapshot + str = str.slice(0, op.position) + op.text + str.slice(op.position) + return { str } }, // transform op1 by op2. Return transformed version of op1. // sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the // op being transformed comes from the client or the server. transform(op1, op2, sym) { - let pos = op1.position; - if ((op2.position < pos) || ((op2.position === pos) && (sym === 'left'))) { pos += op2.text.length; } + let pos = op1.position + if (op2.position < pos || (op2.position === pos && sym === 'left')) { + pos += op2.text.length + } - return {position:pos, text:op1.text}; + return { position: pos, text: op1.text } } -}; +} diff --git a/services/document-updater/app/js/sharejs/syncqueue.js b/services/document-updater/app/js/sharejs/syncqueue.js index 2eecb615e6..7b83c5b436 100644 --- a/services/document-updater/app/js/sharejs/syncqueue.js +++ b/services/document-updater/app/js/sharejs/syncqueue.js @@ -25,30 +25,36 @@ // // ^--- async thing will only be running once at any time. 
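Looking back at the 'simple' type above: transform only has to shift an insert's position past concurrent inserts at earlier positions, with the left/right symmetry argument breaking ties at the same position. For example (require path and values illustrative):

    const simple = require('./simple') // path assumed

    // op2 inserts 'ab' at 2, ahead of op1's position, so op1 shifts right by 2.
    simple.transform({ position: 5, text: 'x' }, { position: 2, text: 'ab' }, 'right')
    // => { position: 7, text: 'x' }

    // On a position tie, 'left' means the other op's text goes first.
    simple.transform({ position: 2, text: 'x' }, { position: 2, text: 'ab' }, 'left')
    // => { position: 4, text: 'x' }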
-module.exports = function(process) { - if (typeof process !== 'function') { throw new Error('process is not a function'); } - const queue = []; - - const enqueue = function(data, callback) { - queue.push([data, callback]); - return flush(); - }; +module.exports = function (process) { + if (typeof process !== 'function') { + throw new Error('process is not a function') + } + const queue = [] - enqueue.busy = false; + const enqueue = function (data, callback) { + queue.push([data, callback]) + return flush() + } - var flush = function() { - if (enqueue.busy || (queue.length === 0)) { return; } + enqueue.busy = false - enqueue.busy = true; - const [data, callback] = Array.from(queue.shift()); - return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow. - enqueue.busy = false; + var flush = function () { + if (enqueue.busy || queue.length === 0) { + return + } + + enqueue.busy = true + const [data, callback] = Array.from(queue.shift()) + return process(data, function (...result) { + // TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false // This is called after busy = false so a user can check if enqueue.busy is set in the callback. - if (callback) { callback.apply(null, result); } - return flush(); - }); - }; - - return enqueue; -}; + if (callback) { + callback.apply(null, result) + } + return flush() + }) + } + return enqueue +} diff --git a/services/document-updater/app/js/sharejs/text-api.js b/services/document-updater/app/js/sharejs/text-api.js index 7c39b25899..d30f009cdb 100644 --- a/services/document-updater/app/js/sharejs/text-api.js +++ b/services/document-updater/app/js/sharejs/text-api.js @@ -8,39 +8,45 @@ */ // Text document API for text -let text; -if (typeof WEB === 'undefined') { text = require('./text'); } +let text +if (typeof WEB === 'undefined') { + text = require('./text') +} text.api = { - provides: {text:true}, + provides: { text: true }, // The number of characters in the string - getLength() { return this.snapshot.length; }, + getLength() { + return this.snapshot.length + }, // Get the text contents of a document - getText() { return this.snapshot; }, + getText() { + return this.snapshot + }, insert(pos, text, callback) { - const op = [{p:pos, i:text}]; - - this.submitOp(op, callback); - return op; - }, - - del(pos, length, callback) { - const op = [{p:pos, d:this.snapshot.slice(pos, (pos + length))}]; + const op = [{ p: pos, i: text }] - this.submitOp(op, callback); - return op; + this.submitOp(op, callback) + return op }, - + + del(pos, length, callback) { + const op = [{ p: pos, d: this.snapshot.slice(pos, pos + length) }] + + this.submitOp(op, callback) + return op + }, + _register() { - return this.on('remoteop', function(op) { + return this.on('remoteop', function (op) { return Array.from(op).map((component) => - component.i !== undefined ? - this.emit('insert', component.p, component.i) - : - this.emit('delete', component.p, component.d)); - }); + component.i !== undefined + ? 
this.emit('insert', component.p, component.i) + : this.emit('delete', component.p, component.d) + ) + }) } -}; +} diff --git a/services/document-updater/app/js/sharejs/text-composable-api.js b/services/document-updater/app/js/sharejs/text-composable-api.js index ba6e5f0242..9b237ce91b 100644 --- a/services/document-updater/app/js/sharejs/text-composable-api.js +++ b/services/document-updater/app/js/sharejs/text-composable-api.js @@ -13,57 +13,64 @@ */ // Text document API for text -let type; +let type if (typeof WEB !== 'undefined' && WEB !== null) { - type = exports.types['text-composable']; + type = exports.types['text-composable'] } else { - type = require('./text-composable'); + type = require('./text-composable') } type.api = { - provides: {'text':true}, + provides: { text: true }, // The number of characters in the string - 'getLength'() { return this.snapshot.length; }, + getLength() { + return this.snapshot.length + }, // Get the text contents of a document - 'getText'() { return this.snapshot; }, - - 'insert'(pos, text, callback) { - const op = type.normalize([pos, {'i':text}, (this.snapshot.length - pos)]); - - this.submitOp(op, callback); - return op; + getText() { + return this.snapshot }, - - 'del'(pos, length, callback) { - const op = type.normalize([pos, {'d':this.snapshot.slice(pos, (pos + length))}, (this.snapshot.length - pos - length)]); - this.submitOp(op, callback); - return op; + insert(pos, text, callback) { + const op = type.normalize([pos, { i: text }, this.snapshot.length - pos]) + + this.submitOp(op, callback) + return op + }, + + del(pos, length, callback) { + const op = type.normalize([ + pos, + { d: this.snapshot.slice(pos, pos + length) }, + this.snapshot.length - pos - length + ]) + + this.submitOp(op, callback) + return op }, _register() { - return this.on('remoteop', function(op) { - let pos = 0; + return this.on('remoteop', function (op) { + let pos = 0 return (() => { - const result = []; + const result = [] for (const component of Array.from(op)) { if (typeof component === 'number') { - result.push(pos += component); + result.push((pos += component)) } else if (component.i !== undefined) { - this.emit('insert', pos, component.i); - result.push(pos += component.i.length); + this.emit('insert', pos, component.i) + result.push((pos += component.i.length)) } else { // delete - result.push(this.emit('delete', pos, component.d)); + result.push(this.emit('delete', pos, component.d)) } } - return result; - })(); - }); + return result + })() + }) } -}; - // We don't increment pos, because the position - // specified is after the delete has happened. - +} +// We don't increment pos, because the position +// specified is after the delete has happened. diff --git a/services/document-updater/app/js/sharejs/text-composable.js b/services/document-updater/app/js/sharejs/text-composable.js index 79dfb63308..6898589908 100644 --- a/services/document-updater/app/js/sharejs/text-composable.js +++ b/services/document-updater/app/js/sharejs/text-composable.js @@ -27,297 +27,373 @@ // // Snapshots are strings. -let makeAppend; -const p = function() {}; // require('util').debug -const i = function() {}; // require('util').inspect +let makeAppend +const p = function () {} // require('util').debug +const i = function () {} // require('util').inspect -const exports = (typeof WEB !== 'undefined' && WEB !== null) ? {} : module.exports; +const exports = typeof WEB !== 'undefined' && WEB !== null ? 
{} : module.exports -exports.name = 'text-composable'; +exports.name = 'text-composable' -exports.create = () => ''; +exports.create = () => '' // -------- Utility methods -const checkOp = function(op) { - if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); } - let last = null; +const checkOp = function (op) { + if (!Array.isArray(op)) { + throw new Error('Op must be an array of components') + } + let last = null return (() => { - const result = []; + const result = [] for (const c of Array.from(op)) { - if (typeof(c) === 'object') { - if (((c.i == null) || !(c.i.length > 0)) && ((c.d == null) || !(c.d.length > 0))) { throw new Error(`Invalid op component: ${i(c)}`); } + if (typeof c === 'object') { + if ( + (c.i == null || !(c.i.length > 0)) && + (c.d == null || !(c.d.length > 0)) + ) { + throw new Error(`Invalid op component: ${i(c)}`) + } } else { - if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); } - if (!(c > 0)) { throw new Error('Skip components must be a positive number'); } - if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be added'); } + if (typeof c !== 'number') { + throw new Error('Op components must be objects or numbers') + } + if (!(c > 0)) { + throw new Error('Skip components must be a positive number') + } + if (typeof last === 'number') { + throw new Error('Adjacent skip components should be added') + } } - result.push(last = c); + result.push((last = c)) } - return result; - })(); -}; + return result + })() +} // Makes a function for appending components to a given op. // Exported for the randomOpGenerator. -exports._makeAppend = (makeAppend = op => (function(component) { - if ((component === 0) || (component.i === '') || (component.d === '')) { - - } else if (op.length === 0) { - return op.push(component); - } else if ((typeof(component) === 'number') && (typeof(op[op.length - 1]) === 'number')) { - return op[op.length - 1] += component; - } else if ((component.i != null) && (op[op.length - 1].i != null)) { - return op[op.length - 1].i += component.i; - } else if ((component.d != null) && (op[op.length - 1].d != null)) { - return op[op.length - 1].d += component.d; - } else { - return op.push(component); +exports._makeAppend = makeAppend = (op) => + function (component) { + if (component === 0 || component.i === '' || component.d === '') { + } else if (op.length === 0) { + return op.push(component) + } else if ( + typeof component === 'number' && + typeof op[op.length - 1] === 'number' + ) { + return (op[op.length - 1] += component) + } else if (component.i != null && op[op.length - 1].i != null) { + return (op[op.length - 1].i += component.i) + } else if (component.d != null && op[op.length - 1].d != null) { + return (op[op.length - 1].d += component.d) + } else { + return op.push(component) + } } -})); - + // checkOp op // Makes 2 functions for taking components from the start of an op, and for peeking // at the next op that could be taken. -const makeTake = function(op) { +const makeTake = function (op) { // The index of the next component to take - let idx = 0; + let idx = 0 // The offset into the component - let offset = 0; + let offset = 0 // Take up to length n from the front of op. If n is null, take the next // op component. If indivisableField == 'd', delete components won't be separated. // If indivisableField == 'i', insert components won't be separated. 
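The append helper above is what keeps text-composable ops normalized as they are built: consecutive skips add together, consecutive inserts and deletes concatenate, and empty components vanish. A quick demonstration (require path assumed):

    const type = require('./text-composable') // path assumed

    const op = []
    const append = type._makeAppend(op)
    append(3)
    append(2)            // merges into the previous skip
    append({ i: 'ab' })
    append({ i: 'c' })   // concatenates with the previous insert
    append({ d: '' })    // empty components are dropped
    // op is now [5, { i: 'abc' }]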
- const take = function(n, indivisableField) { - let c; - if (idx === op.length) { return null; } + const take = function (n, indivisableField) { + let c + if (idx === op.length) { + return null + } // assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' - if (typeof(op[idx]) === 'number') { - if ((n == null) || ((op[idx] - offset) <= n)) { - c = op[idx] - offset; - ++idx; offset = 0; - return c; + if (typeof op[idx] === 'number') { + if (n == null || op[idx] - offset <= n) { + c = op[idx] - offset + ++idx + offset = 0 + return c } else { - offset += n; - return n; + offset += n + return n } } else { // Take from the string - const field = op[idx].i ? 'i' : 'd'; - c = {}; - if ((n == null) || ((op[idx][field].length - offset) <= n) || (field === indivisableField)) { - c[field] = op[idx][field].slice(offset); - ++idx; offset = 0; + const field = op[idx].i ? 'i' : 'd' + c = {} + if ( + n == null || + op[idx][field].length - offset <= n || + field === indivisableField + ) { + c[field] = op[idx][field].slice(offset) + ++idx + offset = 0 } else { - c[field] = op[idx][field].slice(offset, (offset + n)); - offset += n; + c[field] = op[idx][field].slice(offset, offset + n) + offset += n } - return c; + return c } - }; - - const peekType = () => op[idx]; - - return [take, peekType]; -}; + } + + const peekType = () => op[idx] + + return [take, peekType] +} // Find and return the length of an op component -const componentLength = function(component) { - if (typeof(component) === 'number') { - return component; +const componentLength = function (component) { + if (typeof component === 'number') { + return component } else if (component.i != null) { - return component.i.length; + return component.i.length } else { - return component.d.length; + return component.d.length } -}; +} // Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate // adjacent inserts and deletes. -exports.normalize = function(op) { - const newOp = []; - const append = makeAppend(newOp); - for (const component of Array.from(op)) { append(component); } - return newOp; -}; +exports.normalize = function (op) { + const newOp = [] + const append = makeAppend(newOp) + for (const component of Array.from(op)) { + append(component) + } + return newOp +} // Apply the op to the string. Returns the new string. 
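Concretely, for the apply implementation that follows: an op must traverse the whole document, so every character of the input is either skipped into the output, matched against a quoted delete, or supplemented by an insert. Two worked examples (require path assumed; results follow from the code below):

    const type = require('./text-composable') // path assumed

    // Skip 5 chars, insert ' X', then skip the remaining 3.
    type.apply('helloabc', [5, { i: ' X' }, 3]) // => 'hello Xabc'

    // Deletes quote the text they remove, and the op must cover the whole doc.
    type.apply('helloabc', [5, { d: 'abc' }])   // => 'hello'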
-exports.apply = function(str, op) { - p(`Applying ${i(op)} to '${str}'`); - if (typeof(str) !== 'string') { throw new Error('Snapshot should be a string'); } - checkOp(op); +exports.apply = function (str, op) { + p(`Applying ${i(op)} to '${str}'`) + if (typeof str !== 'string') { + throw new Error('Snapshot should be a string') + } + checkOp(op) - const pos = 0; - const newDoc = []; + const pos = 0 + const newDoc = [] for (const component of Array.from(op)) { - if (typeof(component) === 'number') { - if (component > str.length) { throw new Error('The op is too long for this document'); } - newDoc.push(str.slice(0, component)); - str = str.slice(component); + if (typeof component === 'number') { + if (component > str.length) { + throw new Error('The op is too long for this document') + } + newDoc.push(str.slice(0, component)) + str = str.slice(component) } else if (component.i != null) { - newDoc.push(component.i); + newDoc.push(component.i) } else { - if (component.d !== str.slice(0, component.d.length)) { throw new Error(`The deleted text '${component.d}' doesn't match the next characters in the document '${str.slice(0, component.d.length)}'`); } - str = str.slice(component.d.length); + if (component.d !== str.slice(0, component.d.length)) { + throw new Error( + `The deleted text '${ + component.d + }' doesn't match the next characters in the document '${str.slice( + 0, + component.d.length + )}'` + ) + } + str = str.slice(component.d.length) } } - - if (str !== '') { throw new Error("The applied op doesn't traverse the entire document"); } - return newDoc.join(''); -}; + if (str !== '') { + throw new Error("The applied op doesn't traverse the entire document") + } + + return newDoc.join('') +} // transform op1 by op2. Return transformed version of op1. // op1 and op2 are unchanged by transform. -exports.transform = function(op, otherOp, side) { - let component; - if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side} must be 'left' or 'right'`); } +exports.transform = function (op, otherOp, side) { + let component + if (side !== 'left' && side !== 'right') { + throw new Error(`side (${side} must be 'left' or 'right'`) + } - checkOp(op); - checkOp(otherOp); - const newOp = []; + checkOp(op) + checkOp(otherOp) + const newOp = [] - const append = makeAppend(newOp); - const [take, peek] = Array.from(makeTake(op)); + const append = makeAppend(newOp) + const [take, peek] = Array.from(makeTake(op)) for (component of Array.from(otherOp)) { - var chunk, length; - if (typeof(component) === 'number') { // Skip - length = component; + var chunk, length + if (typeof component === 'number') { + // Skip + length = component while (length > 0) { - chunk = take(length, 'i'); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(length, 'i') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } - append(chunk); - if ((typeof(chunk) !== 'object') || (chunk.i == null)) { length -= componentLength(chunk); } + append(chunk) + if (typeof chunk !== 'object' || chunk.i == null) { + length -= componentLength(chunk) + } } - } else if (component.i != null) { // Insert + } else if (component.i != null) { + // Insert if (side === 'left') { // The left insert should go first. - const o = peek(); - if (o != null ? o.i : undefined) { append(take()); } + const o = peek() + if (o != null ? o.i : undefined) { + append(take()) + } } // Otherwise, skip the inserted text. 
- append(component.i.length); - } else { // Delete. + append(component.i.length) + } else { + // Delete. // assert.ok component.d - ({ - length - } = component.d); + ;({ length } = component.d) while (length > 0) { - chunk = take(length, 'i'); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(length, 'i') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } - if (typeof(chunk) === 'number') { - length -= chunk; + if (typeof chunk === 'number') { + length -= chunk } else if (chunk.i != null) { - append(chunk); + append(chunk) } else { // assert.ok chunk.d // The delete is unnecessary now. - length -= chunk.d.length; + length -= chunk.d.length } } } } - + // Append extras from op1 - while (component = take()) { - if ((component != null ? component.i : undefined) == null) { throw new Error(`Remaining fragments in the op: ${i(component)}`); } - append(component); + while ((component = take())) { + if ((component != null ? component.i : undefined) == null) { + throw new Error(`Remaining fragments in the op: ${i(component)}`) + } + append(component) } - return newOp; -}; - + return newOp +} // Compose 2 ops into 1 op. -exports.compose = function(op1, op2) { - let component; - p(`COMPOSE ${i(op1)} + ${i(op2)}`); - checkOp(op1); - checkOp(op2); +exports.compose = function (op1, op2) { + let component + p(`COMPOSE ${i(op1)} + ${i(op2)}`) + checkOp(op1) + checkOp(op2) - const result = []; + const result = [] - const append = makeAppend(result); - const [take, _] = Array.from(makeTake(op1)); + const append = makeAppend(result) + const [take, _] = Array.from(makeTake(op1)) for (component of Array.from(op2)) { - var chunk, length; - if (typeof(component) === 'number') { // Skip - length = component; + var chunk, length + if (typeof component === 'number') { + // Skip + length = component while (length > 0) { - chunk = take(length, 'd'); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(length, 'd') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } - append(chunk); - if ((typeof(chunk) !== 'object') || (chunk.d == null)) { length -= componentLength(chunk); } + append(chunk) + if (typeof chunk !== 'object' || chunk.d == null) { + length -= componentLength(chunk) + } } - - } else if (component.i != null) { // Insert - append({i:component.i}); - - } else { // Delete - let offset = 0; + } else if (component.i != null) { + // Insert + append({ i: component.i }) + } else { + // Delete + let offset = 0 while (offset < component.d.length) { - chunk = take(component.d.length - offset, 'd'); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(component.d.length - offset, 'd') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } // If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. 
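Composition squashes two sequential ops into one, and an insert from op1 that op2 deletes cancels out entirely, per the taker logic here. A worked example (require path assumed; the result follows from the rules just described):

    const type = require('./text-composable') // path assumed

    // op1 inserts 'abc' into an empty doc; op2 then deletes the 'b'.
    // Composed, the pair collapses to a single insert of 'ac'.
    type.compose([{ i: 'abc' }], [1, { d: 'b' }, 1]) // => [{ i: 'ac' }]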
- if (typeof(chunk) === 'number') { - append({d:component.d.slice(offset, (offset + chunk))}); - offset += chunk; + if (typeof chunk === 'number') { + append({ d: component.d.slice(offset, offset + chunk) }) + offset += chunk } else if (chunk.i != null) { - if (component.d.slice(offset, (offset + chunk.i.length)) !== chunk.i) { throw new Error("The deleted text doesn't match the inserted text"); } - offset += chunk.i.length; + if (component.d.slice(offset, offset + chunk.i.length) !== chunk.i) { + throw new Error("The deleted text doesn't match the inserted text") + } + offset += chunk.i.length // The ops cancel each other out. } else { // Delete - append(chunk); + append(chunk) } } } } - + // Append extras from op1 - while (component = take()) { - if ((component != null ? component.d : undefined) == null) { throw new Error(`Trailing stuff in op1 ${i(component)}`); } - append(component); + while ((component = take())) { + if ((component != null ? component.d : undefined) == null) { + throw new Error(`Trailing stuff in op1 ${i(component)}`) + } + append(component) } - return result; -}; - - -const invertComponent = function(c) { - if (typeof(c) === 'number') { - return c; - } else if (c.i != null) { - return {d:c.i}; - } else { - return {i:c.d}; - } -}; - -// Invert an op -exports.invert = function(op) { - const result = []; - const append = makeAppend(result); - - for (const component of Array.from(op)) { append(invertComponent(component)); } - - return result; -}; - -if (typeof window !== 'undefined' && window !== null) { - if (!window.ot) { window.ot = {}; } - if (!window.ot.types) { window.ot.types = {}; } - window.ot.types.text = exports; + return result } +const invertComponent = function (c) { + if (typeof c === 'number') { + return c + } else if (c.i != null) { + return { d: c.i } + } else { + return { i: c.d } + } +} + +// Invert an op +exports.invert = function (op) { + const result = [] + const append = makeAppend(result) + + for (const component of Array.from(op)) { + append(invertComponent(component)) + } + + return result +} + +if (typeof window !== 'undefined' && window !== null) { + if (!window.ot) { + window.ot = {} + } + if (!window.ot.types) { + window.ot.types = {} + } + window.ot.types.text = exports +} diff --git a/services/document-updater/app/js/sharejs/text-tp2-api.js b/services/document-updater/app/js/sharejs/text-tp2-api.js index 97bf606267..3ab7ef1cb5 100644 --- a/services/document-updater/app/js/sharejs/text-tp2-api.js +++ b/services/document-updater/app/js/sharejs/text-tp2-api.js @@ -13,111 +13,121 @@ */ // Text document API for text-tp2 -let type; +let type if (typeof WEB !== 'undefined' && WEB !== null) { - type = exports.types['text-tp2']; + type = exports.types['text-tp2'] } else { - type = require('./text-tp2'); + type = require('./text-tp2') } -const {_takeDoc:takeDoc, _append:append} = type; +const { _takeDoc: takeDoc, _append: append } = type -const appendSkipChars = (op, doc, pos, maxlength) => (() => { - const result = []; - while (((maxlength === undefined) || (maxlength > 0)) && (pos.index < doc.data.length)) { - const part = takeDoc(doc, pos, maxlength, true); - if ((maxlength !== undefined) && (typeof part === 'string')) { maxlength -= part.length; } - result.push(append(op, (part.length || part))); - } - return result; -})(); +const appendSkipChars = (op, doc, pos, maxlength) => + (() => { + const result = [] + while ( + (maxlength === undefined || maxlength > 0) && + pos.index < doc.data.length + ) { + const part = takeDoc(doc, pos, maxlength, 
true) + if (maxlength !== undefined && typeof part === 'string') { + maxlength -= part.length + } + result.push(append(op, part.length || part)) + } + return result + })() type.api = { - 'provides': {'text':true}, + provides: { text: true }, // The number of characters in the string - 'getLength'() { return this.snapshot.charLength; }, + getLength() { + return this.snapshot.charLength + }, // Flatten a document into a string - 'getText'() { - const strings = (Array.from(this.snapshot.data).filter((elem) => typeof elem === 'string')); - return strings.join(''); + getText() { + const strings = Array.from(this.snapshot.data).filter( + (elem) => typeof elem === 'string' + ) + return strings.join('') }, - 'insert'(pos, text, callback) { - if (pos === undefined) { pos = 0; } + insert(pos, text, callback) { + if (pos === undefined) { + pos = 0 + } - const op = []; - const docPos = {index:0, offset:0}; + const op = [] + const docPos = { index: 0, offset: 0 } - appendSkipChars(op, this.snapshot, docPos, pos); - append(op, {'i':text}); - appendSkipChars(op, this.snapshot, docPos); - - this.submitOp(op, callback); - return op; + appendSkipChars(op, this.snapshot, docPos, pos) + append(op, { i: text }) + appendSkipChars(op, this.snapshot, docPos) + + this.submitOp(op, callback) + return op }, - - 'del'(pos, length, callback) { - const op = []; - const docPos = {index:0, offset:0}; - appendSkipChars(op, this.snapshot, docPos, pos); - + del(pos, length, callback) { + const op = [] + const docPos = { index: 0, offset: 0 } + + appendSkipChars(op, this.snapshot, docPos, pos) + while (length > 0) { - const part = takeDoc(this.snapshot, docPos, length, true); + const part = takeDoc(this.snapshot, docPos, length, true) if (typeof part === 'string') { - append(op, {'d':part.length}); - length -= part.length; + append(op, { d: part.length }) + length -= part.length } else { - append(op, part); + append(op, part) } } - - appendSkipChars(op, this.snapshot, docPos); - this.submitOp(op, callback); - return op; + appendSkipChars(op, this.snapshot, docPos) + + this.submitOp(op, callback) + return op }, - '_register'() { + _register() { // Interpret recieved ops + generate more detailed events for them - return this.on('remoteop', function(op, snapshot) { - let textPos = 0; - const docPos = {index:0, offset:0}; + return this.on('remoteop', function (op, snapshot) { + let textPos = 0 + const docPos = { index: 0, offset: 0 } for (const component of Array.from(op)) { - var part, remainder; + var part, remainder if (typeof component === 'number') { // Skip - remainder = component; + remainder = component while (remainder > 0) { - part = takeDoc(snapshot, docPos, remainder); + part = takeDoc(snapshot, docPos, remainder) if (typeof part === 'string') { - textPos += part.length; + textPos += part.length } - remainder -= part.length || part; + remainder -= part.length || part } } else if (component.i !== undefined) { // Insert if (typeof component.i === 'string') { - this.emit('insert', textPos, component.i); - textPos += component.i.length; + this.emit('insert', textPos, component.i) + textPos += component.i.length } } else { // Delete - remainder = component.d; + remainder = component.d while (remainder > 0) { - part = takeDoc(snapshot, docPos, remainder); + part = takeDoc(snapshot, docPos, remainder) if (typeof part === 'string') { - this.emit('delete', textPos, part); + this.emit('delete', textPos, part) } - remainder -= part.length || part; + remainder -= part.length || part } } } - - }); + }) } -}; - +} diff --git 
a/services/document-updater/app/js/sharejs/text-tp2.js b/services/document-updater/app/js/sharejs/text-tp2.js index 4efcb05871..f1e3c97dd6 100644 --- a/services/document-updater/app/js/sharejs/text-tp2.js +++ b/services/document-updater/app/js/sharejs/text-tp2.js @@ -38,369 +38,460 @@ // Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters) // would be represented by a document snapshot of ['Hello ', 5, 'world'] -let append, appendDoc, takeDoc; +let append, appendDoc, takeDoc var type = { name: 'text-tp2', tp2: true, - create() { return {charLength:0, totalLength:0, positionCache:[], data:[]}; }, + create() { + return { charLength: 0, totalLength: 0, positionCache: [], data: [] } + }, serialize(doc) { - if (!doc.data) { throw new Error('invalid doc snapshot'); } - return doc.data; + if (!doc.data) { + throw new Error('invalid doc snapshot') + } + return doc.data }, deserialize(data) { - const doc = type.create(); - doc.data = data; - + const doc = type.create() + doc.data = data + for (const component of Array.from(data)) { if (typeof component === 'string') { - doc.charLength += component.length; - doc.totalLength += component.length; + doc.charLength += component.length + doc.totalLength += component.length } else { - doc.totalLength += component; + doc.totalLength += component } } - - return doc; + + return doc } -}; +} - -const checkOp = function(op) { - if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); } - let last = null; +const checkOp = function (op) { + if (!Array.isArray(op)) { + throw new Error('Op must be an array of components') + } + let last = null return (() => { - const result = []; + const result = [] for (const c of Array.from(op)) { - if (typeof(c) === 'object') { + if (typeof c === 'object') { if (c.i !== undefined) { - if (((typeof(c.i) !== 'string') || !(c.i.length > 0)) && ((typeof(c.i) !== 'number') || !(c.i > 0))) { throw new Error('Inserts must insert a string or a +ive number'); } + if ( + (typeof c.i !== 'string' || !(c.i.length > 0)) && + (typeof c.i !== 'number' || !(c.i > 0)) + ) { + throw new Error('Inserts must insert a string or a +ive number') + } } else if (c.d !== undefined) { - if ((typeof(c.d) !== 'number') || !(c.d > 0)) { throw new Error('Deletes must be a +ive number'); } + if (typeof c.d !== 'number' || !(c.d > 0)) { + throw new Error('Deletes must be a +ive number') + } } else { - throw new Error('Operation component must define .i or .d'); + throw new Error('Operation component must define .i or .d') } } else { - if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); } - if (!(c > 0)) { throw new Error('Skip components must be a positive number'); } - if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be combined'); } + if (typeof c !== 'number') { + throw new Error('Op components must be objects or numbers') + } + if (!(c > 0)) { + throw new Error('Skip components must be a positive number') + } + if (typeof last === 'number') { + throw new Error('Adjacent skip components should be combined') + } } - result.push(last = c); + result.push((last = c)) } - return result; - })(); -}; + return result + })() +} // Take the next part from the specified position in a document snapshot. // position = {index, offset}. It will be updated. 
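// (A worked example, added here for illustration and not part of the original
// file: with doc.data = ['Hello ', 5, 'world'] and position = { index: 0, offset: 0 },
//   takeDoc(doc, position, 3)  // -> 'Hel'  (position is now { index: 0, offset: 3 })
//   takeDoc(doc, position)     // -> 'lo '  (position is now { index: 1, offset: 0 })
//   takeDoc(doc, position, 2)  // -> 2, i.e. two tombstones (position { index: 1, offset: 2 }))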
-type._takeDoc = (takeDoc = function(doc, position, maxlength, tombsIndivisible) { - if (position.index >= doc.data.length) { throw new Error('Operation goes past the end of the document'); } - - const part = doc.data[position.index]; - // peel off data[0] - const result = typeof(part) === 'string' ? - maxlength !== undefined ? - part.slice(position.offset, (position.offset + maxlength)) - : - part.slice(position.offset) - : - (maxlength === undefined) || tombsIndivisible ? - part - position.offset - : - Math.min(maxlength, part - position.offset); - - const resultLen = result.length || result; - - if (((part.length || part) - position.offset) > resultLen) { - position.offset += resultLen; - } else { - position.index++; - position.offset = 0; +type._takeDoc = takeDoc = function ( + doc, + position, + maxlength, + tombsIndivisible +) { + if (position.index >= doc.data.length) { + throw new Error('Operation goes past the end of the document') } - - return result; -}); + + const part = doc.data[position.index] + // peel off data[0] + const result = + typeof part === 'string' + ? maxlength !== undefined + ? part.slice(position.offset, position.offset + maxlength) + : part.slice(position.offset) + : maxlength === undefined || tombsIndivisible + ? part - position.offset + : Math.min(maxlength, part - position.offset) + + const resultLen = result.length || result + + if ((part.length || part) - position.offset > resultLen) { + position.offset += resultLen + } else { + position.index++ + position.offset = 0 + } + + return result +} // Append a part to the end of a document -type._appendDoc = (appendDoc = function(doc, p) { - if ((p === 0) || (p === '')) { return; } +type._appendDoc = appendDoc = function (doc, p) { + if (p === 0 || p === '') { + return + } if (typeof p === 'string') { - doc.charLength += p.length; - doc.totalLength += p.length; + doc.charLength += p.length + doc.totalLength += p.length } else { - doc.totalLength += p; + doc.totalLength += p } - const { - data - } = doc; + const { data } = doc if (data.length === 0) { - data.push(p); - } else if (typeof(data[data.length - 1]) === typeof(p)) { - data[data.length - 1] += p; + data.push(p) + } else if (typeof data[data.length - 1] === typeof p) { + data[data.length - 1] += p } else { - data.push(p); + data.push(p) } -}); +} // Apply the op to the document. The document is not modified in the process. 
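// (For illustration, an added example with hypothetical values: applying the
// op [6, { i: 'X' }, 10] to a snapshot with data ['Hello ', 5, 'world'] skips
// the six characters of 'Hello ', inserts 'X', then skips the remaining ten
// positions (five tombstones plus 'world'), producing a snapshot with
// data ['Hello X', 5, 'world'], charLength 12 and totalLength 17.)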
-type.apply = function(doc, op) { - if ((doc.totalLength === undefined) || (doc.charLength === undefined) || (doc.data.length === undefined)) { - throw new Error('Snapshot is invalid'); +type.apply = function (doc, op) { + if ( + doc.totalLength === undefined || + doc.charLength === undefined || + doc.data.length === undefined + ) { + throw new Error('Snapshot is invalid') } - checkOp(op); + checkOp(op) - const newDoc = type.create(); - const position = {index:0, offset:0}; + const newDoc = type.create() + const position = { index: 0, offset: 0 } for (const component of Array.from(op)) { - var part, remainder; - if (typeof(component) === 'number') { - remainder = component; + var part, remainder + if (typeof component === 'number') { + remainder = component while (remainder > 0) { - part = takeDoc(doc, position, remainder); - - appendDoc(newDoc, part); - remainder -= part.length || part; - } + part = takeDoc(doc, position, remainder) - } else if (component.i !== undefined) { - appendDoc(newDoc, component.i); - } else if (component.d !== undefined) { - remainder = component.d; - while (remainder > 0) { - part = takeDoc(doc, position, remainder); - remainder -= part.length || part; + appendDoc(newDoc, part) + remainder -= part.length || part } - appendDoc(newDoc, component.d); + } else if (component.i !== undefined) { + appendDoc(newDoc, component.i) + } else if (component.d !== undefined) { + remainder = component.d + while (remainder > 0) { + part = takeDoc(doc, position, remainder) + remainder -= part.length || part + } + appendDoc(newDoc, component.d) } } - - return newDoc; -}; + + return newDoc +} // Append an op component to the end of the specified op. // Exported for the randomOpGenerator. -type._append = (append = function(op, component) { - if ((component === 0) || (component.i === '') || (component.i === 0) || (component.d === 0)) { - +type._append = append = function (op, component) { + if ( + component === 0 || + component.i === '' || + component.i === 0 || + component.d === 0 + ) { } else if (op.length === 0) { - return op.push(component); + return op.push(component) } else { - const last = op[op.length - 1]; - if ((typeof(component) === 'number') && (typeof(last) === 'number')) { - return op[op.length - 1] += component; - } else if ((component.i !== undefined) && (last.i != null) && (typeof(last.i) === typeof(component.i))) { - return last.i += component.i; - } else if ((component.d !== undefined) && (last.d != null)) { - return last.d += component.d; + const last = op[op.length - 1] + if (typeof component === 'number' && typeof last === 'number') { + return (op[op.length - 1] += component) + } else if ( + component.i !== undefined && + last.i != null && + typeof last.i === typeof component.i + ) { + return (last.i += component.i) + } else if (component.d !== undefined && last.d != null) { + return (last.d += component.d) } else { - return op.push(component); + return op.push(component) } } -}); - +} + // Makes 2 functions for taking components from the start of an op, and for peeking // at the next op that could be taken. -const makeTake = function(op) { +const makeTake = function (op) { // The index of the next component to take - let index = 0; + let index = 0 // The offset into the component - let offset = 0; + let offset = 0 // Take up to length maxlength from the op. If maxlength is not defined, there is no max. // If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. // // Returns null when op is fully consumed. 
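// (An added example, not from the original source: with op = [{ i: 'abc' }, 4],
//   take(2)   // -> { i: 'ab' }
//   take()    // -> { i: 'c' }
//   take(10)  // -> 4 (the skip yields its own remaining length, not maxlength)
//   take()    // -> null, the op is fully consumed)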
- const take = function(maxlength, insertsIndivisible) { - let current; - if (index === op.length) { return null; } + const take = function (maxlength, insertsIndivisible) { + let current + if (index === op.length) { + return null + } - const e = op[index]; - if ((typeof((current = e)) === 'number') || (typeof((current = e.i)) === 'number') || ((current = e.d) !== undefined)) { - let c; - if ((maxlength == null) || ((current - offset) <= maxlength) || (insertsIndivisible && (e.i !== undefined))) { + const e = op[index] + if ( + typeof (current = e) === 'number' || + typeof (current = e.i) === 'number' || + (current = e.d) !== undefined + ) { + let c + if ( + maxlength == null || + current - offset <= maxlength || + (insertsIndivisible && e.i !== undefined) + ) { // Return the rest of the current element. - c = current - offset; - ++index; offset = 0; + c = current - offset + ++index + offset = 0 } else { - offset += maxlength; - c = maxlength; + offset += maxlength + c = maxlength + } + if (e.i !== undefined) { + return { i: c } + } else if (e.d !== undefined) { + return { d: c } + } else { + return c } - if (e.i !== undefined) { return {i:c}; } else if (e.d !== undefined) { return {d:c}; } else { return c; } } else { // Take from the inserted string - let result; - if ((maxlength == null) || ((e.i.length - offset) <= maxlength) || insertsIndivisible) { - result = {i:e.i.slice(offset)}; - ++index; offset = 0; + let result + if ( + maxlength == null || + e.i.length - offset <= maxlength || + insertsIndivisible + ) { + result = { i: e.i.slice(offset) } + ++index + offset = 0 } else { - result = {i:e.i.slice(offset, offset + maxlength)}; - offset += maxlength; + result = { i: e.i.slice(offset, offset + maxlength) } + offset += maxlength } - return result; + return result } - }; - - const peekType = () => op[index]; - - return [take, peekType]; -}; + } + + const peekType = () => op[index] + + return [take, peekType] +} // Find and return the length of an op component -const componentLength = function(component) { - if (typeof(component) === 'number') { - return component; - } else if (typeof(component.i) === 'string') { - return component.i.length; +const componentLength = function (component) { + if (typeof component === 'number') { + return component + } else if (typeof component.i === 'string') { + return component.i.length } else { // This should work because c.d and c.i must be +ive. - return component.d || component.i; + return component.d || component.i } -}; +} // Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate // adjacent inserts and deletes. -type.normalize = function(op) { - const newOp = []; - for (const component of Array.from(op)) { append(newOp, component); } - return newOp; -}; +type.normalize = function (op) { + const newOp = [] + for (const component of Array.from(op)) { + append(newOp, component) + } + return newOp +} // This is a helper method to transform and prune. goForwards is true for transform, false for prune. 
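// (A rough sketch of the results, using example values that are not in the
// original source: for two concurrent inserts at the same position,
//   type.transform([{ i: 'x' }], [{ i: 'yy' }], 'left')   // -> [{ i: 'x' }, 2]
//   type.transform([{ i: 'x' }], [{ i: 'yy' }], 'right')  // -> [2, { i: 'x' }]
// and pruning undoes the transform:
//   type.prune([{ i: 'x' }, 2], [{ i: 'yy' }])            // -> [{ i: 'x' }])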
-const transformer = function(op, otherOp, goForwards, side) { - let component; - checkOp(op); - checkOp(otherOp); - const newOp = []; +const transformer = function (op, otherOp, goForwards, side) { + let component + checkOp(op) + checkOp(otherOp) + const newOp = [] - const [take, peek] = Array.from(makeTake(op)); + const [take, peek] = Array.from(makeTake(op)) for (component of Array.from(otherOp)) { - var chunk; - let length = componentLength(component); + var chunk + let length = componentLength(component) - if (component.i !== undefined) { // Insert text or tombs - if (goForwards) { // transform - insert skips over inserted parts + if (component.i !== undefined) { + // Insert text or tombs + if (goForwards) { + // transform - insert skips over inserted parts if (side === 'left') { // The left insert should go first. - while (__guard__(peek(), x => x.i) !== undefined) { append(newOp, take()); } + while (__guard__(peek(), (x) => x.i) !== undefined) { + append(newOp, take()) + } } // In any case, skip the inserted text. - append(newOp, length); - - } else { // Prune. Remove skips for inserts. + append(newOp, length) + } else { + // Prune. Remove skips for inserts. while (length > 0) { - chunk = take(length, true); + chunk = take(length, true) - if (chunk === null) { throw new Error('The transformed op is invalid'); } - if (chunk.d !== undefined) { throw new Error('The transformed op deletes locally inserted characters - it cannot be purged of the insert.'); } + if (chunk === null) { + throw new Error('The transformed op is invalid') + } + if (chunk.d !== undefined) { + throw new Error( + 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' + ) + } if (typeof chunk === 'number') { - length -= chunk; + length -= chunk } else { - append(newOp, chunk); + append(newOp, chunk) } } } - - } else { // Skip or delete + } else { + // Skip or delete while (length > 0) { - chunk = take(length, true); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(length, true) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } - append(newOp, chunk); - if (!chunk.i) { length -= componentLength(chunk); } + append(newOp, chunk) + if (!chunk.i) { + length -= componentLength(chunk) + } } } } - + // Append extras from op1 - while (component = take()) { - if (component.i === undefined) { throw new Error(`Remaining fragments in the op: ${component}`); } - append(newOp, component); + while ((component = take())) { + if (component.i === undefined) { + throw new Error(`Remaining fragments in the op: ${component}`) + } + append(newOp, component) } - return newOp; -}; + return newOp +} // transform op1 by op2. Return transformed version of op1. // op1 and op2 are unchanged by transform. // side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. -type.transform = function(op, otherOp, side) { - if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side}) should be 'left' or 'right'`); } - return transformer(op, otherOp, true, side); -}; +type.transform = function (op, otherOp, side) { + if (side !== 'left' && side !== 'right') { + throw new Error(`side (${side}) should be 'left' or 'right'`) + } + return transformer(op, otherOp, true, side) +} // Prune is the inverse of transform. 
-type.prune = (op, otherOp) => transformer(op, otherOp, false); +type.prune = (op, otherOp) => transformer(op, otherOp, false) // Compose 2 ops into 1 op. -type.compose = function(op1, op2) { - let component; - if ((op1 === null) || (op1 === undefined)) { return op2; } +type.compose = function (op1, op2) { + let component + if (op1 === null || op1 === undefined) { + return op2 + } - checkOp(op1); - checkOp(op2); + checkOp(op1) + checkOp(op2) - const result = []; + const result = [] - const [take, _] = Array.from(makeTake(op1)); + const [take, _] = Array.from(makeTake(op1)) for (component of Array.from(op2)) { - - var chunk, length; - if (typeof(component) === 'number') { // Skip + var chunk, length + if (typeof component === 'number') { + // Skip // Just copy from op1. - length = component; + length = component while (length > 0) { - chunk = take(length); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - - append(result, chunk); - length -= componentLength(chunk); - } - - } else if (component.i !== undefined) { // Insert - append(result, {i:component.i}); - - } else { // Delete - length = component.d; - while (length > 0) { - chunk = take(length); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - - const chunkLength = componentLength(chunk); - if (chunk.i !== undefined) { - append(result, {i:chunkLength}); - } else { - append(result, {d:chunkLength}); + chunk = take(length) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) } - length -= chunkLength; + append(result, chunk) + length -= componentLength(chunk) + } + } else if (component.i !== undefined) { + // Insert + append(result, { i: component.i }) + } else { + // Delete + length = component.d + while (length > 0) { + chunk = take(length) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + const chunkLength = componentLength(chunk) + if (chunk.i !== undefined) { + append(result, { i: chunkLength }) + } else { + append(result, { d: chunkLength }) + } + + length -= chunkLength } } } - + // Append extras from op1 - while (component = take()) { - if (component.i === undefined) { throw new Error(`Remaining fragments in op1: ${component}`); } - append(result, component); + while ((component = take())) { + if (component.i === undefined) { + throw new Error(`Remaining fragments in op1: ${component}`) + } + append(result, component) } - return result; -}; - -if (typeof WEB !== 'undefined' && WEB !== null) { - exports.types['text-tp2'] = type; -} else { - module.exports = type; + return result } +if (typeof WEB !== 'undefined' && WEB !== null) { + exports.types['text-tp2'] = type +} else { + module.exports = type +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/services/document-updater/app/js/sharejs/text.js b/services/document-updater/app/js/sharejs/text.js index 3e28b898b6..34886b597d 100644 --- a/services/document-updater/app/js/sharejs/text.js +++ b/services/document-updater/app/js/sharejs/text.js @@ -32,99 +32,139 @@ // NOTE: The global scope here is shared with other sharejs files when built with closure. // Be careful what ends up in your namespace. 
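// (For orientation, an example added by the editor with hypothetical values:
// ops in this type are arrays of components { i: string, p: pos } or
// { d: string, p: pos }, so
//   text.apply('Hello world', [{ i: 'brave ', p: 6 }])        // -> 'Hello brave world'
//   text.apply('Hello brave world', [{ d: 'brave ', p: 6 }])  // -> 'Hello world')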
-let append, transformComponent; -const text = {}; +let append, transformComponent +const text = {} -text.name = 'text'; +text.name = 'text' -text.create = () => ''; +text.create = () => '' -const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos); +const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos) -const checkValidComponent = function(c) { - if (typeof c.p !== 'number') { throw new Error('component missing position field'); } +const checkValidComponent = function (c) { + if (typeof c.p !== 'number') { + throw new Error('component missing position field') + } - const i_type = typeof c.i; - const d_type = typeof c.d; - if (!((i_type === 'string') ^ (d_type === 'string'))) { throw new Error('component needs an i or d field'); } + const i_type = typeof c.i + const d_type = typeof c.d + if (!((i_type === 'string') ^ (d_type === 'string'))) { + throw new Error('component needs an i or d field') + } - if (!(c.p >= 0)) { throw new Error('position cannot be negative'); } -}; + if (!(c.p >= 0)) { + throw new Error('position cannot be negative') + } +} -const checkValidOp = function(op) { - for (const c of Array.from(op)) { checkValidComponent(c); } - return true; -}; +const checkValidOp = function (op) { + for (const c of Array.from(op)) { + checkValidComponent(c) + } + return true +} -text.apply = function(snapshot, op) { - checkValidOp(op); +text.apply = function (snapshot, op) { + checkValidOp(op) for (const component of Array.from(op)) { if (component.i != null) { - snapshot = strInject(snapshot, component.p, component.i); + snapshot = strInject(snapshot, component.p, component.i) } else { - const deleted = snapshot.slice(component.p, (component.p + component.d.length)); - if (component.d !== deleted) { throw new Error(`Delete component '${component.d}' does not match deleted text '${deleted}'`); } - snapshot = snapshot.slice(0, component.p) + snapshot.slice((component.p + component.d.length)); + const deleted = snapshot.slice( + component.p, + component.p + component.d.length + ) + if (component.d !== deleted) { + throw new Error( + `Delete component '${component.d}' does not match deleted text '${deleted}'` + ) + } + snapshot = + snapshot.slice(0, component.p) + + snapshot.slice(component.p + component.d.length) } } - - return snapshot; -}; + return snapshot +} // Exported for use by the random op generator. // // For simplicity, this version of append does not compress adjacent inserts and deletes of // the same text. It would be nice to change that at some stage. 
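// (An added example, not in the original: appending { i: 'llo', p: 7 } to an
// op ending in { i: 'He', p: 5 } merges the two, since 5 <= 7 <= 5 + 'He'.length:
//   const op = [{ i: 'He', p: 5 }]
//   append(op, { i: 'llo', p: 7 })  // op is now [{ i: 'Hello', p: 5 }])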
-text._append = (append = function(newOp, c) {
-  if ((c.i === '') || (c.d === '')) { return; }
+text._append = append = function (newOp, c) {
+  if (c.i === '' || c.d === '') {
+    return
+  }
   if (newOp.length === 0) {
-    return newOp.push(c);
+    return newOp.push(c)
   } else {
-    const last = newOp[newOp.length - 1];
+    const last = newOp[newOp.length - 1]
 
     // Compose the insert into the previous insert if possible
-    if ((last.i != null) && (c.i != null) && (last.p <= c.p && c.p <= (last.p + last.i.length))) {
-      return newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p};
-    } else if ((last.d != null) && (c.d != null) && (c.p <= last.p && last.p <= (c.p + c.d.length))) {
-      return newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p};
+    if (
+      last.i != null &&
+      c.i != null &&
+      last.p <= c.p &&
+      c.p <= last.p + last.i.length
+    ) {
+      return (newOp[newOp.length - 1] = {
+        i: strInject(last.i, c.p - last.p, c.i),
+        p: last.p
+      })
+    } else if (
+      last.d != null &&
+      c.d != null &&
+      c.p <= last.p &&
+      last.p <= c.p + c.d.length
+    ) {
+      return (newOp[newOp.length - 1] = {
+        d: strInject(c.d, last.p - c.p, last.d),
+        p: c.p
+      })
     } else {
-      return newOp.push(c);
+      return newOp.push(c)
     }
   }
-});
+}
 
-text.compose = function(op1, op2) {
-  checkValidOp(op1);
-  checkValidOp(op2);
+text.compose = function (op1, op2) {
+  checkValidOp(op1)
+  checkValidOp(op2)
 
-  const newOp = op1.slice();
-  for (const c of Array.from(op2)) { append(newOp, c); }
+  const newOp = op1.slice()
+  for (const c of Array.from(op2)) {
+    append(newOp, c)
+  }
 
-  return newOp;
-};
+  return newOp
+}
 
 // Attempt to compress the op components together 'as much as possible'.
 // This implementation preserves order and preserves create/delete pairs.
-text.compress = op => text.compose([], op);
+text.compress = (op) => text.compose([], op)
+
+text.normalize = function (op) {
+  const newOp = []
 
-text.normalize = function(op) {
-  const newOp = [];
-  
   // Normalize should allow ops which are a single (unwrapped) component:
   // {i:'asdf', p:23}.
   // There's no good way to test if something is an array:
   // http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/
   // so this is probably the least bad solution.
-  if ((op.i != null) || (op.p != null)) { op = [op]; }
+  if (op.i != null || op.p != null) {
+    op = [op]
+  }
 
   for (const c of Array.from(op)) {
-    if (c.p == null) { c.p = 0; }
-    append(newOp, c);
+    if (c.p == null) {
+      c.p = 0
+    }
+    append(newOp, c)
   }
-  
-  return newOp;
-};
+
+  return newOp
+}
 
 // This helper method transforms a position by an op component.
 //
@@ -132,121 +172,143 @@ text.normalize = function(op) {
 // is pushed after the insert (true) or before it (false).
 //
 // insertAfter is optional for deletes.
-const transformPosition = function(pos, c, insertAfter) {
+const transformPosition = function (pos, c, insertAfter) {
   if (c.i != null) {
-    if ((c.p < pos) || ((c.p === pos) && insertAfter)) {
-      return pos + c.i.length;
+    if (c.p < pos || (c.p === pos && insertAfter)) {
+      return pos + c.i.length
     } else {
-      return pos;
+      return pos
     }
   } else {
     // I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length))
     // but I think it's harder to read that way, and it compiles using ternary operators anyway
    // so it's no slower written like this.
if (pos <= c.p) { - return pos; - } else if (pos <= (c.p + c.d.length)) { - return c.p; + return pos + } else if (pos <= c.p + c.d.length) { + return c.p } else { - return pos - c.d.length; + return pos - c.d.length } } -}; +} // Helper method to transform a cursor position as a result of an op. // // Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position // is pushed after an insert (true) or before it (false). -text.transformCursor = function(position, op, side) { - const insertAfter = side === 'right'; - for (const c of Array.from(op)) { position = transformPosition(position, c, insertAfter); } - return position; -}; +text.transformCursor = function (position, op, side) { + const insertAfter = side === 'right' + for (const c of Array.from(op)) { + position = transformPosition(position, c, insertAfter) + } + return position +} // Transform an op component by another op component. Asymmetric. // The result will be appended to destination. // // exported for use in JSON type -text._tc = (transformComponent = function(dest, c, otherC, side) { - checkValidOp([c]); - checkValidOp([otherC]); +text._tc = transformComponent = function (dest, c, otherC, side) { + checkValidOp([c]) + checkValidOp([otherC]) if (c.i != null) { - append(dest, {i:c.i, p:transformPosition(c.p, otherC, side === 'right')}); - - } else { // Delete - if (otherC.i != null) { // delete vs insert - let s = c.d; + append(dest, { + i: c.i, + p: transformPosition(c.p, otherC, side === 'right') + }) + } else { + // Delete + if (otherC.i != null) { + // delete vs insert + let s = c.d if (c.p < otherC.p) { - append(dest, {d:s.slice(0, otherC.p - c.p), p:c.p}); - s = s.slice((otherC.p - c.p)); + append(dest, { d: s.slice(0, otherC.p - c.p), p: c.p }) + s = s.slice(otherC.p - c.p) } if (s !== '') { - append(dest, {d:s, p:c.p + otherC.i.length}); + append(dest, { d: s, p: c.p + otherC.i.length }) } - - } else { // Delete vs delete - if (c.p >= (otherC.p + otherC.d.length)) { - append(dest, {d:c.d, p:c.p - otherC.d.length}); - } else if ((c.p + c.d.length) <= otherC.p) { - append(dest, c); + } else { + // Delete vs delete + if (c.p >= otherC.p + otherC.d.length) { + append(dest, { d: c.d, p: c.p - otherC.d.length }) + } else if (c.p + c.d.length <= otherC.p) { + append(dest, c) } else { // They overlap somewhere. 
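        // (A worked example for the overlap case, with values not taken from
        // the original source: for c = { d: 'bcd', p: 1 } against
        // otherC = { d: 'cde', p: 2 } the deletes overlap on 'cd', so only 'b'
        // remains to delete and the component appended below is { d: 'b', p: 1 }.)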
- const newC = {d:'', p:c.p}; + const newC = { d: '', p: c.p } if (c.p < otherC.p) { - newC.d = c.d.slice(0, (otherC.p - c.p)); + newC.d = c.d.slice(0, otherC.p - c.p) } - if ((c.p + c.d.length) > (otherC.p + otherC.d.length)) { - newC.d += c.d.slice(((otherC.p + otherC.d.length) - c.p)); + if (c.p + c.d.length > otherC.p + otherC.d.length) { + newC.d += c.d.slice(otherC.p + otherC.d.length - c.p) } // This is entirely optional - just for a check that the deleted // text in the two ops matches - const intersectStart = Math.max(c.p, otherC.p); - const intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length); - const cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p); - const otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p); - if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); } + const intersectStart = Math.max(c.p, otherC.p) + const intersectEnd = Math.min( + c.p + c.d.length, + otherC.p + otherC.d.length + ) + const cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p) + const otherIntersect = otherC.d.slice( + intersectStart - otherC.p, + intersectEnd - otherC.p + ) + if (cIntersect !== otherIntersect) { + throw new Error( + 'Delete ops delete different text in the same region of the document' + ) + } if (newC.d !== '') { // This could be rewritten similarly to insert v delete, above. - newC.p = transformPosition(newC.p, otherC); - append(dest, newC); + newC.p = transformPosition(newC.p, otherC) + append(dest, newC) } } } } - - return dest; -}); -const invertComponent = function(c) { + return dest +} + +const invertComponent = function (c) { if (c.i != null) { - return {d:c.i, p:c.p}; + return { d: c.i, p: c.p } } else { - return {i:c.d, p:c.p}; + return { i: c.d, p: c.p } } -}; +} // No need to use append for invert, because the components won't be able to // cancel with one another. -text.invert = op => Array.from(op.slice().reverse()).map((c) => invertComponent(c)); - +text.invert = (op) => + Array.from(op.slice().reverse()).map((c) => invertComponent(c)) if (typeof WEB !== 'undefined' && WEB !== null) { - if (!exports.types) { exports.types = {}; } + if (!exports.types) { + exports.types = {} + } // This is kind of awful - come up with a better way to hook this helper code up. - bootstrapTransform(text, transformComponent, checkValidOp, append); + bootstrapTransform(text, transformComponent, checkValidOp, append) // [] is used to prevent closure from renaming types.text - exports.types.text = text; + exports.types.text = text } else { - module.exports = text; + module.exports = text // The text type really shouldn't need this - it should be possible to define // an efficient transform function by making a sort of transform map and passing each // op component through it. - require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append); + require('./helpers').bootstrapTransform( + text, + transformComponent, + checkValidOp, + append + ) } - diff --git a/services/document-updater/app/js/sharejs/types/count.js b/services/document-updater/app/js/sharejs/types/count.js index c77b76b098..8d8477caf4 100644 --- a/services/document-updater/app/js/sharejs/types/count.js +++ b/services/document-updater/app/js/sharejs/types/count.js @@ -8,25 +8,30 @@ */ // This is a simple type used for testing other OT code. 
Each op is [expectedSnapshot, increment] -exports.name = 'count'; -exports.create = () => 1; +exports.name = 'count' +exports.create = () => 1 -exports.apply = function(snapshot, op) { - const [v, inc] = Array.from(op); - if (snapshot !== v) { throw new Error(`Op ${v} != snapshot ${snapshot}`); } - return snapshot + inc; -}; +exports.apply = function (snapshot, op) { + const [v, inc] = Array.from(op) + if (snapshot !== v) { + throw new Error(`Op ${v} != snapshot ${snapshot}`) + } + return snapshot + inc +} // transform op1 by op2. Return transformed version of op1. -exports.transform = function(op1, op2) { - if (op1[0] !== op2[0]) { throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`); } - return [op1[0] + op2[1], op1[1]]; -}; +exports.transform = function (op1, op2) { + if (op1[0] !== op2[0]) { + throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`) + } + return [op1[0] + op2[1], op1[1]] +} -exports.compose = function(op1, op2) { - if ((op1[0] + op1[1]) !== op2[0]) { throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`); } - return [op1[0], op1[1] + op2[1]]; -}; - -exports.generateRandomOp = doc => [[doc, 1], doc + 1]; +exports.compose = function (op1, op2) { + if (op1[0] + op1[1] !== op2[0]) { + throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`) + } + return [op1[0], op1[1] + op2[1]] +} +exports.generateRandomOp = (doc) => [[doc, 1], doc + 1] diff --git a/services/document-updater/app/js/sharejs/types/helpers.js b/services/document-updater/app/js/sharejs/types/helpers.js index b4500a3214..1d7b268e17 100644 --- a/services/document-updater/app/js/sharejs/types/helpers.js +++ b/services/document-updater/app/js/sharejs/types/helpers.js @@ -17,77 +17,96 @@ // Add transform and transformX functions for an OT type which has transformComponent defined. // transformComponent(destination array, component, other component, side) -let bootstrapTransform; -exports._bt = (bootstrapTransform = function(type, transformComponent, checkValidOp, append) { - let transformX; - const transformComponentX = function(left, right, destLeft, destRight) { - transformComponent(destLeft, left, right, 'left'); - return transformComponent(destRight, right, left, 'right'); - }; +let bootstrapTransform +exports._bt = bootstrapTransform = function ( + type, + transformComponent, + checkValidOp, + append +) { + let transformX + const transformComponentX = function (left, right, destLeft, destRight) { + transformComponent(destLeft, left, right, 'left') + return transformComponent(destRight, right, left, 'right') + } // Transforms rightOp by leftOp. 
Returns ['rightOp', clientOp'] - type.transformX = (type.transformX = (transformX = function(leftOp, rightOp) { - checkValidOp(leftOp); - checkValidOp(rightOp); + type.transformX = type.transformX = transformX = function (leftOp, rightOp) { + checkValidOp(leftOp) + checkValidOp(rightOp) - const newRightOp = []; + const newRightOp = [] for (let rightComponent of Array.from(rightOp)) { // Generate newLeftOp by composing leftOp by rightComponent - const newLeftOp = []; + const newLeftOp = [] - let k = 0; + let k = 0 while (k < leftOp.length) { - var l; - const nextC = []; - transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC); - k++; + var l + const nextC = [] + transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC) + k++ if (nextC.length === 1) { - rightComponent = nextC[0]; + rightComponent = nextC[0] } else if (nextC.length === 0) { - for (l of Array.from(leftOp.slice(k))) { append(newLeftOp, l); } - rightComponent = null; - break; + for (l of Array.from(leftOp.slice(k))) { + append(newLeftOp, l) + } + rightComponent = null + break } else { // Recurse. - const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)); - for (l of Array.from(l_)) { append(newLeftOp, l); } - for (const r of Array.from(r_)) { append(newRightOp, r); } - rightComponent = null; - break; + const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)) + for (l of Array.from(l_)) { + append(newLeftOp, l) + } + for (const r of Array.from(r_)) { + append(newRightOp, r) + } + rightComponent = null + break } } - - if (rightComponent != null) { append(newRightOp, rightComponent); } - leftOp = newLeftOp; + + if (rightComponent != null) { + append(newRightOp, rightComponent) + } + leftOp = newLeftOp } - - return [leftOp, newRightOp]; - })); + + return [leftOp, newRightOp] + } // Transforms op with specified type ('left' or 'right') by otherOp. - return type.transform = (type.transform = function(op, otherOp, type) { - let _; - if ((type !== 'left') && (type !== 'right')) { throw new Error("type must be 'left' or 'right'"); } + return (type.transform = type.transform = function (op, otherOp, type) { + let _ + if (type !== 'left' && type !== 'right') { + throw new Error("type must be 'left' or 'right'") + } - if (otherOp.length === 0) { return op; } + if (otherOp.length === 0) { + return op + } // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? 
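      // (Example values for the single-component fast path below, added for
      // illustration: for the 'text' type, transforming [{ i: 'a', p: 0 }]
      // against [{ i: 'b', p: 0 }] gives [{ i: 'a', p: 0 }] for side 'left'
      // and [{ i: 'a', p: 1 }] for side 'right'.)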
- if ((op.length === 1) && (otherOp.length === 1)) { return transformComponent([], op[0], otherOp[0], type); } + if (op.length === 1 && otherOp.length === 1) { + return transformComponent([], op[0], otherOp[0], type) + } if (type === 'left') { - let left; - [left, _] = Array.from(transformX(op, otherOp)); - return left; + let left + ;[left, _] = Array.from(transformX(op, otherOp)) + return left } else { - let right; - [_, right] = Array.from(transformX(otherOp, op)); - return right; + let right + ;[_, right] = Array.from(transformX(otherOp, op)) + return right } - }); -}); + }) +} if (typeof WEB === 'undefined') { - exports.bootstrapTransform = bootstrapTransform; + exports.bootstrapTransform = bootstrapTransform } diff --git a/services/document-updater/app/js/sharejs/types/index.js b/services/document-updater/app/js/sharejs/types/index.js index a322063e83..7e3d6bbf26 100644 --- a/services/document-updater/app/js/sharejs/types/index.js +++ b/services/document-updater/app/js/sharejs/types/index.js @@ -6,18 +6,20 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const register = function(file) { - const type = require(file); - exports[type.name] = type; - try { return require(`${file}-api`); } catch (error) {} -}; +const register = function (file) { + const type = require(file) + exports[type.name] = type + try { + return require(`${file}-api`) + } catch (error) {} +} // Import all the built-in types. -register('./simple'); -register('./count'); +register('./simple') +register('./count') -register('./text'); -register('./text-composable'); -register('./text-tp2'); +register('./text') +register('./text-composable') +register('./text-tp2') -register('./json'); +register('./json') diff --git a/services/document-updater/app/js/sharejs/types/json-api.js b/services/document-updater/app/js/sharejs/types/json-api.js index 67e54f5334..a8db564fdf 100644 --- a/services/document-updater/app/js/sharejs/types/json-api.js +++ b/services/document-updater/app/js/sharejs/types/json-api.js @@ -14,267 +14,344 @@ */ // API for JSON OT -let json; -if (typeof WEB === 'undefined') { json = require('./json'); } - -if (typeof WEB !== 'undefined' && WEB !== null) { - const { - extendDoc - } = exports; - exports.extendDoc = function(name, fn) { - SubDoc.prototype[name] = fn; - return extendDoc(name, fn); - }; +let json +if (typeof WEB === 'undefined') { + json = require('./json') } -const depath = function(path) { - if ((path.length === 1) && (path[0].constructor === Array)) { - return path[0]; - } else { return path; } -}; +if (typeof WEB !== 'undefined' && WEB !== null) { + const { extendDoc } = exports + exports.extendDoc = function (name, fn) { + SubDoc.prototype[name] = fn + return extendDoc(name, fn) + } +} + +const depath = function (path) { + if (path.length === 1 && path[0].constructor === Array) { + return path[0] + } else { + return path + } +} class SubDoc { constructor(doc, path) { - this.doc = doc; - this.path = path; + this.doc = doc + this.path = path + } + + at(...path) { + return this.doc.at(this.path.concat(depath(path))) + } + + get() { + return this.doc.getAt(this.path) } - at(...path) { return this.doc.at(this.path.concat(depath(path))); } - get() { return this.doc.getAt(this.path); } // for objects and lists - set(value, cb) { return this.doc.setAt(this.path, value, cb); } + set(value, cb) { + return this.doc.setAt(this.path, value, cb) + } + // for strings and lists. 
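  // (An added usage sketch with a hypothetical snapshot, not from the
  // original file: given { title: 'Hello' },
  // doc.at('title').insert(0, 'Hi', cb) submits the op
  // [{ p: ['title', 0], si: 'Hi' }].)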
- insert(pos, value, cb) { return this.doc.insertAt(this.path, pos, value, cb); } + insert(pos, value, cb) { + return this.doc.insertAt(this.path, pos, value, cb) + } + // for strings - del(pos, length, cb) { return this.doc.deleteTextAt(this.path, length, pos, cb); } + del(pos, length, cb) { + return this.doc.deleteTextAt(this.path, length, pos, cb) + } + // for objects and lists - remove(cb) { return this.doc.removeAt(this.path, cb); } - push(value, cb) { return this.insert(this.get().length, value, cb); } - move(from, to, cb) { return this.doc.moveAt(this.path, from, to, cb); } - add(amount, cb) { return this.doc.addAt(this.path, amount, cb); } - on(event, cb) { return this.doc.addListener(this.path, event, cb); } - removeListener(l) { return this.doc.removeListener(l); } + remove(cb) { + return this.doc.removeAt(this.path, cb) + } + + push(value, cb) { + return this.insert(this.get().length, value, cb) + } + + move(from, to, cb) { + return this.doc.moveAt(this.path, from, to, cb) + } + + add(amount, cb) { + return this.doc.addAt(this.path, amount, cb) + } + + on(event, cb) { + return this.doc.addListener(this.path, event, cb) + } + + removeListener(l) { + return this.doc.removeListener(l) + } // text API compatibility - getLength() { return this.get().length; } - getText() { return this.get(); } + getLength() { + return this.get().length + } + + getText() { + return this.get() + } } -const traverse = function(snapshot, path) { - const container = {data:snapshot}; - let key = 'data'; - let elem = container; +const traverse = function (snapshot, path) { + const container = { data: snapshot } + let key = 'data' + let elem = container for (const p of Array.from(path)) { - elem = elem[key]; - key = p; - if (typeof elem === 'undefined') { throw new Error('bad path'); } + elem = elem[key] + key = p + if (typeof elem === 'undefined') { + throw new Error('bad path') + } } - return {elem, key}; -}; + return { elem, key } +} -const pathEquals = function(p1, p2) { - if (p1.length !== p2.length) { return false; } - for (let i = 0; i < p1.length; i++) { - const e = p1[i]; - if (e !== p2[i]) { return false; } +const pathEquals = function (p1, p2) { + if (p1.length !== p2.length) { + return false } - return true; -}; + for (let i = 0; i < p1.length; i++) { + const e = p1[i] + if (e !== p2[i]) { + return false + } + } + return true +} json.api = { - provides: {json:true}, + provides: { json: true }, - at(...path) { return new SubDoc(this, depath(path)); }, + at(...path) { + return new SubDoc(this, depath(path)) + }, - get() { return this.snapshot; }, - set(value, cb) { return this.setAt([], value, cb); }, + get() { + return this.snapshot + }, + set(value, cb) { + return this.setAt([], value, cb) + }, getAt(path) { - const {elem, key} = traverse(this.snapshot, path); - return elem[key]; + const { elem, key } = traverse(this.snapshot, path) + return elem[key] }, setAt(path, value, cb) { - const {elem, key} = traverse(this.snapshot, path); - const op = {p:path}; + const { elem, key } = traverse(this.snapshot, path) + const op = { p: path } if (elem.constructor === Array) { - op.li = value; - if (typeof elem[key] !== 'undefined') { op.ld = elem[key]; } + op.li = value + if (typeof elem[key] !== 'undefined') { + op.ld = elem[key] + } } else if (typeof elem === 'object') { - op.oi = value; - if (typeof elem[key] !== 'undefined') { op.od = elem[key]; } - } else { throw new Error('bad path'); } - return this.submitOp([op], cb); + op.oi = value + if (typeof elem[key] !== 'undefined') { + op.od = elem[key] + } + 
} else { + throw new Error('bad path') + } + return this.submitOp([op], cb) }, removeAt(path, cb) { - const {elem, key} = traverse(this.snapshot, path); - if (typeof elem[key] === 'undefined') { throw new Error('no element at that path'); } - const op = {p:path}; + const { elem, key } = traverse(this.snapshot, path) + if (typeof elem[key] === 'undefined') { + throw new Error('no element at that path') + } + const op = { p: path } if (elem.constructor === Array) { - op.ld = elem[key]; + op.ld = elem[key] } else if (typeof elem === 'object') { - op.od = elem[key]; - } else { throw new Error('bad path'); } - return this.submitOp([op], cb); + op.od = elem[key] + } else { + throw new Error('bad path') + } + return this.submitOp([op], cb) }, insertAt(path, pos, value, cb) { - const {elem, key} = traverse(this.snapshot, path); - const op = {p:path.concat(pos)}; + const { elem, key } = traverse(this.snapshot, path) + const op = { p: path.concat(pos) } if (elem[key].constructor === Array) { - op.li = value; + op.li = value } else if (typeof elem[key] === 'string') { - op.si = value; + op.si = value } - return this.submitOp([op], cb); + return this.submitOp([op], cb) }, moveAt(path, from, to, cb) { - const op = [{p:path.concat(from), lm:to}]; - return this.submitOp(op, cb); + const op = [{ p: path.concat(from), lm: to }] + return this.submitOp(op, cb) }, addAt(path, amount, cb) { - const op = [{p:path, na:amount}]; - return this.submitOp(op, cb); + const op = [{ p: path, na: amount }] + return this.submitOp(op, cb) }, deleteTextAt(path, length, pos, cb) { - const {elem, key} = traverse(this.snapshot, path); - const op = [{p:path.concat(pos), sd:elem[key].slice(pos, (pos + length))}]; - return this.submitOp(op, cb); + const { elem, key } = traverse(this.snapshot, path) + const op = [{ p: path.concat(pos), sd: elem[key].slice(pos, pos + length) }] + return this.submitOp(op, cb) }, addListener(path, event, cb) { - const l = {path, event, cb}; - this._listeners.push(l); - return l; + const l = { path, event, cb } + this._listeners.push(l) + return l }, removeListener(l) { - const i = this._listeners.indexOf(l); - if (i < 0) { return false; } - this._listeners.splice(i, 1); - return true; + const i = this._listeners.indexOf(l) + if (i < 0) { + return false + } + this._listeners.splice(i, 1) + return true }, _register() { - this._listeners = []; - this.on('change', function(op) { + this._listeners = [] + this.on('change', function (op) { return (() => { - const result = []; + const result = [] for (const c of Array.from(op)) { - var i; - if ((c.na !== undefined) || (c.si !== undefined) || (c.sd !== undefined)) { + var i + if (c.na !== undefined || c.si !== undefined || c.sd !== undefined) { // no change to structure - continue; + continue } - var to_remove = []; + var to_remove = [] for (i = 0; i < this._listeners.length; i++) { // Transform a dummy op by the incoming op to work out what // should happen to the listener. - const l = this._listeners[i]; - const dummy = {p:l.path, na:0}; - const xformed = this.type.transformComponent([], dummy, c, 'left'); + const l = this._listeners[i] + const dummy = { p: l.path, na: 0 } + const xformed = this.type.transformComponent([], dummy, c, 'left') if (xformed.length === 0) { // The op was transformed to noop, so we should delete the listener. - to_remove.push(i); + to_remove.push(i) } else if (xformed.length === 1) { // The op remained, so grab its new path into the listener. 
- l.path = xformed[0].p; + l.path = xformed[0].p } else { - throw new Error("Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components."); + throw new Error( + "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components." + ) } } - to_remove.sort((a, b) => b - a); - result.push((() => { - const result1 = []; - for (i of Array.from(to_remove)) { - result1.push(this._listeners.splice(i, 1)); - } - return result1; - })()); - } - return result; - })(); - }); - return this.on('remoteop', function(op) { - return (() => { - const result = []; - for (var c of Array.from(op)) { - var match_path = c.na === undefined ? c.p.slice(0, c.p.length-1) : c.p; - result.push((() => { - const result1 = []; - for (const {path, event, cb} of Array.from(this._listeners)) { - var common; - if (pathEquals(path, match_path)) { - switch (event) { - case 'insert': - if ((c.li !== undefined) && (c.ld === undefined)) { - result1.push(cb(c.p[c.p.length-1], c.li)); - } else if ((c.oi !== undefined) && (c.od === undefined)) { - result1.push(cb(c.p[c.p.length-1], c.oi)); - } else if (c.si !== undefined) { - result1.push(cb(c.p[c.p.length-1], c.si)); - } else { - result1.push(undefined); - } - break; - case 'delete': - if ((c.li === undefined) && (c.ld !== undefined)) { - result1.push(cb(c.p[c.p.length-1], c.ld)); - } else if ((c.oi === undefined) && (c.od !== undefined)) { - result1.push(cb(c.p[c.p.length-1], c.od)); - } else if (c.sd !== undefined) { - result1.push(cb(c.p[c.p.length-1], c.sd)); - } else { - result1.push(undefined); - } - break; - case 'replace': - if ((c.li !== undefined) && (c.ld !== undefined)) { - result1.push(cb(c.p[c.p.length-1], c.ld, c.li)); - } else if ((c.oi !== undefined) && (c.od !== undefined)) { - result1.push(cb(c.p[c.p.length-1], c.od, c.oi)); - } else { - result1.push(undefined); - } - break; - case 'move': - if (c.lm !== undefined) { - result1.push(cb(c.p[c.p.length-1], c.lm)); - } else { - result1.push(undefined); - } - break; - case 'add': - if (c.na !== undefined) { - result1.push(cb(c.na)); - } else { - result1.push(undefined); - } - break; - default: - result1.push(undefined); - } - } else if ((common = this.type.commonPath(match_path, path)) != null) { - if (event === 'child op') { - if (match_path.length === path.length && path.length === common) { - throw new Error("paths match length and have commonality, but aren't equal?"); - } - const child_path = c.p.slice(common+1); - result1.push(cb(child_path, c)); - } else { - result1.push(undefined); - } - } else { - result1.push(undefined); + to_remove.sort((a, b) => b - a) + result.push( + (() => { + const result1 = [] + for (i of Array.from(to_remove)) { + result1.push(this._listeners.splice(i, 1)) } - } - return result1; - })()); + return result1 + })() + ) } - return result; - })(); - }); + return result + })() + }) + return this.on('remoteop', function (op) { + return (() => { + const result = [] + for (var c of Array.from(op)) { + var match_path = + c.na === undefined ? 
c.p.slice(0, c.p.length - 1) : c.p + result.push( + (() => { + const result1 = [] + for (const { path, event, cb } of Array.from(this._listeners)) { + var common + if (pathEquals(path, match_path)) { + switch (event) { + case 'insert': + if (c.li !== undefined && c.ld === undefined) { + result1.push(cb(c.p[c.p.length - 1], c.li)) + } else if (c.oi !== undefined && c.od === undefined) { + result1.push(cb(c.p[c.p.length - 1], c.oi)) + } else if (c.si !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.si)) + } else { + result1.push(undefined) + } + break + case 'delete': + if (c.li === undefined && c.ld !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.ld)) + } else if (c.oi === undefined && c.od !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.od)) + } else if (c.sd !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.sd)) + } else { + result1.push(undefined) + } + break + case 'replace': + if (c.li !== undefined && c.ld !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.ld, c.li)) + } else if (c.oi !== undefined && c.od !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.od, c.oi)) + } else { + result1.push(undefined) + } + break + case 'move': + if (c.lm !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.lm)) + } else { + result1.push(undefined) + } + break + case 'add': + if (c.na !== undefined) { + result1.push(cb(c.na)) + } else { + result1.push(undefined) + } + break + default: + result1.push(undefined) + } + } else if ( + (common = this.type.commonPath(match_path, path)) != null + ) { + if (event === 'child op') { + if ( + match_path.length === path.length && + path.length === common + ) { + throw new Error( + "paths match length and have commonality, but aren't equal?" + ) + } + const child_path = c.p.slice(common + 1) + result1.push(cb(child_path, c)) + } else { + result1.push(undefined) + } + } else { + result1.push(undefined) + } + } + return result1 + })() + ) + } + return result + })() + }) } -}; +} diff --git a/services/document-updater/app/js/sharejs/types/json.js b/services/document-updater/app/js/sharejs/types/json.js index 5619c09be1..fa9b030a82 100644 --- a/services/document-updater/app/js/sharejs/types/json.js +++ b/services/document-updater/app/js/sharejs/types/json.js @@ -16,382 +16,459 @@ // // Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations -let text; +let text if (typeof WEB !== 'undefined' && WEB !== null) { - ({ - text - } = exports.types); + ;({ text } = exports.types) } else { - text = require('./text'); + text = require('./text') } -const json = {}; +const json = {} -json.name = 'json'; +json.name = 'json' -json.create = () => null; +json.create = () => null -json.invertComponent = function(c) { - const c_ = {p: c.p}; - if (c.si !== undefined) { c_.sd = c.si; } - if (c.sd !== undefined) { c_.si = c.sd; } - if (c.oi !== undefined) { c_.od = c.oi; } - if (c.od !== undefined) { c_.oi = c.od; } - if (c.li !== undefined) { c_.ld = c.li; } - if (c.ld !== undefined) { c_.li = c.ld; } - if (c.na !== undefined) { c_.na = -c.na; } - if (c.lm !== undefined) { - c_.lm = c.p[c.p.length-1]; - c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]); +json.invertComponent = function (c) { + const c_ = { p: c.p } + if (c.si !== undefined) { + c_.sd = c.si } - return c_; -}; + if (c.sd !== undefined) { + c_.si = c.sd + } + if (c.oi !== undefined) { + c_.od = c.oi + } + if (c.od !== undefined) { + c_.oi = c.od + } + if (c.li !== undefined) { + c_.ld = c.li + } + if (c.ld !== undefined) { + c_.li = c.ld + } + if 
(c.na !== undefined) { + c_.na = -c.na + } + if (c.lm !== undefined) { + c_.lm = c.p[c.p.length - 1] + c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]) + } + return c_ +} -json.invert = op => Array.from(op.slice().reverse()).map((c) => json.invertComponent(c)); +json.invert = (op) => + Array.from(op.slice().reverse()).map((c) => json.invertComponent(c)) -json.checkValidOp = function(op) {}; +json.checkValidOp = function (op) {} -const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; -json.checkList = function(elem) { - if (!isArray(elem)) { throw new Error('Referenced element not a list'); } -}; +const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' +json.checkList = function (elem) { + if (!isArray(elem)) { + throw new Error('Referenced element not a list') + } +} -json.checkObj = function(elem) { - if (elem.constructor !== Object) { throw new Error(`Referenced element not an object (it was ${JSON.stringify(elem)})`); } -}; +json.checkObj = function (elem) { + if (elem.constructor !== Object) { + throw new Error( + `Referenced element not an object (it was ${JSON.stringify(elem)})` + ) + } +} -json.apply = function(snapshot, op) { - json.checkValidOp(op); - op = clone(op); +json.apply = function (snapshot, op) { + json.checkValidOp(op) + op = clone(op) - const container = {data: clone(snapshot)}; + const container = { data: clone(snapshot) } try { for (let i = 0; i < op.length; i++) { - const c = op[i]; - let parent = null; - let parentkey = null; - let elem = container; - let key = 'data'; + const c = op[i] + let parent = null + let parentkey = null + let elem = container + let key = 'data' for (const p of Array.from(c.p)) { - parent = elem; - parentkey = key; - elem = elem[key]; - key = p; + parent = elem + parentkey = key + elem = elem[key] + key = p - if (parent == null) { throw new Error('Path invalid'); } + if (parent == null) { + throw new Error('Path invalid') + } } if (c.na !== undefined) { // Number add - if (typeof elem[key] !== 'number') { throw new Error('Referenced element not a number'); } - elem[key] += c.na; - + if (typeof elem[key] !== 'number') { + throw new Error('Referenced element not a number') + } + elem[key] += c.na } else if (c.si !== undefined) { // String insert - if (typeof elem !== 'string') { throw new Error(`Referenced element not a string (it was ${JSON.stringify(elem)})`); } - parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key); + if (typeof elem !== 'string') { + throw new Error( + `Referenced element not a string (it was ${JSON.stringify(elem)})` + ) + } + parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key) } else if (c.sd !== undefined) { // String delete - if (typeof elem !== 'string') { throw new Error('Referenced element not a string'); } - if (elem.slice(key, key + c.sd.length) !== c.sd) { throw new Error('Deleted string does not match'); } - parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length); - - } else if ((c.li !== undefined) && (c.ld !== undefined)) { + if (typeof elem !== 'string') { + throw new Error('Referenced element not a string') + } + if (elem.slice(key, key + c.sd.length) !== c.sd) { + throw new Error('Deleted string does not match') + } + parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length) + } else if (c.li !== undefined && c.ld !== undefined) { // List replace - json.checkList(elem); + json.checkList(elem) // Should check the list element matches c.ld - elem[key] = c.li; + elem[key] = c.li } else if (c.li !== undefined) { // 
List insert - json.checkList(elem); + json.checkList(elem) - elem.splice(key, 0, c.li); + elem.splice(key, 0, c.li) } else if (c.ld !== undefined) { // List delete - json.checkList(elem); + json.checkList(elem) // Should check the list element matches c.ld here too. - elem.splice(key, 1); + elem.splice(key, 1) } else if (c.lm !== undefined) { // List move - json.checkList(elem); + json.checkList(elem) if (c.lm !== key) { - const e = elem[key]; + const e = elem[key] // Remove it... - elem.splice(key, 1); + elem.splice(key, 1) // And insert it back. - elem.splice(c.lm, 0, e); + elem.splice(c.lm, 0, e) } - } else if (c.oi !== undefined) { // Object insert / replace - json.checkObj(elem); - - // Should check that elem[key] == c.od - elem[key] = c.oi; - } else if (c.od !== undefined) { - // Object delete - json.checkObj(elem); + json.checkObj(elem) // Should check that elem[key] == c.od - delete elem[key]; + elem[key] = c.oi + } else if (c.od !== undefined) { + // Object delete + json.checkObj(elem) + + // Should check that elem[key] == c.od + delete elem[key] } else { - throw new Error('invalid / missing instruction in op'); + throw new Error('invalid / missing instruction in op') } } } catch (error) { // TODO: Roll back all already applied changes. Write tests before implementing this code. - throw error; + throw error } - return container.data; -}; + return container.data +} // Checks if two paths, p1 and p2 match. -json.pathMatches = function(p1, p2, ignoreLast) { - if (p1.length !== p2.length) { return false; } +json.pathMatches = function (p1, p2, ignoreLast) { + if (p1.length !== p2.length) { + return false + } for (let i = 0; i < p1.length; i++) { - const p = p1[i]; - if ((p !== p2[i]) && (!ignoreLast || (i !== (p1.length - 1)))) { return false; } + const p = p1[i] + if (p !== p2[i] && (!ignoreLast || i !== p1.length - 1)) { + return false + } } - - return true; -}; -json.append = function(dest, c) { - let last; - c = clone(c); - if ((dest.length !== 0) && json.pathMatches(c.p, (last = dest[dest.length - 1]).p)) { - if ((last.na !== undefined) && (c.na !== undefined)) { - return dest[dest.length - 1] = { p: last.p, na: last.na + c.na }; - } else if ((last.li !== undefined) && (c.li === undefined) && (c.ld === last.li)) { + return true +} + +json.append = function (dest, c) { + let last + c = clone(c) + if ( + dest.length !== 0 && + json.pathMatches(c.p, (last = dest[dest.length - 1]).p) + ) { + if (last.na !== undefined && c.na !== undefined) { + return (dest[dest.length - 1] = { p: last.p, na: last.na + c.na }) + } else if ( + last.li !== undefined && + c.li === undefined && + c.ld === last.li + ) { // insert immediately followed by delete becomes a noop. 
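      // (For instance, an added example: with dest = [{ p: [2], li: 'x' }],
      // appending { p: [2], ld: 'x' } pops the insert entirely; if the last
      // component were the replace { p: [2], li: 'x', ld: 'y' }, only its li
      // would be dropped, leaving { p: [2], ld: 'y' }.)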
if (last.ld !== undefined) { // leave the delete part of the replace - return delete last.li; + return delete last.li } else { - return dest.pop(); + return dest.pop() } - } else if ((last.od !== undefined) && (last.oi === undefined) && - (c.oi !== undefined) && (c.od === undefined)) { - return last.oi = c.oi; - } else if ((c.lm !== undefined) && (c.p[c.p.length-1] === c.lm)) { - return null; // don't do anything + } else if ( + last.od !== undefined && + last.oi === undefined && + c.oi !== undefined && + c.od === undefined + ) { + return (last.oi = c.oi) + } else if (c.lm !== undefined && c.p[c.p.length - 1] === c.lm) { + return null // don't do anything } else { - return dest.push(c); + return dest.push(c) } } else { - return dest.push(c); + return dest.push(c) } -}; +} -json.compose = function(op1, op2) { - json.checkValidOp(op1); - json.checkValidOp(op2); +json.compose = function (op1, op2) { + json.checkValidOp(op1) + json.checkValidOp(op2) - const newOp = clone(op1); - for (const c of Array.from(op2)) { json.append(newOp, c); } + const newOp = clone(op1) + for (const c of Array.from(op2)) { + json.append(newOp, c) + } - return newOp; -}; + return newOp +} -json.normalize = function(op) { - const newOp = []; - - if (!isArray(op)) { op = [op]; } +json.normalize = function (op) { + const newOp = [] + + if (!isArray(op)) { + op = [op] + } for (const c of Array.from(op)) { - if (c.p == null) { c.p = []; } - json.append(newOp, c); + if (c.p == null) { + c.p = [] + } + json.append(newOp, c) } - - return newOp; -}; + + return newOp +} // hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming // we have browser support for JSON. // http://jsperf.com/cloning-an-object/12 -var clone = o => JSON.parse(JSON.stringify(o)); +var clone = (o) => JSON.parse(JSON.stringify(o)) -json.commonPath = function(p1, p2) { - p1 = p1.slice(); - p2 = p2.slice(); - p1.unshift('data'); - p2.unshift('data'); - p1 = p1.slice(0, p1.length-1); - p2 = p2.slice(0, p2.length-1); - if (p2.length === 0) { return -1; } - let i = 0; - while ((p1[i] === p2[i]) && (i < p1.length)) { - i++; +json.commonPath = function (p1, p2) { + p1 = p1.slice() + p2 = p2.slice() + p1.unshift('data') + p2.unshift('data') + p1 = p1.slice(0, p1.length - 1) + p2 = p2.slice(0, p2.length - 1) + if (p2.length === 0) { + return -1 + } + let i = 0 + while (p1[i] === p2[i] && i < p1.length) { + i++ if (i === p2.length) { - return i-1; + return i - 1 } } -}; +} // transform c so it applies to a document with otherC applied. 
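// For example (a sketch of the contract): with concurrent list inserts into
//   { items: ['a'] },
//   c      = { p: ['items', 1], li: 'x' }
//   otherC = { p: ['items', 0], li: 'y' }
// transforming c against otherC appends { p: ['items', 2], li: 'x' } to
// dest — c's index is shifted past the element otherC inserted.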
-json.transformComponent = function(dest, c, otherC, type) { - let oc; - c = clone(c); - if (c.na !== undefined) { c.p.push(0); } - if (otherC.na !== undefined) { otherC.p.push(0); } +json.transformComponent = function (dest, c, otherC, type) { + let oc + c = clone(c) + if (c.na !== undefined) { + c.p.push(0) + } + if (otherC.na !== undefined) { + otherC.p.push(0) + } - const common = json.commonPath(c.p, otherC.p); - const common2 = json.commonPath(otherC.p, c.p); + const common = json.commonPath(c.p, otherC.p) + const common2 = json.commonPath(otherC.p, c.p) - const cplength = c.p.length; - const otherCplength = otherC.p.length; + const cplength = c.p.length + const otherCplength = otherC.p.length - if (c.na !== undefined) { c.p.pop(); } // hax - if (otherC.na !== undefined) { otherC.p.pop(); } + if (c.na !== undefined) { + c.p.pop() + } // hax + if (otherC.na !== undefined) { + otherC.p.pop() + } if (otherC.na) { - if ((common2 != null) && (otherCplength >= cplength) && (otherC.p[common2] === c.p[common2])) { + if ( + common2 != null && + otherCplength >= cplength && + otherC.p[common2] === c.p[common2] + ) { if (c.ld !== undefined) { - oc = clone(otherC); - oc.p = oc.p.slice(cplength); - c.ld = json.apply(clone(c.ld), [oc]); + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.ld = json.apply(clone(c.ld), [oc]) } else if (c.od !== undefined) { - oc = clone(otherC); - oc.p = oc.p.slice(cplength); - c.od = json.apply(clone(c.od), [oc]); + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.od = json.apply(clone(c.od), [oc]) } } - json.append(dest, c); - return dest; + json.append(dest, c) + return dest } - if ((common2 != null) && (otherCplength > cplength) && (c.p[common2] === otherC.p[common2])) { + if ( + common2 != null && + otherCplength > cplength && + c.p[common2] === otherC.p[common2] + ) { // transform based on c if (c.ld !== undefined) { - oc = clone(otherC); - oc.p = oc.p.slice(cplength); - c.ld = json.apply(clone(c.ld), [oc]); + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.ld = json.apply(clone(c.ld), [oc]) } else if (c.od !== undefined) { - oc = clone(otherC); - oc.p = oc.p.slice(cplength); - c.od = json.apply(clone(c.od), [oc]); + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.od = json.apply(clone(c.od), [oc]) } } - if (common != null) { - let from, p, to; - const commonOperand = cplength === otherCplength; + let from, p, to + const commonOperand = cplength === otherCplength // transform based on otherC if (otherC.na !== undefined) { // this case is handled above due to icky path hax - } else if ((otherC.si !== undefined) || (otherC.sd !== undefined)) { + } else if (otherC.si !== undefined || otherC.sd !== undefined) { // String op vs string op - pass through to text type - if ((c.si !== undefined) || (c.sd !== undefined)) { - if (!commonOperand) { throw new Error("must be a string?"); } + if (c.si !== undefined || c.sd !== undefined) { + if (!commonOperand) { + throw new Error('must be a string?') + } // Convert an op component to a text op component - const convert = function(component) { - const newC = {p:component.p[component.p.length - 1]}; + const convert = function (component) { + const newC = { p: component.p[component.p.length - 1] } if (component.si) { - newC.i = component.si; + newC.i = component.si } else { - newC.d = component.sd; + newC.d = component.sd } - return newC; - }; - - const tc1 = convert(c); - const tc2 = convert(otherC); - - const res = []; - text._tc(res, tc1, tc2, type); - for (const tc of Array.from(res)) { - const jc = { p: 
c.p.slice(0, common) }; - jc.p.push(tc.p); - if (tc.i != null) { jc.si = tc.i; } - if (tc.d != null) { jc.sd = tc.d; } - json.append(dest, jc); + return newC } - return dest; + + const tc1 = convert(c) + const tc2 = convert(otherC) + + const res = [] + text._tc(res, tc1, tc2, type) + for (const tc of Array.from(res)) { + const jc = { p: c.p.slice(0, common) } + jc.p.push(tc.p) + if (tc.i != null) { + jc.si = tc.i + } + if (tc.d != null) { + jc.sd = tc.d + } + json.append(dest, jc) + } + return dest } - } else if ((otherC.li !== undefined) && (otherC.ld !== undefined)) { + } else if (otherC.li !== undefined && otherC.ld !== undefined) { if (otherC.p[common] === c.p[common]) { // noop if (!commonOperand) { // we're below the deleted element, so -> noop - return dest; + return dest } else if (c.ld !== undefined) { // we're trying to delete the same element, -> noop - if ((c.li !== undefined) && (type === 'left')) { + if (c.li !== undefined && type === 'left') { // we're both replacing one element with another. only one can // survive! - c.ld = clone(otherC.li); + c.ld = clone(otherC.li) } else { - return dest; + return dest } } } } else if (otherC.li !== undefined) { - if ((c.li !== undefined) && (c.ld === undefined) && commonOperand && (c.p[common] === otherC.p[common])) { + if ( + c.li !== undefined && + c.ld === undefined && + commonOperand && + c.p[common] === otherC.p[common] + ) { // in li vs. li, left wins. if (type === 'right') { - c.p[common]++; + c.p[common]++ } } else if (otherC.p[common] <= c.p[common]) { - c.p[common]++; + c.p[common]++ } if (c.lm !== undefined) { if (commonOperand) { // otherC edits the same list we edit if (otherC.p[common] <= c.lm) { - c.lm++; + c.lm++ } } } - // changing c.from is handled above. + // changing c.from is handled above. } else if (otherC.ld !== undefined) { if (c.lm !== undefined) { if (commonOperand) { if (otherC.p[common] === c.p[common]) { // they deleted the thing we're trying to move - return dest; + return dest } // otherC edits the same list we edit - p = otherC.p[common]; - from = c.p[common]; - to = c.lm; - if ((p < to) || ((p === to) && (from < to))) { - c.lm--; + p = otherC.p[common] + from = c.p[common] + to = c.lm + if (p < to || (p === to && from < to)) { + c.lm-- } } } if (otherC.p[common] < c.p[common]) { - c.p[common]--; + c.p[common]-- } else if (otherC.p[common] === c.p[common]) { if (otherCplength < cplength) { // we're below the deleted element, so -> noop - return dest; + return dest } else if (c.ld !== undefined) { if (c.li !== undefined) { // we're replacing, they're deleting. we become an insert. - delete c.ld; + delete c.ld } else { // we're trying to delete the same element, -> noop - return dest; + return dest } } } } else if (otherC.lm !== undefined) { - if ((c.lm !== undefined) && (cplength === otherCplength)) { + if (c.lm !== undefined && cplength === otherCplength) { // lm vs lm, here we go! - from = c.p[common]; - to = c.lm; - const otherFrom = otherC.p[common]; - const otherTo = otherC.lm; + from = c.p[common] + to = c.lm + const otherFrom = otherC.p[common] + const otherTo = otherC.lm if (otherFrom !== otherTo) { // if otherFrom == otherTo, we don't need to change our op. @@ -399,143 +476,155 @@ json.transformComponent = function(dest, c, otherC, type) { if (from === otherFrom) { // they moved it! tie break. 
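// (Under 'left' priority our move survives but is re-sourced from wherever
// the other op left the element, i.e. from otherTo; under 'right' priority
// our move becomes a no-op.)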
if (type === 'left') { - c.p[common] = otherTo; - if (from === to) { // ugh - c.lm = otherTo; + c.p[common] = otherTo + if (from === to) { + // ugh + c.lm = otherTo } } else { - return dest; + return dest } } else { // they moved around it if (from > otherFrom) { - c.p[common]--; + c.p[common]-- } if (from > otherTo) { - c.p[common]++; + c.p[common]++ } else if (from === otherTo) { if (otherFrom > otherTo) { - c.p[common]++; - if (from === to) { // ugh, again - c.lm++; + c.p[common]++ + if (from === to) { + // ugh, again + c.lm++ } } } // step 2: where am i going to put it? if (to > otherFrom) { - c.lm--; + c.lm-- } else if (to === otherFrom) { if (to > from) { - c.lm--; + c.lm-- } } if (to > otherTo) { - c.lm++; + c.lm++ } else if (to === otherTo) { // if we're both moving in the same direction, tie break - if (((otherTo > otherFrom) && (to > from)) || - ((otherTo < otherFrom) && (to < from))) { + if ( + (otherTo > otherFrom && to > from) || + (otherTo < otherFrom && to < from) + ) { if (type === 'right') { - c.lm++; + c.lm++ } } else { if (to > from) { - c.lm++; + c.lm++ } else if (to === otherFrom) { - c.lm--; + c.lm-- } } } } } - } else if ((c.li !== undefined) && (c.ld === undefined) && commonOperand) { + } else if (c.li !== undefined && c.ld === undefined && commonOperand) { // li - from = otherC.p[common]; - to = otherC.lm; - p = c.p[common]; + from = otherC.p[common] + to = otherC.lm + p = c.p[common] if (p > from) { - c.p[common]--; + c.p[common]-- } if (p > to) { - c.p[common]++; + c.p[common]++ } } else { // ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath // the lm // // i.e. things care about where their item is after the move. - from = otherC.p[common]; - to = otherC.lm; - p = c.p[common]; + from = otherC.p[common] + to = otherC.lm + p = c.p[common] if (p === from) { - c.p[common] = to; + c.p[common] = to } else { if (p > from) { - c.p[common]--; + c.p[common]-- } if (p > to) { - c.p[common]++; + c.p[common]++ } else if (p === to) { if (from > to) { - c.p[common]++; + c.p[common]++ } } } } - } else if ((otherC.oi !== undefined) && (otherC.od !== undefined)) { + } else if (otherC.oi !== undefined && otherC.od !== undefined) { if (c.p[common] === otherC.p[common]) { - if ((c.oi !== undefined) && commonOperand) { + if (c.oi !== undefined && commonOperand) { // we inserted where someone else replaced if (type === 'right') { // left wins - return dest; + return dest } else { // we win, make our op replace what they inserted - c.od = otherC.oi; + c.od = otherC.oi } } else { // -> noop if the other component is deleting the same object (or any // parent) - return dest; + return dest } } } else if (otherC.oi !== undefined) { - if ((c.oi !== undefined) && (c.p[common] === otherC.p[common])) { + if (c.oi !== undefined && c.p[common] === otherC.p[common]) { // left wins if we try to insert at the same place if (type === 'left') { - json.append(dest, {p:c.p, od:otherC.oi}); + json.append(dest, { p: c.p, od: otherC.oi }) } else { - return dest; + return dest } } } else if (otherC.od !== undefined) { if (c.p[common] === otherC.p[common]) { - if (!commonOperand) { return dest; } + if (!commonOperand) { + return dest + } if (c.oi !== undefined) { - delete c.od; + delete c.od } else { - return dest; + return dest } } } } - - json.append(dest, c); - return dest; -}; -if (typeof WEB !== 'undefined' && WEB !== null) { - if (!exports.types) { exports.types = {}; } - - // This is kind of awful - come up with a better way to hook this helper code up. 
- exports._bt(json, json.transformComponent, json.checkValidOp, json.append); - - // [] is used to prevent closure from renaming types.text - exports.types.json = json; -} else { - module.exports = json; - - require('./helpers').bootstrapTransform(json, json.transformComponent, json.checkValidOp, json.append); + json.append(dest, c) + return dest } +if (typeof WEB !== 'undefined' && WEB !== null) { + if (!exports.types) { + exports.types = {} + } + + // This is kind of awful - come up with a better way to hook this helper code up. + exports._bt(json, json.transformComponent, json.checkValidOp, json.append) + + // [] is used to prevent closure from renaming types.text + exports.types.json = json +} else { + module.exports = json + + require('./helpers').bootstrapTransform( + json, + json.transformComponent, + json.checkValidOp, + json.append + ) +} diff --git a/services/document-updater/app/js/sharejs/types/model.js b/services/document-updater/app/js/sharejs/types/model.js index 68f68f2e7d..69d258738e 100644 --- a/services/document-updater/app/js/sharejs/types/model.js +++ b/services/document-updater/app/js/sharejs/types/model.js @@ -21,13 +21,13 @@ // // Actual storage is handled by the database wrappers in db/*, wrapped by DocCache -let Model; -const {EventEmitter} = require('events'); +let Model +const { EventEmitter } = require('events') -const queue = require('./syncqueue'); -const types = require('../types'); +const queue = require('./syncqueue') +const types = require('../types') -const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; +const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' // This constructor creates a new Model object. There will be one model object // per server context. @@ -41,15 +41,19 @@ const isArray = o => Object.prototype.toString.call(o) === '[object Array]'; // The model is an event emitter. It emits the following events: // // create(docName, data): A document has been created with the specified name & data -module.exports = (Model = function(db, options) { +module.exports = Model = function (db, options) { // db can be null if the user doesn't want persistance. - let getOps; - if (!(this instanceof Model)) { return new Model(db, options); } + let getOps + if (!(this instanceof Model)) { + return new Model(db, options) + } - const model = this; + const model = this - if (options == null) { options = {}; } + if (options == null) { + options = {} + } // This is a cache of 'live' documents. // @@ -78,164 +82,210 @@ module.exports = (Model = function(db, options) { // // In any case, the API to model is designed such that if we want to change that later // it should be pretty easy to do so without any external-to-the-model code changes. - const docs = {}; + const docs = {} // This is a map from docName -> [callback]. It is used when a document hasn't been // cached and multiple getSnapshot() / getVersion() requests come in. All requests // are added to the callback list and called when db.getSnapshot() returns. // // callback(error, snapshot data) - const awaitingGetSnapshot = {}; + const awaitingGetSnapshot = {} // The time that documents which no clients have open will stay in the cache. // Should be > 0. 
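// (Sketch of the knobs: reapTime defaults to 3000ms just below. For a pure
// in-memory store you might construct the model as, e.g.,
//   new Model(null, { forceReaping: true, reapTime: 86400000 })
// to keep unopened documents around for a day — see the forceReaping notes
// below.)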
- if (options.reapTime == null) { options.reapTime = 3000; } + if (options.reapTime == null) { + options.reapTime = 3000 + } // The number of operations the cache holds before reusing the space - if (options.numCachedOps == null) { options.numCachedOps = 10; } + if (options.numCachedOps == null) { + options.numCachedOps = 10 + } // This option forces documents to be reaped, even when there's no database backend. // This is useful when you don't care about persistance and don't want to gradually // fill memory. // // You might want to set reapTime to a day or something. - if (options.forceReaping == null) { options.forceReaping = false; } + if (options.forceReaping == null) { + options.forceReaping = false + } // Until I come up with a better strategy, we'll save a copy of the document snapshot // to the database every ~20 submitted ops. - if (options.opsBeforeCommit == null) { options.opsBeforeCommit = 20; } + if (options.opsBeforeCommit == null) { + options.opsBeforeCommit = 20 + } // It takes some processing time to transform client ops. The server will punt ops back to the // client to transform if they're too old. - if (options.maximumAge == null) { options.maximumAge = 40; } + if (options.maximumAge == null) { + options.maximumAge = 40 + } // **** Cache API methods // Its important that all ops are applied in order. This helper method creates the op submission queue // for a single document. This contains the logic for transforming & applying ops. - const makeOpQueue = (docName, doc) => queue(function(opData, callback) { - if (!(opData.v >= 0)) { return callback('Version missing'); } - if (opData.v > doc.v) { return callback('Op at future version'); } - - // Punt the transforming work back to the client if the op is too old. - if ((opData.v + options.maximumAge) < doc.v) { return callback('Op too old'); } - - if (!opData.meta) { opData.meta = {}; } - opData.meta.ts = Date.now(); - - // We'll need to transform the op to the current version of the document. This - // calls the callback immediately if opVersion == doc.v. - return getOps(docName, opData.v, doc.v, function(error, ops) { - let snapshot; - if (error) { return callback(error); } - - if ((doc.v - opData.v) !== ops.length) { - // This should never happen. It indicates that we didn't get all the ops we - // asked for. Its important that the submitted op is correctly transformed. - console.error(`Could not get old ops in model for document ${docName}`); - console.error(`Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`); - return callback('Internal error'); + const makeOpQueue = (docName, doc) => + queue(function (opData, callback) { + if (!(opData.v >= 0)) { + return callback('Version missing') + } + if (opData.v > doc.v) { + return callback('Op at future version') } - if (ops.length > 0) { - try { - // If there's enough ops, it might be worth spinning this out into a webworker thread. - for (const oldOp of Array.from(ops)) { - // Dup detection works by sending the id(s) the op has been submitted with previously. - // If the id matches, we reject it. The client can also detect the op has been submitted - // already if it sees its own previous id in the ops it sees when it does catchup. 
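// (Ops that survive the dup check are then rebased: each missed op is
// transformed in with 'left' priority and opData.v is bumped once per op —
// e.g. an op submitted at v 5 against a doc at v 8 is transformed over
// ops 5, 6 and 7 before it can apply.)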
- if (oldOp.meta.source && opData.dupIfSource && Array.from(opData.dupIfSource).includes(oldOp.meta.source)) { - return callback('Op already submitted'); - } - - opData.op = doc.type.transform(opData.op, oldOp.op, 'left'); - opData.v++; - } - } catch (error1) { - error = error1; - console.error(error.stack); - return callback(error.message); - } + // Punt the transforming work back to the client if the op is too old. + if (opData.v + options.maximumAge < doc.v) { + return callback('Op too old') } - try { - snapshot = doc.type.apply(doc.snapshot, opData.op); - } catch (error2) { - error = error2; - console.error(error.stack); - return callback(error.message); + if (!opData.meta) { + opData.meta = {} } + opData.meta.ts = Date.now() - // The op data should be at the current version, and the new document data should be at - // the next version. - // - // This should never happen in practice, but its a nice little check to make sure everything - // is hunky-dory. - if (opData.v !== doc.v) { - // This should never happen. - console.error("Version mismatch detected in model. File a ticket - this is a bug."); - console.error(`Expecting ${opData.v} == ${doc.v}`); - return callback('Internal error'); - } - - // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} - const writeOp = (db != null ? db.writeOp : undefined) || ((docName, newOpData, callback) => callback()); - - return writeOp(docName, opData, function(error) { + // We'll need to transform the op to the current version of the document. This + // calls the callback immediately if opVersion == doc.v. + return getOps(docName, opData.v, doc.v, function (error, ops) { + let snapshot if (error) { - // The user should probably know about this. - console.warn(`Error writing ops to database: ${error}`); - return callback(error); + return callback(error) } - __guardMethod__(options.stats, 'writeOp', o => o.writeOp()); + if (doc.v - opData.v !== ops.length) { + // This should never happen. It indicates that we didn't get all the ops we + // asked for. Its important that the submitted op is correctly transformed. + console.error( + `Could not get old ops in model for document ${docName}` + ) + console.error( + `Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops` + ) + return callback('Internal error') + } - // This is needed when we emit the 'change' event, below. - const oldSnapshot = doc.snapshot; + if (ops.length > 0) { + try { + // If there's enough ops, it might be worth spinning this out into a webworker thread. + for (const oldOp of Array.from(ops)) { + // Dup detection works by sending the id(s) the op has been submitted with previously. + // If the id matches, we reject it. The client can also detect the op has been submitted + // already if it sees its own previous id in the ops it sees when it does catchup. + if ( + oldOp.meta.source && + opData.dupIfSource && + Array.from(opData.dupIfSource).includes(oldOp.meta.source) + ) { + return callback('Op already submitted') + } - // All the heavy lifting is now done. Finally, we'll update the cache with the new data - // and (maybe!) save a new document snapshot to the database. 
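// (The "maybe": with the default opsBeforeCommit of 20, a snapshot write is
// attempted once doc.v reaches committedVersion + 20, and only while
// snapshotWriteLock isn't held — see tryWriteSnapshot further down.)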
+ opData.op = doc.type.transform(opData.op, oldOp.op, 'left') + opData.v++ + } + } catch (error1) { + error = error1 + console.error(error.stack) + return callback(error.message) + } + } - doc.v = opData.v + 1; - doc.snapshot = snapshot; + try { + snapshot = doc.type.apply(doc.snapshot, opData.op) + } catch (error2) { + error = error2 + console.error(error.stack) + return callback(error.message) + } - doc.ops.push(opData); - if (db && (doc.ops.length > options.numCachedOps)) { doc.ops.shift(); } - - model.emit('applyOp', docName, opData, snapshot, oldSnapshot); - doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot); - - // The callback is called with the version of the document at which the op was applied. - // This is the op.v after transformation, and its doc.v - 1. - callback(null, opData.v); - - // I need a decent strategy here for deciding whether or not to save the snapshot. + // The op data should be at the current version, and the new document data should be at + // the next version. // - // The 'right' strategy looks something like "Store the snapshot whenever the snapshot - // is smaller than the accumulated op data". For now, I'll just store it every 20 - // ops or something. (Configurable with doc.committedVersion) - if (!doc.snapshotWriteLock && ((doc.committedVersion + options.opsBeforeCommit) <= doc.v)) { - return tryWriteSnapshot(docName, function(error) { - if (error) { return console.warn(`Error writing snapshot ${error}. This is nonfatal`); } - }); + // This should never happen in practice, but its a nice little check to make sure everything + // is hunky-dory. + if (opData.v !== doc.v) { + // This should never happen. + console.error( + 'Version mismatch detected in model. File a ticket - this is a bug.' + ) + console.error(`Expecting ${opData.v} == ${doc.v}`) + return callback('Internal error') } - }); - }); - }); + + // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + const writeOp = + (db != null ? db.writeOp : undefined) || + ((docName, newOpData, callback) => callback()) + + return writeOp(docName, opData, function (error) { + if (error) { + // The user should probably know about this. + console.warn(`Error writing ops to database: ${error}`) + return callback(error) + } + + __guardMethod__(options.stats, 'writeOp', (o) => o.writeOp()) + + // This is needed when we emit the 'change' event, below. + const oldSnapshot = doc.snapshot + + // All the heavy lifting is now done. Finally, we'll update the cache with the new data + // and (maybe!) save a new document snapshot to the database. + + doc.v = opData.v + 1 + doc.snapshot = snapshot + + doc.ops.push(opData) + if (db && doc.ops.length > options.numCachedOps) { + doc.ops.shift() + } + + model.emit('applyOp', docName, opData, snapshot, oldSnapshot) + doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot) + + // The callback is called with the version of the document at which the op was applied. + // This is the op.v after transformation, and its doc.v - 1. + callback(null, opData.v) + + // I need a decent strategy here for deciding whether or not to save the snapshot. + // + // The 'right' strategy looks something like "Store the snapshot whenever the snapshot + // is smaller than the accumulated op data". For now, I'll just store it every 20 + // ops or something. 
(Configurable with doc.committedVersion) + if ( + !doc.snapshotWriteLock && + doc.committedVersion + options.opsBeforeCommit <= doc.v + ) { + return tryWriteSnapshot(docName, function (error) { + if (error) { + return console.warn( + `Error writing snapshot ${error}. This is nonfatal` + ) + } + }) + } + }) + }) + }) // Add the data for the given docName to the cache. The named document shouldn't already // exist in the doc set. // // Returns the new doc. - const add = function(docName, error, data, committedVersion, ops, dbMeta) { - let callback, doc; - const callbacks = awaitingGetSnapshot[docName]; - delete awaitingGetSnapshot[docName]; + const add = function (docName, error, data, committedVersion, ops, dbMeta) { + let callback, doc + const callbacks = awaitingGetSnapshot[docName] + delete awaitingGetSnapshot[docName] if (error) { - if (callbacks) { for (callback of Array.from(callbacks)) { callback(error); } } + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(error) + } + } } else { - doc = (docs[docName] = { + doc = docs[docName] = { snapshot: data.snapshot, v: data.v, type: data.type, @@ -244,7 +294,7 @@ module.exports = (Model = function(db, options) { // Cache of ops ops: ops || [], - eventEmitter: new EventEmitter, + eventEmitter: new EventEmitter(), // Timer before the document will be invalidated from the cache (if the document has no // listeners) @@ -254,97 +304,121 @@ module.exports = (Model = function(db, options) { committedVersion: committedVersion != null ? committedVersion : data.v, snapshotWriteLock: false, dbMeta - }); + } - doc.opQueue = makeOpQueue(docName, doc); - - refreshReapingTimeout(docName); - model.emit('add', docName, data); - if (callbacks) { for (callback of Array.from(callbacks)) { callback(null, doc); } } + doc.opQueue = makeOpQueue(docName, doc) + + refreshReapingTimeout(docName) + model.emit('add', docName, data) + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(null, doc) + } + } } - return doc; - }; - + return doc + } + // This is a little helper wrapper around db.getOps. It does two things: // // - If there's no database set, it returns an error to the callback // - It adds version numbers to each op returned from the database // (These can be inferred from context so the DB doesn't store them, but its useful to have them). - const getOpsInternal = function(docName, start, end, callback) { - if (!db) { return (typeof callback === 'function' ? callback('Document does not exist') : undefined); } + const getOpsInternal = function (docName, start, end, callback) { + if (!db) { + return typeof callback === 'function' + ? callback('Document does not exist') + : undefined + } - return db.getOps(docName, start, end, function(error, ops) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return db.getOps(docName, start, end, function (error, ops) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - let v = start; - for (const op of Array.from(ops)) { op.v = v++; } + let v = start + for (const op of Array.from(ops)) { + op.v = v++ + } - return (typeof callback === 'function' ? callback(null, ops) : undefined); - }); - }; + return typeof callback === 'function' ? callback(null, ops) : undefined + }) + } // Load the named document into the cache. This function is re-entrant. 
// // The callback is called with (error, doc) - const load = function(docName, callback) { + const load = function (docName, callback) { if (docs[docName]) { // The document is already loaded. Return immediately. - __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')); - return callback(null, docs[docName]); + __guardMethod__(options.stats, 'cacheHit', (o) => + o.cacheHit('getSnapshot') + ) + return callback(null, docs[docName]) } // We're a memory store. If we don't have it, nobody does. - if (!db) { return callback('Document does not exist'); } + if (!db) { + return callback('Document does not exist') + } - const callbacks = awaitingGetSnapshot[docName]; + const callbacks = awaitingGetSnapshot[docName] // The document is being loaded already. Add ourselves as a callback. - if (callbacks) { return callbacks.push(callback); } + if (callbacks) { + return callbacks.push(callback) + } - __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot')); + __guardMethod__(options.stats, 'cacheMiss', (o1) => + o1.cacheMiss('getSnapshot') + ) // The document isn't loaded and isn't being loaded. Load it. - awaitingGetSnapshot[docName] = [callback]; - return db.getSnapshot(docName, function(error, data, dbMeta) { - if (error) { return add(docName, error); } - - const type = types[data.type]; - if (!type) { - console.warn(`Type '${data.type}' missing`); - return callback("Type not found"); + awaitingGetSnapshot[docName] = [callback] + return db.getSnapshot(docName, function (error, data, dbMeta) { + if (error) { + return add(docName, error) } - data.type = type; - const committedVersion = data.v; + const type = types[data.type] + if (!type) { + console.warn(`Type '${data.type}' missing`) + return callback('Type not found') + } + data.type = type + + const committedVersion = data.v // The server can close without saving the most recent document snapshot. // In this case, there are extra ops which need to be applied before // returning the snapshot. - return getOpsInternal(docName, data.v, null, function(error, ops) { - if (error) { return callback(error); } + return getOpsInternal(docName, data.v, null, function (error, ops) { + if (error) { + return callback(error) + } if (ops.length > 0) { - console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`); + console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`) try { for (const op of Array.from(ops)) { - data.snapshot = type.apply(data.snapshot, op.op); - data.v++; + data.snapshot = type.apply(data.snapshot, op.op) + data.v++ } } catch (e) { // This should never happen - it indicates that whats in the // database is invalid. - console.error(`Op data invalid for ${docName}: ${e.stack}`); - return callback('Op data invalid'); + console.error(`Op data invalid for ${docName}: ${e.stack}`) + return callback('Op data invalid') } } - model.emit('load', docName, data); - return add(docName, error, data, committedVersion, ops, dbMeta); - }); - }); - }; + model.emit('load', docName, data) + return add(docName, error, data, committedVersion, ops, dbMeta) + }) + }) + } // This makes sure the cache contains a document. If the doc cache doesn't contain // a document, it is loaded from the database and stored. 
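// Minimal end-to-end usage of this model (a sketch: the require path and
// the registered 'text' type are assumptions; db = null keeps everything
// in memory):
const Model = require('./model')
const model = new Model(null)
model.create('doc', 'text', function (error) {
  // new docs start at version 0; text ops are arrays of { p, i } / { p, d }
  model.applyOp('doc', { v: 0, op: [{ p: 0, i: 'hi' }] }, function (error, v) {
    // v is the version the op applied at (0); the doc is now at version 1
    model.getSnapshot('doc', function (error, data) {
      // data => { v: 1, snapshot: 'hi', type: <the text type>, meta: {} }
    })
  })
})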
@@ -352,52 +426,75 @@ module.exports = (Model = function(db, options) { // Documents are stored so long as either: // - They have been accessed within the past #{PERIOD} // - At least one client has the document open - var refreshReapingTimeout = function(docName) { - const doc = docs[docName]; - if (!doc) { return; } + var refreshReapingTimeout = function (docName) { + const doc = docs[docName] + if (!doc) { + return + } // I want to let the clients list be updated before this is called. - return process.nextTick(function() { + return process.nextTick(function () { // This is an awkward way to find out the number of clients on a document. If this // causes performance issues, add a numClients field to the document. // // The first check is because its possible that between refreshReapingTimeout being called and this // event being fired, someone called delete() on the document and hence the doc is something else now. - if ((doc === docs[docName]) && - (doc.eventEmitter.listeners('op').length === 0) && - (db || options.forceReaping) && - (doc.opQueue.busy === false)) { - - let reapTimer; - clearTimeout(doc.reapTimer); - return doc.reapTimer = (reapTimer = setTimeout(() => tryWriteSnapshot(docName, function() { - // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - // in the middle of applying an operation, don't reap. - if ((docs[docName].reapTimer === reapTimer) && (doc.opQueue.busy === false)) { return delete docs[docName]; } - }) - , options.reapTime)); + if ( + doc === docs[docName] && + doc.eventEmitter.listeners('op').length === 0 && + (db || options.forceReaping) && + doc.opQueue.busy === false + ) { + let reapTimer + clearTimeout(doc.reapTimer) + return (doc.reapTimer = reapTimer = setTimeout( + () => + tryWriteSnapshot(docName, function () { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ( + docs[docName].reapTimer === reapTimer && + doc.opQueue.busy === false + ) { + return delete docs[docName] + } + }), + options.reapTime + )) } - }); - }; + }) + } - var tryWriteSnapshot = function(docName, callback) { - if (!db) { return (typeof callback === 'function' ? callback() : undefined); } + var tryWriteSnapshot = function (docName, callback) { + if (!db) { + return typeof callback === 'function' ? callback() : undefined + } - const doc = docs[docName]; + const doc = docs[docName] // The doc is closed - if (!doc) { return (typeof callback === 'function' ? callback() : undefined); } + if (!doc) { + return typeof callback === 'function' ? callback() : undefined + } // The document is already saved. - if (doc.committedVersion === doc.v) { return (typeof callback === 'function' ? callback() : undefined); } + if (doc.committedVersion === doc.v) { + return typeof callback === 'function' ? callback() : undefined + } - if (doc.snapshotWriteLock) { return (typeof callback === 'function' ? callback('Another snapshot write is in progress') : undefined); } + if (doc.snapshotWriteLock) { + return typeof callback === 'function' + ? callback('Another snapshot write is in progress') + : undefined + } - doc.snapshotWriteLock = true; + doc.snapshotWriteLock = true - __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()); + __guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot()) - const writeSnapshot = (db != null ? 
db.writeSnapshot : undefined) || ((docName, docData, dbMeta, callback) => callback()); + const writeSnapshot = + (db != null ? db.writeSnapshot : undefined) || + ((docName, docData, dbMeta, callback) => callback()) const data = { v: doc.v, @@ -405,87 +502,107 @@ module.exports = (Model = function(db, options) { snapshot: doc.snapshot, // The database doesn't know about object types. type: doc.type.name - }; + } // Commit snapshot. - return writeSnapshot(docName, data, doc.dbMeta, function(error, dbMeta) { - doc.snapshotWriteLock = false; + return writeSnapshot(docName, data, doc.dbMeta, function (error, dbMeta) { + doc.snapshotWriteLock = false // We have to use data.v here because the version in the doc could // have been updated between the call to writeSnapshot() and now. - doc.committedVersion = data.v; - doc.dbMeta = dbMeta; + doc.committedVersion = data.v + doc.dbMeta = dbMeta - return (typeof callback === 'function' ? callback(error) : undefined); - }); - }; + return typeof callback === 'function' ? callback(error) : undefined + }) + } // *** Model interface methods // Create a new document. // // data should be {snapshot, type, [meta]}. The version of a new document is 0. - this.create = function(docName, type, meta, callback) { - if (typeof meta === 'function') { [meta, callback] = Array.from([{}, meta]); } + this.create = function (docName, type, meta, callback) { + if (typeof meta === 'function') { + ;[meta, callback] = Array.from([{}, meta]) + } - if (docName.match(/\//)) { return (typeof callback === 'function' ? callback('Invalid document name') : undefined); } - if (docs[docName]) { return (typeof callback === 'function' ? callback('Document already exists') : undefined); } + if (docName.match(/\//)) { + return typeof callback === 'function' + ? callback('Invalid document name') + : undefined + } + if (docs[docName]) { + return typeof callback === 'function' + ? callback('Document already exists') + : undefined + } - if (typeof type === 'string') { type = types[type]; } - if (!type) { return (typeof callback === 'function' ? callback('Type not found') : undefined); } + if (typeof type === 'string') { + type = types[type] + } + if (!type) { + return typeof callback === 'function' + ? callback('Type not found') + : undefined + } const data = { - snapshot:type.create(), - type:type.name, - meta:meta || {}, - v:0 - }; + snapshot: type.create(), + type: type.name, + meta: meta || {}, + v: 0 + } - const done = function(error, dbMeta) { + const done = function (error, dbMeta) { // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something. - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } // From here on we'll store the object version of the type name. - data.type = type; - add(docName, null, data, 0, [], dbMeta); - model.emit('create', docName, data); - return (typeof callback === 'function' ? callback() : undefined); - }; + data.type = type + add(docName, null, data, 0, [], dbMeta) + model.emit('create', docName, data) + return typeof callback === 'function' ? callback() : undefined + } if (db) { - return db.create(docName, data, done); + return db.create(docName, data, done) } else { - return done(); + return done() } - }; + } // Perminantly deletes the specified document. // If listeners are attached, they are removed. - // + // // The callback is called with (error) if there was an error. 
If error is null / undefined, the // document was deleted. // // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the // deletion. Subsequent op submissions will fail). - this.delete = function(docName, callback) { - const doc = docs[docName]; + this.delete = function (docName, callback) { + const doc = docs[docName] if (doc) { - clearTimeout(doc.reapTimer); - delete docs[docName]; + clearTimeout(doc.reapTimer) + delete docs[docName] } - const done = function(error) { - if (!error) { model.emit('delete', docName); } - return (typeof callback === 'function' ? callback(error) : undefined); - }; + const done = function (error) { + if (!error) { + model.emit('delete', docName) + } + return typeof callback === 'function' ? callback(error) : undefined + } if (db) { - return db.delete(docName, doc != null ? doc.dbMeta : undefined, done); + return db.delete(docName, doc != null ? doc.dbMeta : undefined, done) } else { - return done((!doc ? 'Document does not exist' : undefined)); + return done(!doc ? 'Document does not exist' : undefined) } - }; + } // This gets all operations from [start...end]. (That is, its not inclusive.) // @@ -502,102 +619,139 @@ module.exports = (Model = function(db, options) { // // Use getVersion() to determine if a document actually exists, if thats what you're // after. - this.getOps = (getOps = function(docName, start, end, callback) { + this.getOps = getOps = function (docName, start, end, callback) { // getOps will only use the op cache if its there. It won't fill the op cache in. - if (!(start >= 0)) { throw new Error('start must be 0+'); } + if (!(start >= 0)) { + throw new Error('start must be 0+') + } - if (typeof end === 'function') { [end, callback] = Array.from([null, end]); } + if (typeof end === 'function') { + ;[end, callback] = Array.from([null, end]) + } - const ops = docs[docName] != null ? docs[docName].ops : undefined; + const ops = docs[docName] != null ? docs[docName].ops : undefined if (ops) { - const version = docs[docName].v; + const version = docs[docName].v // Ops contains an array of ops. The last op in the list is the last op applied - if (end == null) { end = version; } - start = Math.min(start, end); + if (end == null) { + end = version + } + start = Math.min(start, end) - if (start === end) { return callback(null, []); } + if (start === end) { + return callback(null, []) + } // Base is the version number of the oldest op we have cached - const base = version - ops.length; + const base = version - ops.length // If the database is null, we'll trim to the ops we do have and hope thats enough. - if ((start >= base) || (db === null)) { - refreshReapingTimeout(docName); + if (start >= base || db === null) { + refreshReapingTimeout(docName) if (options.stats != null) { - options.stats.cacheHit('getOps'); + options.stats.cacheHit('getOps') } - return callback(null, ops.slice((start - base), (end - base))); + return callback(null, ops.slice(start - base, end - base)) } } if (options.stats != null) { - options.stats.cacheMiss('getOps'); + options.stats.cacheMiss('getOps') } - return getOpsInternal(docName, start, end, callback); - }); + return getOpsInternal(docName, start, end, callback) + } // Gets the snapshot data for the specified document. // getSnapshot(docName, callback) // Callback is called with (error, {v: , type: , snapshot: , meta: }) - this.getSnapshot = (docName, callback) => load(docName, (error, doc) => callback(error, doc ? 
{v:doc.v, type:doc.type, snapshot:doc.snapshot, meta:doc.meta} : undefined)); + this.getSnapshot = (docName, callback) => + load(docName, (error, doc) => + callback( + error, + doc + ? { v: doc.v, type: doc.type, snapshot: doc.snapshot, meta: doc.meta } + : undefined + ) + ) // Gets the latest version # of the document. // getVersion(docName, callback) // callback is called with (error, version). - this.getVersion = (docName, callback) => load(docName, (error, doc) => callback(error, doc != null ? doc.v : undefined)); + this.getVersion = (docName, callback) => + load(docName, (error, doc) => + callback(error, doc != null ? doc.v : undefined) + ) // Apply an op to the specified document. // The callback is passed (error, applied version #) // opData = {op:op, v:v, meta:metadata} - // + // // Ops are queued before being applied so that the following code applies op C before op B: // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB // model.applyOp 'doc', OPC - this.applyOp = (docName, opData, callback) => // All the logic for this is in makeOpQueue, above. - load(docName, function(error, doc) { - if (error) { return callback(error); } + this.applyOp = ( + docName, + opData, + callback // All the logic for this is in makeOpQueue, above. + ) => + load(docName, function (error, doc) { + if (error) { + return callback(error) + } - return process.nextTick(() => doc.opQueue(opData, function(error, newVersion) { - refreshReapingTimeout(docName); - return (typeof callback === 'function' ? callback(error, newVersion) : undefined); - })); - }); + return process.nextTick(() => + doc.opQueue(opData, function (error, newVersion) { + refreshReapingTimeout(docName) + return typeof callback === 'function' + ? callback(error, newVersion) + : undefined + }) + ) + }) // TODO: store (some) metadata in DB // TODO: op and meta should be combineable in the op that gets sent - this.applyMetaOp = function(docName, metaOpData, callback) { - const {path, value} = metaOpData.meta; - - if (!isArray(path)) { return (typeof callback === 'function' ? callback("path should be an array") : undefined); } + this.applyMetaOp = function (docName, metaOpData, callback) { + const { path, value } = metaOpData.meta - return load(docName, function(error, doc) { + if (!isArray(path)) { + return typeof callback === 'function' + ? callback('path should be an array') + : undefined + } + + return load(docName, function (error, doc) { if (error != null) { - return (typeof callback === 'function' ? callback(error) : undefined); + return typeof callback === 'function' ? callback(error) : undefined } else { - let applied = false; + let applied = false switch (path[0]) { case 'shout': - doc.eventEmitter.emit('op', metaOpData); - applied = true; - break; + doc.eventEmitter.emit('op', metaOpData) + applied = true + break } - if (applied) { model.emit('applyMetaOp', docName, path, value); } - return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + if (applied) { + model.emit('applyMetaOp', docName, path, value) + } + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined } - }); - }; + }) + } // Listen to all ops from the specified version. If version is in the past, all // ops since that version are sent immediately to the listener. // // The callback is called once the listener is attached, but before any ops have been passed // to the listener. - // + // // This will _not_ edit the document metadata. // // If there are any listeners, we don't purge the document from the cache. 
But be aware, this behaviour @@ -609,98 +763,123 @@ module.exports = (Model = function(db, options) { // listener is called with (opData) each time an op is applied. // // callback(error, openedVersion) - this.listen = function(docName, version, listener, callback) { - if (typeof version === 'function') { [version, listener, callback] = Array.from([null, version, listener]); } + this.listen = function (docName, version, listener, callback) { + if (typeof version === 'function') { + ;[version, listener, callback] = Array.from([null, version, listener]) + } - return load(docName, function(error, doc) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return load(docName, function (error, doc) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - clearTimeout(doc.reapTimer); + clearTimeout(doc.reapTimer) if (version != null) { - return getOps(docName, version, null, function(error, data) { - if (error) { return (typeof callback === 'function' ? callback(error) : undefined); } + return getOps(docName, version, null, function (error, data) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } - doc.eventEmitter.on('op', listener); + doc.eventEmitter.on('op', listener) if (typeof callback === 'function') { - callback(null, version); + callback(null, version) } return (() => { - const result = []; + const result = [] for (const op of Array.from(data)) { - var needle; - listener(op); + var needle + listener(op) // The listener may well remove itself during the catchup phase. If this happens, break early. // This is done in a quite inefficient way. (O(n) where n = #listeners on doc) - if ((needle = listener, !Array.from(doc.eventEmitter.listeners('op')).includes(needle))) { break; } else { - result.push(undefined); + if ( + ((needle = listener), + !Array.from(doc.eventEmitter.listeners('op')).includes(needle)) + ) { + break + } else { + result.push(undefined) } } - return result; - })(); - }); - - } else { // Version is null / undefined. Just add the listener. - doc.eventEmitter.on('op', listener); - return (typeof callback === 'function' ? callback(null, doc.v) : undefined); + return result + })() + }) + } else { + // Version is null / undefined. Just add the listener. + doc.eventEmitter.on('op', listener) + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined } - }); - }; + }) + } // Remove a listener for a particular document. // // removeListener(docName, listener) // // This is synchronous. - this.removeListener = function(docName, listener) { + this.removeListener = function (docName, listener) { // The document should already be loaded. - const doc = docs[docName]; - if (!doc) { throw new Error('removeListener called but document not loaded'); } + const doc = docs[docName] + if (!doc) { + throw new Error('removeListener called but document not loaded') + } - doc.eventEmitter.removeListener('op', listener); - return refreshReapingTimeout(docName); - }; + doc.eventEmitter.removeListener('op', listener) + return refreshReapingTimeout(docName) + } // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - // sharejs will happily replay uncommitted ops when documents are re-opened anyway. - this.flush = function(callback) { - if (!db) { return (typeof callback === 'function' ? callback() : undefined); } + this.flush = function (callback) { + if (!db) { + return typeof callback === 'function' ? 
callback() : undefined + } - let pendingWrites = 0; + let pendingWrites = 0 for (const docName in docs) { - const doc = docs[docName]; + const doc = docs[docName] if (doc.committedVersion < doc.v) { - pendingWrites++; + pendingWrites++ // I'm hoping writeSnapshot will always happen in another thread. - tryWriteSnapshot(docName, () => process.nextTick(function() { - pendingWrites--; - if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } - })); + tryWriteSnapshot(docName, () => + process.nextTick(function () { + pendingWrites-- + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + }) + ) } } // If nothing was queued, terminate immediately. - if (pendingWrites === 0) { return (typeof callback === 'function' ? callback() : undefined); } - }; + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + } // Close the database connection. This is needed so nodejs can shut down cleanly. - this.closeDb = function() { - __guardMethod__(db, 'close', o => o.close()); - return db = null; - }; - -}); + this.closeDb = function () { + __guardMethod__(db, 'close', (o) => o.close()) + return (db = null) + } +} // Model inherits from EventEmitter. -Model.prototype = new EventEmitter; - +Model.prototype = new EventEmitter() function __guardMethod__(obj, methodName, transform) { - if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') { - return transform(obj, methodName); + if ( + typeof obj !== 'undefined' && + obj !== null && + typeof obj[methodName] === 'function' + ) { + return transform(obj, methodName) } else { - return undefined; + return undefined } -} \ No newline at end of file +} diff --git a/services/document-updater/app/js/sharejs/types/simple.js b/services/document-updater/app/js/sharejs/types/simple.js index c0e8e85394..781cdc0293 100644 --- a/services/document-updater/app/js/sharejs/types/simple.js +++ b/services/document-updater/app/js/sharejs/types/simple.js @@ -23,28 +23,32 @@ module.exports = { name: 'simple', // Create a new document snapshot - create() { return {str:""}; }, + create() { + return { str: '' } + }, // Apply the given op to the document snapshot. Returns the new snapshot. // // The original snapshot should not be modified. apply(snapshot, op) { - if (!(op.position >= 0 && op.position <= snapshot.str.length)) { throw new Error('Invalid position'); } + if (!(op.position >= 0 && op.position <= snapshot.str.length)) { + throw new Error('Invalid position') + } - let { - str - } = snapshot; - str = str.slice(0, op.position) + op.text + str.slice(op.position); - return {str}; + let { str } = snapshot + str = str.slice(0, op.position) + op.text + str.slice(op.position) + return { str } }, // transform op1 by op2. Return transformed version of op1. // sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the // op being transformed comes from the client or the server. 
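// (Worked instance of the rule below, as a sketch: op2 inserted 'abc' at
// position 2, so an op1 insert at position 5 shifts right by
// op2.text.length:
//   transform({ position: 5, text: 'X' }, { position: 2, text: 'abc' }, 'right')
//     => { position: 8, text: 'X' }
// A tie at the same position moves op1 only when sym is 'left'.)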
transform(op1, op2, sym) { - let pos = op1.position; - if ((op2.position < pos) || ((op2.position === pos) && (sym === 'left'))) { pos += op2.text.length; } + let pos = op1.position + if (op2.position < pos || (op2.position === pos && sym === 'left')) { + pos += op2.text.length + } - return {position:pos, text:op1.text}; + return { position: pos, text: op1.text } } -}; +} diff --git a/services/document-updater/app/js/sharejs/types/syncqueue.js b/services/document-updater/app/js/sharejs/types/syncqueue.js index 2eecb615e6..7b83c5b436 100644 --- a/services/document-updater/app/js/sharejs/types/syncqueue.js +++ b/services/document-updater/app/js/sharejs/types/syncqueue.js @@ -25,30 +25,36 @@ // // ^--- async thing will only be running once at any time. -module.exports = function(process) { - if (typeof process !== 'function') { throw new Error('process is not a function'); } - const queue = []; - - const enqueue = function(data, callback) { - queue.push([data, callback]); - return flush(); - }; +module.exports = function (process) { + if (typeof process !== 'function') { + throw new Error('process is not a function') + } + const queue = [] - enqueue.busy = false; + const enqueue = function (data, callback) { + queue.push([data, callback]) + return flush() + } - var flush = function() { - if (enqueue.busy || (queue.length === 0)) { return; } + enqueue.busy = false - enqueue.busy = true; - const [data, callback] = Array.from(queue.shift()); - return process(data, function(...result) { // TODO: Make this not use varargs - varargs are really slow. - enqueue.busy = false; + var flush = function () { + if (enqueue.busy || queue.length === 0) { + return + } + + enqueue.busy = true + const [data, callback] = Array.from(queue.shift()) + return process(data, function (...result) { + // TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false // This is called after busy = false so a user can check if enqueue.busy is set in the callback. 
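// (Usage sketch — the require path is an assumption:
//   const makeQueue = require('./syncqueue')
//   const q = makeQueue((data, done) => setTimeout(() => done(null, data * 2), 10))
//   q(1, (err, n) => console.log(n)) // 2
//   q(2, (err, n) => console.log(n)) // 4 — starts only after the first job finished
// )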
- if (callback) { callback.apply(null, result); } - return flush(); - }); - }; - - return enqueue; -}; + if (callback) { + callback.apply(null, result) + } + return flush() + }) + } + return enqueue +} diff --git a/services/document-updater/app/js/sharejs/types/text-api.js b/services/document-updater/app/js/sharejs/types/text-api.js index 7c39b25899..d30f009cdb 100644 --- a/services/document-updater/app/js/sharejs/types/text-api.js +++ b/services/document-updater/app/js/sharejs/types/text-api.js @@ -8,39 +8,45 @@ */ // Text document API for text -let text; -if (typeof WEB === 'undefined') { text = require('./text'); } +let text +if (typeof WEB === 'undefined') { + text = require('./text') +} text.api = { - provides: {text:true}, + provides: { text: true }, // The number of characters in the string - getLength() { return this.snapshot.length; }, + getLength() { + return this.snapshot.length + }, // Get the text contents of a document - getText() { return this.snapshot; }, + getText() { + return this.snapshot + }, insert(pos, text, callback) { - const op = [{p:pos, i:text}]; - - this.submitOp(op, callback); - return op; - }, - - del(pos, length, callback) { - const op = [{p:pos, d:this.snapshot.slice(pos, (pos + length))}]; + const op = [{ p: pos, i: text }] - this.submitOp(op, callback); - return op; + this.submitOp(op, callback) + return op }, - + + del(pos, length, callback) { + const op = [{ p: pos, d: this.snapshot.slice(pos, pos + length) }] + + this.submitOp(op, callback) + return op + }, + _register() { - return this.on('remoteop', function(op) { + return this.on('remoteop', function (op) { return Array.from(op).map((component) => - component.i !== undefined ? - this.emit('insert', component.p, component.i) - : - this.emit('delete', component.p, component.d)); - }); + component.i !== undefined + ? 
this.emit('insert', component.p, component.i) + : this.emit('delete', component.p, component.d) + ) + }) } -}; +} diff --git a/services/document-updater/app/js/sharejs/types/text-composable-api.js b/services/document-updater/app/js/sharejs/types/text-composable-api.js index ba6e5f0242..9b237ce91b 100644 --- a/services/document-updater/app/js/sharejs/types/text-composable-api.js +++ b/services/document-updater/app/js/sharejs/types/text-composable-api.js @@ -13,57 +13,64 @@ */ // Text document API for text -let type; +let type if (typeof WEB !== 'undefined' && WEB !== null) { - type = exports.types['text-composable']; + type = exports.types['text-composable'] } else { - type = require('./text-composable'); + type = require('./text-composable') } type.api = { - provides: {'text':true}, + provides: { text: true }, // The number of characters in the string - 'getLength'() { return this.snapshot.length; }, + getLength() { + return this.snapshot.length + }, // Get the text contents of a document - 'getText'() { return this.snapshot; }, - - 'insert'(pos, text, callback) { - const op = type.normalize([pos, {'i':text}, (this.snapshot.length - pos)]); - - this.submitOp(op, callback); - return op; + getText() { + return this.snapshot }, - - 'del'(pos, length, callback) { - const op = type.normalize([pos, {'d':this.snapshot.slice(pos, (pos + length))}, (this.snapshot.length - pos - length)]); - this.submitOp(op, callback); - return op; + insert(pos, text, callback) { + const op = type.normalize([pos, { i: text }, this.snapshot.length - pos]) + + this.submitOp(op, callback) + return op + }, + + del(pos, length, callback) { + const op = type.normalize([ + pos, + { d: this.snapshot.slice(pos, pos + length) }, + this.snapshot.length - pos - length + ]) + + this.submitOp(op, callback) + return op }, _register() { - return this.on('remoteop', function(op) { - let pos = 0; + return this.on('remoteop', function (op) { + let pos = 0 return (() => { - const result = []; + const result = [] for (const component of Array.from(op)) { if (typeof component === 'number') { - result.push(pos += component); + result.push((pos += component)) } else if (component.i !== undefined) { - this.emit('insert', pos, component.i); - result.push(pos += component.i.length); + this.emit('insert', pos, component.i) + result.push((pos += component.i.length)) } else { // delete - result.push(this.emit('delete', pos, component.d)); + result.push(this.emit('delete', pos, component.d)) } } - return result; - })(); - }); + return result + })() + }) } -}; - // We don't increment pos, because the position - // specified is after the delete has happened. - +} +// We don't increment pos, because the position +// specified is after the delete has happened. diff --git a/services/document-updater/app/js/sharejs/types/text-composable.js b/services/document-updater/app/js/sharejs/types/text-composable.js index 79dfb63308..6898589908 100644 --- a/services/document-updater/app/js/sharejs/types/text-composable.js +++ b/services/document-updater/app/js/sharejs/types/text-composable.js @@ -27,297 +27,373 @@ // // Snapshots are strings. -let makeAppend; -const p = function() {}; // require('util').debug -const i = function() {}; // require('util').inspect +let makeAppend +const p = function () {} // require('util').debug +const i = function () {} // require('util').inspect -const exports = (typeof WEB !== 'undefined' && WEB !== null) ? {} : module.exports; +const exports = typeof WEB !== 'undefined' && WEB !== null ? 
{} : module.exports -exports.name = 'text-composable'; +exports.name = 'text-composable' -exports.create = () => ''; +exports.create = () => '' // -------- Utility methods -const checkOp = function(op) { - if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); } - let last = null; +const checkOp = function (op) { + if (!Array.isArray(op)) { + throw new Error('Op must be an array of components') + } + let last = null return (() => { - const result = []; + const result = [] for (const c of Array.from(op)) { - if (typeof(c) === 'object') { - if (((c.i == null) || !(c.i.length > 0)) && ((c.d == null) || !(c.d.length > 0))) { throw new Error(`Invalid op component: ${i(c)}`); } + if (typeof c === 'object') { + if ( + (c.i == null || !(c.i.length > 0)) && + (c.d == null || !(c.d.length > 0)) + ) { + throw new Error(`Invalid op component: ${i(c)}`) + } } else { - if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); } - if (!(c > 0)) { throw new Error('Skip components must be a positive number'); } - if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be added'); } + if (typeof c !== 'number') { + throw new Error('Op components must be objects or numbers') + } + if (!(c > 0)) { + throw new Error('Skip components must be a positive number') + } + if (typeof last === 'number') { + throw new Error('Adjacent skip components should be added') + } } - result.push(last = c); + result.push((last = c)) } - return result; - })(); -}; + return result + })() +} // Makes a function for appending components to a given op. // Exported for the randomOpGenerator. -exports._makeAppend = (makeAppend = op => (function(component) { - if ((component === 0) || (component.i === '') || (component.d === '')) { - - } else if (op.length === 0) { - return op.push(component); - } else if ((typeof(component) === 'number') && (typeof(op[op.length - 1]) === 'number')) { - return op[op.length - 1] += component; - } else if ((component.i != null) && (op[op.length - 1].i != null)) { - return op[op.length - 1].i += component.i; - } else if ((component.d != null) && (op[op.length - 1].d != null)) { - return op[op.length - 1].d += component.d; - } else { - return op.push(component); +exports._makeAppend = makeAppend = (op) => + function (component) { + if (component === 0 || component.i === '' || component.d === '') { + } else if (op.length === 0) { + return op.push(component) + } else if ( + typeof component === 'number' && + typeof op[op.length - 1] === 'number' + ) { + return (op[op.length - 1] += component) + } else if (component.i != null && op[op.length - 1].i != null) { + return (op[op.length - 1].i += component.i) + } else if (component.d != null && op[op.length - 1].d != null) { + return (op[op.length - 1].d += component.d) + } else { + return op.push(component) + } } -})); - + // checkOp op // Makes 2 functions for taking components from the start of an op, and for peeking // at the next op that could be taken. -const makeTake = function(op) { +const makeTake = function (op) { // The index of the next component to take - let idx = 0; + let idx = 0 // The offset into the component - let offset = 0; + let offset = 0 // Take up to length n from the front of op. If n is null, take the next // op component. If indivisableField == 'd', delete components won't be separated. // If indivisableField == 'i', insert components won't be separated. 
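Aside, not part of the patch: a standalone editorial restatement of the append behaviour converted above, runnable on its own, showing how adjacent components coalesce.

    // Simplified sketch of makeAppend: adjacent skips add, adjacent
    // inserts and deletes concatenate, anything else is pushed.
    const makeAppendSketch = (op) => (c) => {
      const last = op[op.length - 1]
      if (c === 0 || c.i === '' || c.d === '') return // drop no-ops
      if (typeof c === 'number' && typeof last === 'number') {
        op[op.length - 1] += c
      } else if (c.i != null && last && last.i != null) {
        last.i += c.i
      } else if (c.d != null && last && last.d != null) {
        last.d += c.d
      } else {
        op.push(c)
      }
    }

    const op = []
    const append = makeAppendSketch(op)
    append(2)
    append(3) // merged with the previous skip
    append({ i: 'ab' })
    append({ i: 'cd' }) // merged with the previous insert
    console.log(op) // => [ 5, { i: 'abcd' } ]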
- const take = function(n, indivisableField) { - let c; - if (idx === op.length) { return null; } + const take = function (n, indivisableField) { + let c + if (idx === op.length) { + return null + } // assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' - if (typeof(op[idx]) === 'number') { - if ((n == null) || ((op[idx] - offset) <= n)) { - c = op[idx] - offset; - ++idx; offset = 0; - return c; + if (typeof op[idx] === 'number') { + if (n == null || op[idx] - offset <= n) { + c = op[idx] - offset + ++idx + offset = 0 + return c } else { - offset += n; - return n; + offset += n + return n } } else { // Take from the string - const field = op[idx].i ? 'i' : 'd'; - c = {}; - if ((n == null) || ((op[idx][field].length - offset) <= n) || (field === indivisableField)) { - c[field] = op[idx][field].slice(offset); - ++idx; offset = 0; + const field = op[idx].i ? 'i' : 'd' + c = {} + if ( + n == null || + op[idx][field].length - offset <= n || + field === indivisableField + ) { + c[field] = op[idx][field].slice(offset) + ++idx + offset = 0 } else { - c[field] = op[idx][field].slice(offset, (offset + n)); - offset += n; + c[field] = op[idx][field].slice(offset, offset + n) + offset += n } - return c; + return c } - }; - - const peekType = () => op[idx]; - - return [take, peekType]; -}; + } + + const peekType = () => op[idx] + + return [take, peekType] +} // Find and return the length of an op component -const componentLength = function(component) { - if (typeof(component) === 'number') { - return component; +const componentLength = function (component) { + if (typeof component === 'number') { + return component } else if (component.i != null) { - return component.i.length; + return component.i.length } else { - return component.d.length; + return component.d.length } -}; +} // Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate // adjacent inserts and deletes. -exports.normalize = function(op) { - const newOp = []; - const append = makeAppend(newOp); - for (const component of Array.from(op)) { append(component); } - return newOp; -}; +exports.normalize = function (op) { + const newOp = [] + const append = makeAppend(newOp) + for (const component of Array.from(op)) { + append(component) + } + return newOp +} // Apply the op to the string. Returns the new string. 
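Aside, not part of the patch: a worked illustration of the contract just stated, as a self-contained editorial sketch that omits the validation the real implementation performs. Skips copy characters from the snapshot, inserts contribute new text, and deletes consume matching text.

    function applySketch(str, op) {
      const out = []
      for (const c of op) {
        if (typeof c === 'number') {
          out.push(str.slice(0, c)) // skip: copy c characters
          str = str.slice(c)
        } else if (c.i != null) {
          out.push(c.i) // insert new text
        } else {
          str = str.slice(c.d.length) // delete: drop the matching text
        }
      }
      if (str !== '') throw new Error('op must traverse the whole document')
      return out.join('')
    }

    console.log(applySketch('abc', [1, { i: 'X' }, 2])) // => 'aXbc'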
-exports.apply = function(str, op) { - p(`Applying ${i(op)} to '${str}'`); - if (typeof(str) !== 'string') { throw new Error('Snapshot should be a string'); } - checkOp(op); +exports.apply = function (str, op) { + p(`Applying ${i(op)} to '${str}'`) + if (typeof str !== 'string') { + throw new Error('Snapshot should be a string') + } + checkOp(op) - const pos = 0; - const newDoc = []; + const pos = 0 + const newDoc = [] for (const component of Array.from(op)) { - if (typeof(component) === 'number') { - if (component > str.length) { throw new Error('The op is too long for this document'); } - newDoc.push(str.slice(0, component)); - str = str.slice(component); + if (typeof component === 'number') { + if (component > str.length) { + throw new Error('The op is too long for this document') + } + newDoc.push(str.slice(0, component)) + str = str.slice(component) } else if (component.i != null) { - newDoc.push(component.i); + newDoc.push(component.i) } else { - if (component.d !== str.slice(0, component.d.length)) { throw new Error(`The deleted text '${component.d}' doesn't match the next characters in the document '${str.slice(0, component.d.length)}'`); } - str = str.slice(component.d.length); + if (component.d !== str.slice(0, component.d.length)) { + throw new Error( + `The deleted text '${ + component.d + }' doesn't match the next characters in the document '${str.slice( + 0, + component.d.length + )}'` + ) + } + str = str.slice(component.d.length) } } - - if (str !== '') { throw new Error("The applied op doesn't traverse the entire document"); } - return newDoc.join(''); -}; + if (str !== '') { + throw new Error("The applied op doesn't traverse the entire document") + } + + return newDoc.join('') +} // transform op1 by op2. Return transformed version of op1. // op1 and op2 are unchanged by transform. -exports.transform = function(op, otherOp, side) { - let component; - if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side} must be 'left' or 'right'`); } +exports.transform = function (op, otherOp, side) { + let component + if (side !== 'left' && side !== 'right') { + throw new Error(`side (${side} must be 'left' or 'right'`) + } - checkOp(op); - checkOp(otherOp); - const newOp = []; + checkOp(op) + checkOp(otherOp) + const newOp = [] - const append = makeAppend(newOp); - const [take, peek] = Array.from(makeTake(op)); + const append = makeAppend(newOp) + const [take, peek] = Array.from(makeTake(op)) for (component of Array.from(otherOp)) { - var chunk, length; - if (typeof(component) === 'number') { // Skip - length = component; + var chunk, length + if (typeof component === 'number') { + // Skip + length = component while (length > 0) { - chunk = take(length, 'i'); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(length, 'i') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } - append(chunk); - if ((typeof(chunk) !== 'object') || (chunk.i == null)) { length -= componentLength(chunk); } + append(chunk) + if (typeof chunk !== 'object' || chunk.i == null) { + length -= componentLength(chunk) + } } - } else if (component.i != null) { // Insert + } else if (component.i != null) { + // Insert if (side === 'left') { // The left insert should go first. - const o = peek(); - if (o != null ? o.i : undefined) { append(take()); } + const o = peek() + if (o != null ? o.i : undefined) { + append(take()) + } } // Otherwise, skip the inserted text. 
- append(component.i.length); - } else { // Delete. + append(component.i.length) + } else { + // Delete. // assert.ok component.d - ({ - length - } = component.d); + ;({ length } = component.d) while (length > 0) { - chunk = take(length, 'i'); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(length, 'i') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } - if (typeof(chunk) === 'number') { - length -= chunk; + if (typeof chunk === 'number') { + length -= chunk } else if (chunk.i != null) { - append(chunk); + append(chunk) } else { // assert.ok chunk.d // The delete is unnecessary now. - length -= chunk.d.length; + length -= chunk.d.length } } } } - + // Append extras from op1 - while (component = take()) { - if ((component != null ? component.i : undefined) == null) { throw new Error(`Remaining fragments in the op: ${i(component)}`); } - append(component); + while ((component = take())) { + if ((component != null ? component.i : undefined) == null) { + throw new Error(`Remaining fragments in the op: ${i(component)}`) + } + append(component) } - return newOp; -}; - + return newOp +} // Compose 2 ops into 1 op. -exports.compose = function(op1, op2) { - let component; - p(`COMPOSE ${i(op1)} + ${i(op2)}`); - checkOp(op1); - checkOp(op2); +exports.compose = function (op1, op2) { + let component + p(`COMPOSE ${i(op1)} + ${i(op2)}`) + checkOp(op1) + checkOp(op2) - const result = []; + const result = [] - const append = makeAppend(result); - const [take, _] = Array.from(makeTake(op1)); + const append = makeAppend(result) + const [take, _] = Array.from(makeTake(op1)) for (component of Array.from(op2)) { - var chunk, length; - if (typeof(component) === 'number') { // Skip - length = component; + var chunk, length + if (typeof component === 'number') { + // Skip + length = component while (length > 0) { - chunk = take(length, 'd'); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(length, 'd') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } - append(chunk); - if ((typeof(chunk) !== 'object') || (chunk.d == null)) { length -= componentLength(chunk); } + append(chunk) + if (typeof chunk !== 'object' || chunk.d == null) { + length -= componentLength(chunk) + } } - - } else if (component.i != null) { // Insert - append({i:component.i}); - - } else { // Delete - let offset = 0; + } else if (component.i != null) { + // Insert + append({ i: component.i }) + } else { + // Delete + let offset = 0 while (offset < component.d.length) { - chunk = take(component.d.length - offset, 'd'); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(component.d.length - offset, 'd') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } // If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. 
- if (typeof(chunk) === 'number') { - append({d:component.d.slice(offset, (offset + chunk))}); - offset += chunk; + if (typeof chunk === 'number') { + append({ d: component.d.slice(offset, offset + chunk) }) + offset += chunk } else if (chunk.i != null) { - if (component.d.slice(offset, (offset + chunk.i.length)) !== chunk.i) { throw new Error("The deleted text doesn't match the inserted text"); } - offset += chunk.i.length; + if (component.d.slice(offset, offset + chunk.i.length) !== chunk.i) { + throw new Error("The deleted text doesn't match the inserted text") + } + offset += chunk.i.length // The ops cancel each other out. } else { // Delete - append(chunk); + append(chunk) } } } } - + // Append extras from op1 - while (component = take()) { - if ((component != null ? component.d : undefined) == null) { throw new Error(`Trailing stuff in op1 ${i(component)}`); } - append(component); + while ((component = take())) { + if ((component != null ? component.d : undefined) == null) { + throw new Error(`Trailing stuff in op1 ${i(component)}`) + } + append(component) } - return result; -}; - - -const invertComponent = function(c) { - if (typeof(c) === 'number') { - return c; - } else if (c.i != null) { - return {d:c.i}; - } else { - return {i:c.d}; - } -}; - -// Invert an op -exports.invert = function(op) { - const result = []; - const append = makeAppend(result); - - for (const component of Array.from(op)) { append(invertComponent(component)); } - - return result; -}; - -if (typeof window !== 'undefined' && window !== null) { - if (!window.ot) { window.ot = {}; } - if (!window.ot.types) { window.ot.types = {}; } - window.ot.types.text = exports; + return result } +const invertComponent = function (c) { + if (typeof c === 'number') { + return c + } else if (c.i != null) { + return { d: c.i } + } else { + return { i: c.d } + } +} + +// Invert an op +exports.invert = function (op) { + const result = [] + const append = makeAppend(result) + + for (const component of Array.from(op)) { + append(invertComponent(component)) + } + + return result +} + +if (typeof window !== 'undefined' && window !== null) { + if (!window.ot) { + window.ot = {} + } + if (!window.ot.types) { + window.ot.types = {} + } + window.ot.types.text = exports +} diff --git a/services/document-updater/app/js/sharejs/types/text-tp2-api.js b/services/document-updater/app/js/sharejs/types/text-tp2-api.js index 97bf606267..3ab7ef1cb5 100644 --- a/services/document-updater/app/js/sharejs/types/text-tp2-api.js +++ b/services/document-updater/app/js/sharejs/types/text-tp2-api.js @@ -13,111 +13,121 @@ */ // Text document API for text-tp2 -let type; +let type if (typeof WEB !== 'undefined' && WEB !== null) { - type = exports.types['text-tp2']; + type = exports.types['text-tp2'] } else { - type = require('./text-tp2'); + type = require('./text-tp2') } -const {_takeDoc:takeDoc, _append:append} = type; +const { _takeDoc: takeDoc, _append: append } = type -const appendSkipChars = (op, doc, pos, maxlength) => (() => { - const result = []; - while (((maxlength === undefined) || (maxlength > 0)) && (pos.index < doc.data.length)) { - const part = takeDoc(doc, pos, maxlength, true); - if ((maxlength !== undefined) && (typeof part === 'string')) { maxlength -= part.length; } - result.push(append(op, (part.length || part))); - } - return result; -})(); +const appendSkipChars = (op, doc, pos, maxlength) => + (() => { + const result = [] + while ( + (maxlength === undefined || maxlength > 0) && + pos.index < doc.data.length + ) { + const part = 
takeDoc(doc, pos, maxlength, true) + if (maxlength !== undefined && typeof part === 'string') { + maxlength -= part.length + } + result.push(append(op, part.length || part)) + } + return result + })() type.api = { - 'provides': {'text':true}, + provides: { text: true }, // The number of characters in the string - 'getLength'() { return this.snapshot.charLength; }, + getLength() { + return this.snapshot.charLength + }, // Flatten a document into a string - 'getText'() { - const strings = (Array.from(this.snapshot.data).filter((elem) => typeof elem === 'string')); - return strings.join(''); + getText() { + const strings = Array.from(this.snapshot.data).filter( + (elem) => typeof elem === 'string' + ) + return strings.join('') }, - 'insert'(pos, text, callback) { - if (pos === undefined) { pos = 0; } + insert(pos, text, callback) { + if (pos === undefined) { + pos = 0 + } - const op = []; - const docPos = {index:0, offset:0}; + const op = [] + const docPos = { index: 0, offset: 0 } - appendSkipChars(op, this.snapshot, docPos, pos); - append(op, {'i':text}); - appendSkipChars(op, this.snapshot, docPos); - - this.submitOp(op, callback); - return op; + appendSkipChars(op, this.snapshot, docPos, pos) + append(op, { i: text }) + appendSkipChars(op, this.snapshot, docPos) + + this.submitOp(op, callback) + return op }, - - 'del'(pos, length, callback) { - const op = []; - const docPos = {index:0, offset:0}; - appendSkipChars(op, this.snapshot, docPos, pos); - + del(pos, length, callback) { + const op = [] + const docPos = { index: 0, offset: 0 } + + appendSkipChars(op, this.snapshot, docPos, pos) + while (length > 0) { - const part = takeDoc(this.snapshot, docPos, length, true); + const part = takeDoc(this.snapshot, docPos, length, true) if (typeof part === 'string') { - append(op, {'d':part.length}); - length -= part.length; + append(op, { d: part.length }) + length -= part.length } else { - append(op, part); + append(op, part) } } - - appendSkipChars(op, this.snapshot, docPos); - this.submitOp(op, callback); - return op; + appendSkipChars(op, this.snapshot, docPos) + + this.submitOp(op, callback) + return op }, - '_register'() { + _register() { // Interpret recieved ops + generate more detailed events for them - return this.on('remoteop', function(op, snapshot) { - let textPos = 0; - const docPos = {index:0, offset:0}; + return this.on('remoteop', function (op, snapshot) { + let textPos = 0 + const docPos = { index: 0, offset: 0 } for (const component of Array.from(op)) { - var part, remainder; + var part, remainder if (typeof component === 'number') { // Skip - remainder = component; + remainder = component while (remainder > 0) { - part = takeDoc(snapshot, docPos, remainder); + part = takeDoc(snapshot, docPos, remainder) if (typeof part === 'string') { - textPos += part.length; + textPos += part.length } - remainder -= part.length || part; + remainder -= part.length || part } } else if (component.i !== undefined) { // Insert if (typeof component.i === 'string') { - this.emit('insert', textPos, component.i); - textPos += component.i.length; + this.emit('insert', textPos, component.i) + textPos += component.i.length } } else { // Delete - remainder = component.d; + remainder = component.d while (remainder > 0) { - part = takeDoc(snapshot, docPos, remainder); + part = takeDoc(snapshot, docPos, remainder) if (typeof part === 'string') { - this.emit('delete', textPos, part); + this.emit('delete', textPos, part) } - remainder -= part.length || part; + remainder -= part.length || part } } } - - }); + 
}) } -}; - +} diff --git a/services/document-updater/app/js/sharejs/types/text-tp2.js b/services/document-updater/app/js/sharejs/types/text-tp2.js index 4efcb05871..f1e3c97dd6 100644 --- a/services/document-updater/app/js/sharejs/types/text-tp2.js +++ b/services/document-updater/app/js/sharejs/types/text-tp2.js @@ -38,369 +38,460 @@ // Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters) // would be represented by a document snapshot of ['Hello ', 5, 'world'] -let append, appendDoc, takeDoc; +let append, appendDoc, takeDoc var type = { name: 'text-tp2', tp2: true, - create() { return {charLength:0, totalLength:0, positionCache:[], data:[]}; }, + create() { + return { charLength: 0, totalLength: 0, positionCache: [], data: [] } + }, serialize(doc) { - if (!doc.data) { throw new Error('invalid doc snapshot'); } - return doc.data; + if (!doc.data) { + throw new Error('invalid doc snapshot') + } + return doc.data }, deserialize(data) { - const doc = type.create(); - doc.data = data; - + const doc = type.create() + doc.data = data + for (const component of Array.from(data)) { if (typeof component === 'string') { - doc.charLength += component.length; - doc.totalLength += component.length; + doc.charLength += component.length + doc.totalLength += component.length } else { - doc.totalLength += component; + doc.totalLength += component } } - - return doc; + + return doc } -}; +} - -const checkOp = function(op) { - if (!Array.isArray(op)) { throw new Error('Op must be an array of components'); } - let last = null; +const checkOp = function (op) { + if (!Array.isArray(op)) { + throw new Error('Op must be an array of components') + } + let last = null return (() => { - const result = []; + const result = [] for (const c of Array.from(op)) { - if (typeof(c) === 'object') { + if (typeof c === 'object') { if (c.i !== undefined) { - if (((typeof(c.i) !== 'string') || !(c.i.length > 0)) && ((typeof(c.i) !== 'number') || !(c.i > 0))) { throw new Error('Inserts must insert a string or a +ive number'); } + if ( + (typeof c.i !== 'string' || !(c.i.length > 0)) && + (typeof c.i !== 'number' || !(c.i > 0)) + ) { + throw new Error('Inserts must insert a string or a +ive number') + } } else if (c.d !== undefined) { - if ((typeof(c.d) !== 'number') || !(c.d > 0)) { throw new Error('Deletes must be a +ive number'); } + if (typeof c.d !== 'number' || !(c.d > 0)) { + throw new Error('Deletes must be a +ive number') + } } else { - throw new Error('Operation component must define .i or .d'); + throw new Error('Operation component must define .i or .d') } } else { - if (typeof(c) !== 'number') { throw new Error('Op components must be objects or numbers'); } - if (!(c > 0)) { throw new Error('Skip components must be a positive number'); } - if (typeof(last) === 'number') { throw new Error('Adjacent skip components should be combined'); } + if (typeof c !== 'number') { + throw new Error('Op components must be objects or numbers') + } + if (!(c > 0)) { + throw new Error('Skip components must be a positive number') + } + if (typeof last === 'number') { + throw new Error('Adjacent skip components should be combined') + } } - result.push(last = c); + result.push((last = c)) } - return result; - })(); -}; + return result + })() +} // Take the next part from the specified position in a document snapshot. // position = {index, offset}. It will be updated. 
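Aside, not part of the patch: a small runnable illustration of the snapshot shape this type maintains (the require path is an assumption based on the post-conversion layout in this diff).

    const tp2 = require('./app/js/sharejs/types/text-tp2')

    // 'Hello .....world', where '.' marks tombstoned (deleted) characters:
    const doc = tp2.deserialize(['Hello ', 5, 'world'])
    console.log(doc.charLength) // => 11, visible characters only
    console.log(doc.totalLength) // => 16, characters plus tombstones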
-type._takeDoc = (takeDoc = function(doc, position, maxlength, tombsIndivisible) { - if (position.index >= doc.data.length) { throw new Error('Operation goes past the end of the document'); } - - const part = doc.data[position.index]; - // peel off data[0] - const result = typeof(part) === 'string' ? - maxlength !== undefined ? - part.slice(position.offset, (position.offset + maxlength)) - : - part.slice(position.offset) - : - (maxlength === undefined) || tombsIndivisible ? - part - position.offset - : - Math.min(maxlength, part - position.offset); - - const resultLen = result.length || result; - - if (((part.length || part) - position.offset) > resultLen) { - position.offset += resultLen; - } else { - position.index++; - position.offset = 0; +type._takeDoc = takeDoc = function ( + doc, + position, + maxlength, + tombsIndivisible +) { + if (position.index >= doc.data.length) { + throw new Error('Operation goes past the end of the document') } - - return result; -}); + + const part = doc.data[position.index] + // peel off data[0] + const result = + typeof part === 'string' + ? maxlength !== undefined + ? part.slice(position.offset, position.offset + maxlength) + : part.slice(position.offset) + : maxlength === undefined || tombsIndivisible + ? part - position.offset + : Math.min(maxlength, part - position.offset) + + const resultLen = result.length || result + + if ((part.length || part) - position.offset > resultLen) { + position.offset += resultLen + } else { + position.index++ + position.offset = 0 + } + + return result +} // Append a part to the end of a document -type._appendDoc = (appendDoc = function(doc, p) { - if ((p === 0) || (p === '')) { return; } +type._appendDoc = appendDoc = function (doc, p) { + if (p === 0 || p === '') { + return + } if (typeof p === 'string') { - doc.charLength += p.length; - doc.totalLength += p.length; + doc.charLength += p.length + doc.totalLength += p.length } else { - doc.totalLength += p; + doc.totalLength += p } - const { - data - } = doc; + const { data } = doc if (data.length === 0) { - data.push(p); - } else if (typeof(data[data.length - 1]) === typeof(p)) { - data[data.length - 1] += p; + data.push(p) + } else if (typeof data[data.length - 1] === typeof p) { + data[data.length - 1] += p } else { - data.push(p); + data.push(p) } -}); +} // Apply the op to the document. The document is not modified in the process. 
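Aside, not part of the patch: an editorial example of apply on this representation (require path assumed as above). Deleting characters does not remove them; it converts them into a tombstone count.

    const tp2 = require('./app/js/sharejs/types/text-tp2')

    const before = tp2.deserialize(['abc'])
    const after = tp2.apply(before, [1, { d: 2 }]) // skip 'a', delete 'bc'
    console.log(after.data) // => [ 'a', 2 ]
    console.log(after.charLength, after.totalLength) // => 1 3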
-type.apply = function(doc, op) { - if ((doc.totalLength === undefined) || (doc.charLength === undefined) || (doc.data.length === undefined)) { - throw new Error('Snapshot is invalid'); +type.apply = function (doc, op) { + if ( + doc.totalLength === undefined || + doc.charLength === undefined || + doc.data.length === undefined + ) { + throw new Error('Snapshot is invalid') } - checkOp(op); + checkOp(op) - const newDoc = type.create(); - const position = {index:0, offset:0}; + const newDoc = type.create() + const position = { index: 0, offset: 0 } for (const component of Array.from(op)) { - var part, remainder; - if (typeof(component) === 'number') { - remainder = component; + var part, remainder + if (typeof component === 'number') { + remainder = component while (remainder > 0) { - part = takeDoc(doc, position, remainder); - - appendDoc(newDoc, part); - remainder -= part.length || part; - } + part = takeDoc(doc, position, remainder) - } else if (component.i !== undefined) { - appendDoc(newDoc, component.i); - } else if (component.d !== undefined) { - remainder = component.d; - while (remainder > 0) { - part = takeDoc(doc, position, remainder); - remainder -= part.length || part; + appendDoc(newDoc, part) + remainder -= part.length || part } - appendDoc(newDoc, component.d); + } else if (component.i !== undefined) { + appendDoc(newDoc, component.i) + } else if (component.d !== undefined) { + remainder = component.d + while (remainder > 0) { + part = takeDoc(doc, position, remainder) + remainder -= part.length || part + } + appendDoc(newDoc, component.d) } } - - return newDoc; -}; + + return newDoc +} // Append an op component to the end of the specified op. // Exported for the randomOpGenerator. -type._append = (append = function(op, component) { - if ((component === 0) || (component.i === '') || (component.i === 0) || (component.d === 0)) { - +type._append = append = function (op, component) { + if ( + component === 0 || + component.i === '' || + component.i === 0 || + component.d === 0 + ) { } else if (op.length === 0) { - return op.push(component); + return op.push(component) } else { - const last = op[op.length - 1]; - if ((typeof(component) === 'number') && (typeof(last) === 'number')) { - return op[op.length - 1] += component; - } else if ((component.i !== undefined) && (last.i != null) && (typeof(last.i) === typeof(component.i))) { - return last.i += component.i; - } else if ((component.d !== undefined) && (last.d != null)) { - return last.d += component.d; + const last = op[op.length - 1] + if (typeof component === 'number' && typeof last === 'number') { + return (op[op.length - 1] += component) + } else if ( + component.i !== undefined && + last.i != null && + typeof last.i === typeof component.i + ) { + return (last.i += component.i) + } else if (component.d !== undefined && last.d != null) { + return (last.d += component.d) } else { - return op.push(component); + return op.push(component) } } -}); - +} + // Makes 2 functions for taking components from the start of an op, and for peeking // at the next op that could be taken. -const makeTake = function(op) { +const makeTake = function (op) { // The index of the next component to take - let index = 0; + let index = 0 // The offset into the component - let offset = 0; + let offset = 0 // Take up to length maxlength from the op. If maxlength is not defined, there is no max. // If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. // // Returns null when op is fully consumed. 
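Aside, not part of the patch: an editorial illustration of type._append above (require path assumed as above). Typed inserts merge only with inserts of the same kind, so text never coalesces with tombstone counts.

    const tp2 = require('./app/js/sharejs/types/text-tp2')

    const op = []
    tp2._append(op, { i: 'ab' })
    tp2._append(op, { i: 'cd' }) // same kind: merged
    tp2._append(op, { i: 3 }) // tombstone insert: kept separate
    console.log(op) // => [ { i: 'abcd' }, { i: 3 } ]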
- const take = function(maxlength, insertsIndivisible) { - let current; - if (index === op.length) { return null; } + const take = function (maxlength, insertsIndivisible) { + let current + if (index === op.length) { + return null + } - const e = op[index]; - if ((typeof((current = e)) === 'number') || (typeof((current = e.i)) === 'number') || ((current = e.d) !== undefined)) { - let c; - if ((maxlength == null) || ((current - offset) <= maxlength) || (insertsIndivisible && (e.i !== undefined))) { + const e = op[index] + if ( + typeof (current = e) === 'number' || + typeof (current = e.i) === 'number' || + (current = e.d) !== undefined + ) { + let c + if ( + maxlength == null || + current - offset <= maxlength || + (insertsIndivisible && e.i !== undefined) + ) { // Return the rest of the current element. - c = current - offset; - ++index; offset = 0; + c = current - offset + ++index + offset = 0 } else { - offset += maxlength; - c = maxlength; + offset += maxlength + c = maxlength + } + if (e.i !== undefined) { + return { i: c } + } else if (e.d !== undefined) { + return { d: c } + } else { + return c } - if (e.i !== undefined) { return {i:c}; } else if (e.d !== undefined) { return {d:c}; } else { return c; } } else { // Take from the inserted string - let result; - if ((maxlength == null) || ((e.i.length - offset) <= maxlength) || insertsIndivisible) { - result = {i:e.i.slice(offset)}; - ++index; offset = 0; + let result + if ( + maxlength == null || + e.i.length - offset <= maxlength || + insertsIndivisible + ) { + result = { i: e.i.slice(offset) } + ++index + offset = 0 } else { - result = {i:e.i.slice(offset, offset + maxlength)}; - offset += maxlength; + result = { i: e.i.slice(offset, offset + maxlength) } + offset += maxlength } - return result; + return result } - }; - - const peekType = () => op[index]; - - return [take, peekType]; -}; + } + + const peekType = () => op[index] + + return [take, peekType] +} // Find and return the length of an op component -const componentLength = function(component) { - if (typeof(component) === 'number') { - return component; - } else if (typeof(component.i) === 'string') { - return component.i.length; +const componentLength = function (component) { + if (typeof component === 'number') { + return component + } else if (typeof component.i === 'string') { + return component.i.length } else { // This should work because c.d and c.i must be +ive. - return component.d || component.i; + return component.d || component.i } -}; +} // Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate // adjacent inserts and deletes. -type.normalize = function(op) { - const newOp = []; - for (const component of Array.from(op)) { append(newOp, component); } - return newOp; -}; +type.normalize = function (op) { + const newOp = [] + for (const component of Array.from(op)) { + append(newOp, component) + } + return newOp +} // This is a helper method to transform and prune. goForwards is true for transform, false for prune. 
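Aside, not part of the patch: an editorial illustration of normalize, converted just above (require path assumed as above). Empty components vanish and adjacent compatible components are combined via _append.

    const tp2 = require('./app/js/sharejs/types/text-tp2')

    console.log(tp2.normalize([1, 2, { i: 'a' }, { i: 'b' }]))
    // => [ 3, { i: 'ab' } ]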
-const transformer = function(op, otherOp, goForwards, side) { - let component; - checkOp(op); - checkOp(otherOp); - const newOp = []; +const transformer = function (op, otherOp, goForwards, side) { + let component + checkOp(op) + checkOp(otherOp) + const newOp = [] - const [take, peek] = Array.from(makeTake(op)); + const [take, peek] = Array.from(makeTake(op)) for (component of Array.from(otherOp)) { - var chunk; - let length = componentLength(component); + var chunk + let length = componentLength(component) - if (component.i !== undefined) { // Insert text or tombs - if (goForwards) { // transform - insert skips over inserted parts + if (component.i !== undefined) { + // Insert text or tombs + if (goForwards) { + // transform - insert skips over inserted parts if (side === 'left') { // The left insert should go first. - while (__guard__(peek(), x => x.i) !== undefined) { append(newOp, take()); } + while (__guard__(peek(), (x) => x.i) !== undefined) { + append(newOp, take()) + } } // In any case, skip the inserted text. - append(newOp, length); - - } else { // Prune. Remove skips for inserts. + append(newOp, length) + } else { + // Prune. Remove skips for inserts. while (length > 0) { - chunk = take(length, true); + chunk = take(length, true) - if (chunk === null) { throw new Error('The transformed op is invalid'); } - if (chunk.d !== undefined) { throw new Error('The transformed op deletes locally inserted characters - it cannot be purged of the insert.'); } + if (chunk === null) { + throw new Error('The transformed op is invalid') + } + if (chunk.d !== undefined) { + throw new Error( + 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' + ) + } if (typeof chunk === 'number') { - length -= chunk; + length -= chunk } else { - append(newOp, chunk); + append(newOp, chunk) } } } - - } else { // Skip or delete + } else { + // Skip or delete while (length > 0) { - chunk = take(length, true); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } + chunk = take(length, true) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } - append(newOp, chunk); - if (!chunk.i) { length -= componentLength(chunk); } + append(newOp, chunk) + if (!chunk.i) { + length -= componentLength(chunk) + } } } } - + // Append extras from op1 - while (component = take()) { - if (component.i === undefined) { throw new Error(`Remaining fragments in the op: ${component}`); } - append(newOp, component); + while ((component = take())) { + if (component.i === undefined) { + throw new Error(`Remaining fragments in the op: ${component}`) + } + append(newOp, component) } - return newOp; -}; + return newOp +} // transform op1 by op2. Return transformed version of op1. // op1 and op2 are unchanged by transform. // side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. -type.transform = function(op, otherOp, side) { - if ((side !== 'left') && (side !== 'right')) { throw new Error(`side (${side}) should be 'left' or 'right'`); } - return transformer(op, otherOp, true, side); -}; +type.transform = function (op, otherOp, side) { + if (side !== 'left' && side !== 'right') { + throw new Error(`side (${side}) should be 'left' or 'right'`) + } + return transformer(op, otherOp, true, side) +} // Prune is the inverse of transform. 
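Aside, not part of the patch: an editorial illustration of transform and its inverse (require path assumed as above). Two concurrent inserts at position 0: the 'left' op keeps priority, the 'right' op is shifted past the other insert, and prune undoes the transform.

    const tp2 = require('./app/js/sharejs/types/text-tp2')

    console.log(tp2.transform([{ i: 'a' }], [{ i: 'b' }], 'left'))
    // => [ { i: 'a' }, 1 ]
    console.log(tp2.transform([{ i: 'a' }], [{ i: 'b' }], 'right'))
    // => [ 1, { i: 'a' } ]
    console.log(tp2.prune([1, { i: 'a' }], [{ i: 'b' }]))
    // => [ { i: 'a' } ], the original op again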
-type.prune = (op, otherOp) => transformer(op, otherOp, false); +type.prune = (op, otherOp) => transformer(op, otherOp, false) // Compose 2 ops into 1 op. -type.compose = function(op1, op2) { - let component; - if ((op1 === null) || (op1 === undefined)) { return op2; } +type.compose = function (op1, op2) { + let component + if (op1 === null || op1 === undefined) { + return op2 + } - checkOp(op1); - checkOp(op2); + checkOp(op1) + checkOp(op2) - const result = []; + const result = [] - const [take, _] = Array.from(makeTake(op1)); + const [take, _] = Array.from(makeTake(op1)) for (component of Array.from(op2)) { - - var chunk, length; - if (typeof(component) === 'number') { // Skip + var chunk, length + if (typeof component === 'number') { + // Skip // Just copy from op1. - length = component; + length = component while (length > 0) { - chunk = take(length); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - - append(result, chunk); - length -= componentLength(chunk); - } - - } else if (component.i !== undefined) { // Insert - append(result, {i:component.i}); - - } else { // Delete - length = component.d; - while (length > 0) { - chunk = take(length); - if (chunk === null) { throw new Error('The op traverses more elements than the document has'); } - - const chunkLength = componentLength(chunk); - if (chunk.i !== undefined) { - append(result, {i:chunkLength}); - } else { - append(result, {d:chunkLength}); + chunk = take(length) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) } - length -= chunkLength; + append(result, chunk) + length -= componentLength(chunk) + } + } else if (component.i !== undefined) { + // Insert + append(result, { i: component.i }) + } else { + // Delete + length = component.d + while (length > 0) { + chunk = take(length) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + const chunkLength = componentLength(chunk) + if (chunk.i !== undefined) { + append(result, { i: chunkLength }) + } else { + append(result, { d: chunkLength }) + } + + length -= chunkLength } } } - + // Append extras from op1 - while (component = take()) { - if (component.i === undefined) { throw new Error(`Remaining fragments in op1: ${component}`); } - append(result, component); + while ((component = take())) { + if (component.i === undefined) { + throw new Error(`Remaining fragments in op1: ${component}`) + } + append(result, component) } - return result; -}; - -if (typeof WEB !== 'undefined' && WEB !== null) { - exports.types['text-tp2'] = type; -} else { - module.exports = type; + return result } +if (typeof WEB !== 'undefined' && WEB !== null) { + exports.types['text-tp2'] = type +} else { + module.exports = type +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/services/document-updater/app/js/sharejs/types/text.js b/services/document-updater/app/js/sharejs/types/text.js index 66aee0f7d7..530d4c4987 100644 --- a/services/document-updater/app/js/sharejs/types/text.js +++ b/services/document-updater/app/js/sharejs/types/text.js @@ -32,104 +32,153 @@ // NOTE: The global scope here is shared with other sharejs files when built with closure. // Be careful what ends up in your namespace. 
-let append, transformComponent; -const text = {}; +let append, transformComponent +const text = {} -text.name = 'text'; +text.name = 'text' -text.create = () => ''; +text.create = () => '' -const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos); +const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos) -const checkValidComponent = function(c) { - if (typeof c.p !== 'number') { throw new Error('component missing position field'); } +const checkValidComponent = function (c) { + if (typeof c.p !== 'number') { + throw new Error('component missing position field') + } - const i_type = typeof c.i; - const d_type = typeof c.d; - const c_type = typeof c.c; - if (!((i_type === 'string') ^ (d_type === 'string') ^ (c_type === 'string'))) { throw new Error('component needs an i, d or c field'); } + const i_type = typeof c.i + const d_type = typeof c.d + const c_type = typeof c.c + if ( + !((i_type === 'string') ^ (d_type === 'string') ^ (c_type === 'string')) + ) { + throw new Error('component needs an i, d or c field') + } - if (!(c.p >= 0)) { throw new Error('position cannot be negative'); } -}; + if (!(c.p >= 0)) { + throw new Error('position cannot be negative') + } +} -const checkValidOp = function(op) { - for (const c of Array.from(op)) { checkValidComponent(c); } - return true; -}; +const checkValidOp = function (op) { + for (const c of Array.from(op)) { + checkValidComponent(c) + } + return true +} -text.apply = function(snapshot, op) { - checkValidOp(op); +text.apply = function (snapshot, op) { + checkValidOp(op) for (const component of Array.from(op)) { if (component.i != null) { - snapshot = strInject(snapshot, component.p, component.i); + snapshot = strInject(snapshot, component.p, component.i) } else if (component.d != null) { - const deleted = snapshot.slice(component.p, (component.p + component.d.length)); - if (component.d !== deleted) { throw new Error(`Delete component '${component.d}' does not match deleted text '${deleted}'`); } - snapshot = snapshot.slice(0, component.p) + snapshot.slice((component.p + component.d.length)); + const deleted = snapshot.slice( + component.p, + component.p + component.d.length + ) + if (component.d !== deleted) { + throw new Error( + `Delete component '${component.d}' does not match deleted text '${deleted}'` + ) + } + snapshot = + snapshot.slice(0, component.p) + + snapshot.slice(component.p + component.d.length) } else if (component.c != null) { - const comment = snapshot.slice(component.p, (component.p + component.c.length)); - if (component.c !== comment) { throw new Error(`Comment component '${component.c}' does not match commented text '${comment}'`); } + const comment = snapshot.slice( + component.p, + component.p + component.c.length + ) + if (component.c !== comment) { + throw new Error( + `Comment component '${component.c}' does not match commented text '${comment}'` + ) + } } else { - throw new Error("Unknown op type"); + throw new Error('Unknown op type') } } - return snapshot; -}; - + return snapshot +} // Exported for use by the random op generator. // // For simplicity, this version of append does not compress adjacent inserts and deletes of // the same text. It would be nice to change that at some stage. 
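Aside, not part of the patch: a runnable illustration of the position-based op format this type uses (require path assumed; requiring the module also pulls in ./helpers, which ships alongside it in this service). Each component carries an absolute position p together with an insert i, delete d, or comment c string.

    const text = require('./app/js/sharejs/types/text')

    console.log(text.apply('abc', [{ p: 1, i: 'X' }])) // => 'aXbc'
    console.log(text.apply('abc', [{ p: 1, d: 'b' }])) // => 'ac'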
-text._append = (append = function(newOp, c) { - if ((c.i === '') || (c.d === '')) { return; } +text._append = append = function (newOp, c) { + if (c.i === '' || c.d === '') { + return + } if (newOp.length === 0) { - return newOp.push(c); + return newOp.push(c) } else { - const last = newOp[newOp.length - 1]; + const last = newOp[newOp.length - 1] // Compose the insert into the previous insert if possible - if ((last.i != null) && (c.i != null) && (last.p <= c.p && c.p <= (last.p + last.i.length))) { - return newOp[newOp.length - 1] = {i:strInject(last.i, c.p - last.p, c.i), p:last.p}; - } else if ((last.d != null) && (c.d != null) && (c.p <= last.p && last.p <= (c.p + c.d.length))) { - return newOp[newOp.length - 1] = {d:strInject(c.d, last.p - c.p, last.d), p:c.p}; + if ( + last.i != null && + c.i != null && + last.p <= c.p && + c.p <= last.p + last.i.length + ) { + return (newOp[newOp.length - 1] = { + i: strInject(last.i, c.p - last.p, c.i), + p: last.p + }) + } else if ( + last.d != null && + c.d != null && + c.p <= last.p && + last.p <= c.p + c.d.length + ) { + return (newOp[newOp.length - 1] = { + d: strInject(c.d, last.p - c.p, last.d), + p: c.p + }) } else { - return newOp.push(c); + return newOp.push(c) } } -}); +} -text.compose = function(op1, op2) { - checkValidOp(op1); - checkValidOp(op2); +text.compose = function (op1, op2) { + checkValidOp(op1) + checkValidOp(op2) - const newOp = op1.slice(); - for (const c of Array.from(op2)) { append(newOp, c); } + const newOp = op1.slice() + for (const c of Array.from(op2)) { + append(newOp, c) + } - return newOp; -}; + return newOp +} // Attempt to compress the op components together 'as much as possible'. // This implementation preserves order and preserves create/delete pairs. -text.compress = op => text.compose([], op); +text.compress = (op) => text.compose([], op) + +text.normalize = function (op) { + const newOp = [] -text.normalize = function(op) { - const newOp = []; - // Normalize should allow ops which are a single (unwrapped) component: // {i:'asdf', p:23}. // There's no good way to test if something is an array: // http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/ // so this is probably the least bad solution. - if ((op.i != null) || (op.p != null)) { op = [op]; } + if (op.i != null || op.p != null) { + op = [op] + } for (const c of Array.from(op)) { - if (c.p == null) { c.p = 0; } - append(newOp, c); + if (c.p == null) { + c.p = 0 + } + append(newOp, c) } - - return newOp; -}; + + return newOp +} // This helper method transforms a position by an op component. // @@ -137,176 +186,205 @@ text.normalize = function(op) { // is pushed after the insert (true) or before it (false). // // insertAfter is optional for deletes. -const transformPosition = function(pos, c, insertAfter) { +const transformPosition = function (pos, c, insertAfter) { if (c.i != null) { - if ((c.p < pos) || ((c.p === pos) && insertAfter)) { - return pos + c.i.length; + if (c.p < pos || (c.p === pos && insertAfter)) { + return pos + c.i.length } else { - return pos; + return pos } } else if (c.d != null) { // I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) // but I think its harder to read that way, and it compiles using ternary operators anyway // so its no slower written like this. 
if (pos <= c.p) { - return pos; - } else if (pos <= (c.p + c.d.length)) { - return c.p; + return pos + } else if (pos <= c.p + c.d.length) { + return c.p } else { - return pos - c.d.length; + return pos - c.d.length } } else if (c.c != null) { - return pos; + return pos } else { - throw new Error("unknown op type"); + throw new Error('unknown op type') } -}; +} // Helper method to transform a cursor position as a result of an op. // // Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position // is pushed after an insert (true) or before it (false). -text.transformCursor = function(position, op, side) { - const insertAfter = side === 'right'; - for (const c of Array.from(op)) { position = transformPosition(position, c, insertAfter); } - return position; -}; +text.transformCursor = function (position, op, side) { + const insertAfter = side === 'right' + for (const c of Array.from(op)) { + position = transformPosition(position, c, insertAfter) + } + return position +} // Transform an op component by another op component. Asymmetric. // The result will be appended to destination. // // exported for use in JSON type -text._tc = (transformComponent = function(dest, c, otherC, side) { - let cIntersect, intersectEnd, intersectStart, newC, otherIntersect; - checkValidOp([c]); - checkValidOp([otherC]); +text._tc = transformComponent = function (dest, c, otherC, side) { + let cIntersect, intersectEnd, intersectStart, newC, otherIntersect + checkValidOp([c]) + checkValidOp([otherC]) if (c.i != null) { - append(dest, {i:c.i, p:transformPosition(c.p, otherC, side === 'right')}); - - } else if (c.d != null) { // Delete - if (otherC.i != null) { // delete vs insert - let s = c.d; + append(dest, { + i: c.i, + p: transformPosition(c.p, otherC, side === 'right') + }) + } else if (c.d != null) { + // Delete + if (otherC.i != null) { + // delete vs insert + let s = c.d if (c.p < otherC.p) { - append(dest, {d:s.slice(0, otherC.p - c.p), p:c.p}); - s = s.slice((otherC.p - c.p)); + append(dest, { d: s.slice(0, otherC.p - c.p), p: c.p }) + s = s.slice(otherC.p - c.p) } if (s !== '') { - append(dest, {d:s, p:c.p + otherC.i.length}); + append(dest, { d: s, p: c.p + otherC.i.length }) } - - } else if (otherC.d != null) { // Delete vs delete - if (c.p >= (otherC.p + otherC.d.length)) { - append(dest, {d:c.d, p:c.p - otherC.d.length}); - } else if ((c.p + c.d.length) <= otherC.p) { - append(dest, c); + } else if (otherC.d != null) { + // Delete vs delete + if (c.p >= otherC.p + otherC.d.length) { + append(dest, { d: c.d, p: c.p - otherC.d.length }) + } else if (c.p + c.d.length <= otherC.p) { + append(dest, c) } else { // They overlap somewhere. 
- newC = {d:'', p:c.p}; + newC = { d: '', p: c.p } if (c.p < otherC.p) { - newC.d = c.d.slice(0, (otherC.p - c.p)); + newC.d = c.d.slice(0, otherC.p - c.p) } - if ((c.p + c.d.length) > (otherC.p + otherC.d.length)) { - newC.d += c.d.slice(((otherC.p + otherC.d.length) - c.p)); + if (c.p + c.d.length > otherC.p + otherC.d.length) { + newC.d += c.d.slice(otherC.p + otherC.d.length - c.p) } // This is entirely optional - just for a check that the deleted // text in the two ops matches - intersectStart = Math.max(c.p, otherC.p); - intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length); - cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p); - otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p); - if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); } + intersectStart = Math.max(c.p, otherC.p) + intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length) + cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p) + otherIntersect = otherC.d.slice( + intersectStart - otherC.p, + intersectEnd - otherC.p + ) + if (cIntersect !== otherIntersect) { + throw new Error( + 'Delete ops delete different text in the same region of the document' + ) + } if (newC.d !== '') { // This could be rewritten similarly to insert v delete, above. - newC.p = transformPosition(newC.p, otherC); - append(dest, newC); + newC.p = transformPosition(newC.p, otherC) + append(dest, newC) } } - } else if (otherC.c != null) { - append(dest, c); - + append(dest, c) } else { - throw new Error("unknown op type"); + throw new Error('unknown op type') } - - } else if (c.c != null) { // Comment + } else if (c.c != null) { + // Comment if (otherC.i != null) { if (c.p < otherC.p && otherC.p < c.p + c.c.length) { - const offset = otherC.p - c.p; - const new_c = (c.c.slice(0, +(offset-1) + 1 || undefined) + otherC.i + c.c.slice(offset)); - append(dest, {c:new_c, p:c.p, t: c.t}); + const offset = otherC.p - c.p + const new_c = + c.c.slice(0, +(offset - 1) + 1 || undefined) + + otherC.i + + c.c.slice(offset) + append(dest, { c: new_c, p: c.p, t: c.t }) } else { - append(dest, {c:c.c, p:transformPosition(c.p, otherC, true), t: c.t}); + append(dest, { + c: c.c, + p: transformPosition(c.p, otherC, true), + t: c.t + }) } - } else if (otherC.d != null) { - if (c.p >= (otherC.p + otherC.d.length)) { - append(dest, {c:c.c, p:c.p - otherC.d.length, t: c.t}); - } else if ((c.p + c.c.length) <= otherC.p) { - append(dest, c); - } else { // Delete overlaps comment + if (c.p >= otherC.p + otherC.d.length) { + append(dest, { c: c.c, p: c.p - otherC.d.length, t: c.t }) + } else if (c.p + c.c.length <= otherC.p) { + append(dest, c) + } else { + // Delete overlaps comment // They overlap somewhere. 
- newC = {c:'', p:c.p, t: c.t}; + newC = { c: '', p: c.p, t: c.t } if (c.p < otherC.p) { - newC.c = c.c.slice(0, (otherC.p - c.p)); + newC.c = c.c.slice(0, otherC.p - c.p) } - if ((c.p + c.c.length) > (otherC.p + otherC.d.length)) { - newC.c += c.c.slice(((otherC.p + otherC.d.length) - c.p)); + if (c.p + c.c.length > otherC.p + otherC.d.length) { + newC.c += c.c.slice(otherC.p + otherC.d.length - c.p) } // This is entirely optional - just for a check that the deleted // text in the two ops matches - intersectStart = Math.max(c.p, otherC.p); - intersectEnd = Math.min(c.p + c.c.length, otherC.p + otherC.d.length); - cIntersect = c.c.slice(intersectStart - c.p, intersectEnd - c.p); - otherIntersect = otherC.d.slice(intersectStart - otherC.p, intersectEnd - otherC.p); - if (cIntersect !== otherIntersect) { throw new Error('Delete ops delete different text in the same region of the document'); } + intersectStart = Math.max(c.p, otherC.p) + intersectEnd = Math.min(c.p + c.c.length, otherC.p + otherC.d.length) + cIntersect = c.c.slice(intersectStart - c.p, intersectEnd - c.p) + otherIntersect = otherC.d.slice( + intersectStart - otherC.p, + intersectEnd - otherC.p + ) + if (cIntersect !== otherIntersect) { + throw new Error( + 'Delete ops delete different text in the same region of the document' + ) + } - newC.p = transformPosition(newC.p, otherC); - append(dest, newC); + newC.p = transformPosition(newC.p, otherC) + append(dest, newC) } - } else if (otherC.c != null) { - append(dest, c); - + append(dest, c) } else { - throw new Error("unknown op type"); + throw new Error('unknown op type') } } - - return dest; -}); -const invertComponent = function(c) { + return dest +} + +const invertComponent = function (c) { if (c.i != null) { - return {d:c.i, p:c.p}; + return { d: c.i, p: c.p } } else { - return {i:c.d, p:c.p}; + return { i: c.d, p: c.p } } -}; +} // No need to use append for invert, because the components won't be able to // cancel with one another. -text.invert = op => Array.from(op.slice().reverse()).map((c) => invertComponent(c)); - +text.invert = (op) => + Array.from(op.slice().reverse()).map((c) => invertComponent(c)) if (typeof WEB !== 'undefined' && WEB !== null) { - if (!exports.types) { exports.types = {}; } + if (!exports.types) { + exports.types = {} + } // This is kind of awful - come up with a better way to hook this helper code up. - bootstrapTransform(text, transformComponent, checkValidOp, append); + bootstrapTransform(text, transformComponent, checkValidOp, append) // [] is used to prevent closure from renaming types.text - exports.types.text = text; + exports.types.text = text } else { - module.exports = text; + module.exports = text // The text type really shouldn't need this - it should be possible to define // an efficient transform function by making a sort of transform map and passing each // op component through it. 
- require('./helpers').bootstrapTransform(text, transformComponent, checkValidOp, append); + require('./helpers').bootstrapTransform( + text, + transformComponent, + checkValidOp, + append + ) } - diff --git a/services/document-updater/app/js/sharejs/types/web-prelude.js b/services/document-updater/app/js/sharejs/types/web-prelude.js index e6a7529a52..a4c3a0f22e 100644 --- a/services/document-updater/app/js/sharejs/types/web-prelude.js +++ b/services/document-updater/app/js/sharejs/types/web-prelude.js @@ -9,8 +9,6 @@ @const @type {boolean} */ -const WEB = true; - - -const exports = window.sharejs; +const WEB = true +const exports = window.sharejs diff --git a/services/document-updater/app/js/sharejs/web-prelude.js b/services/document-updater/app/js/sharejs/web-prelude.js index e6a7529a52..a4c3a0f22e 100644 --- a/services/document-updater/app/js/sharejs/web-prelude.js +++ b/services/document-updater/app/js/sharejs/web-prelude.js @@ -9,8 +9,6 @@ @const @type {boolean} */ -const WEB = true; - - -const exports = window.sharejs; +const WEB = true +const exports = window.sharejs From 6c4d7fb8382d463eb32b26bf85e3367f14fbbcf3 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:10:11 +0200 Subject: [PATCH 614/769] decaffeinate: Rename DiffCodecTests.coffee and 23 other files from .coffee to .js --- .../coffee/DiffCodec/{DiffCodecTests.coffee => DiffCodecTests.js} | 0 .../{DispatchManagerTests.coffee => DispatchManagerTests.js} | 0 .../{DocumentManagerTests.coffee => DocumentManagerTests.js} | 0 .../{HistoryManagerTests.coffee => HistoryManagerTests.js} | 0 ...istoryRedisManagerTests.coffee => HistoryRedisManagerTests.js} | 0 .../{HttpControllerTests.coffee => HttpControllerTests.js} | 0 .../LockManager/{CheckingTheLock.coffee => CheckingTheLock.js} | 0 .../LockManager/{ReleasingTheLock.coffee => ReleasingTheLock.js} | 0 .../coffee/LockManager/{getLockTests.coffee => getLockTests.js} | 0 .../coffee/LockManager/{tryLockTests.coffee => tryLockTests.js} | 0 ...{PersistenceManagerTests.coffee => PersistenceManagerTests.js} | 0 ...edisManagerTests.coffee => ProjectHistoryRedisManagerTests.js} | 0 ...AndDeleteProjectTests.coffee => flushAndDeleteProjectTests.js} | 0 .../{flushProjectTests.coffee => flushProjectTests.js} | 0 .../{getProjectDocsTests.coffee => getProjectDocsTests.js} | 0 .../{updateProjectTests.coffee => updateProjectTests.js} | 0 .../{RangesManagerTests.coffee => RangesManagerTests.js} | 0 .../{RateLimitManager.coffee => RateLimitManager.js} | 0 ...lTimeRedisManagerTests.coffee => RealTimeRedisManagerTests.js} | 0 .../{RedisManagerTests.coffee => RedisManagerTests.js} | 0 .../ShareJS/{TextTransformTests.coffee => TextTransformTests.js} | 0 .../coffee/ShareJsDB/{ShareJsDBTests.coffee => ShareJsDBTests.js} | 0 ...reJsUpdateManagerTests.coffee => ShareJsUpdateManagerTests.js} | 0 .../{UpdateManagerTests.coffee => UpdateManagerTests.js} | 0 24 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/test/unit/coffee/DiffCodec/{DiffCodecTests.coffee => DiffCodecTests.js} (100%) rename services/document-updater/test/unit/coffee/DispatchManager/{DispatchManagerTests.coffee => DispatchManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/DocumentManager/{DocumentManagerTests.coffee => DocumentManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/HistoryManager/{HistoryManagerTests.coffee => HistoryManagerTests.js} (100%) rename 
services/document-updater/test/unit/coffee/HistoryRedisManager/{HistoryRedisManagerTests.coffee => HistoryRedisManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/HttpController/{HttpControllerTests.coffee => HttpControllerTests.js} (100%) rename services/document-updater/test/unit/coffee/LockManager/{CheckingTheLock.coffee => CheckingTheLock.js} (100%) rename services/document-updater/test/unit/coffee/LockManager/{ReleasingTheLock.coffee => ReleasingTheLock.js} (100%) rename services/document-updater/test/unit/coffee/LockManager/{getLockTests.coffee => getLockTests.js} (100%) rename services/document-updater/test/unit/coffee/LockManager/{tryLockTests.coffee => tryLockTests.js} (100%) rename services/document-updater/test/unit/coffee/PersistenceManager/{PersistenceManagerTests.coffee => PersistenceManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/{ProjectHistoryRedisManagerTests.coffee => ProjectHistoryRedisManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/ProjectManager/{flushAndDeleteProjectTests.coffee => flushAndDeleteProjectTests.js} (100%) rename services/document-updater/test/unit/coffee/ProjectManager/{flushProjectTests.coffee => flushProjectTests.js} (100%) rename services/document-updater/test/unit/coffee/ProjectManager/{getProjectDocsTests.coffee => getProjectDocsTests.js} (100%) rename services/document-updater/test/unit/coffee/ProjectManager/{updateProjectTests.coffee => updateProjectTests.js} (100%) rename services/document-updater/test/unit/coffee/RangesManager/{RangesManagerTests.coffee => RangesManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/RateLimitManager/{RateLimitManager.coffee => RateLimitManager.js} (100%) rename services/document-updater/test/unit/coffee/RealTimeRedisManager/{RealTimeRedisManagerTests.coffee => RealTimeRedisManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/RedisManager/{RedisManagerTests.coffee => RedisManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/ShareJS/{TextTransformTests.coffee => TextTransformTests.js} (100%) rename services/document-updater/test/unit/coffee/ShareJsDB/{ShareJsDBTests.coffee => ShareJsDBTests.js} (100%) rename services/document-updater/test/unit/coffee/ShareJsUpdateManager/{ShareJsUpdateManagerTests.coffee => ShareJsUpdateManagerTests.js} (100%) rename services/document-updater/test/unit/coffee/UpdateManager/{UpdateManagerTests.coffee => UpdateManagerTests.js} (100%) diff --git a/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.coffee b/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.coffee rename to services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.coffee rename to services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js similarity index 100% rename from 
services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.coffee rename to services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.coffee rename to services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.coffee rename to services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.coffee rename to services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js similarity index 100% rename from services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.coffee rename to services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js similarity index 100% rename from services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.coffee rename to services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/getLockTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/LockManager/getLockTests.coffee rename to services/document-updater/test/unit/coffee/LockManager/getLockTests.js diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/LockManager/tryLockTests.coffee rename to services/document-updater/test/unit/coffee/LockManager/tryLockTests.js diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.coffee rename to services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js similarity index 100% rename from 
services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.coffee rename to services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.coffee rename to services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.coffee rename to services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.coffee rename to services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.coffee rename to services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.coffee rename to services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js diff --git a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js similarity index 100% rename from services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.coffee rename to services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.coffee rename to services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee rename to services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js 
similarity index 100% rename from services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.coffee rename to services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.coffee b/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.coffee rename to services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.coffee rename to services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.coffee rename to services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js From c781526af0f55a5b31eef865db5c7aa1172ae1eb Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:10:51 +0200 Subject: [PATCH 615/769] decaffeinate: Convert DiffCodecTests.coffee and 23 other files to JS --- .../unit/coffee/DiffCodec/DiffCodecTests.js | 106 +- .../DispatchManager/DispatchManagerTests.js | 217 ++- .../DocumentManager/DocumentManagerTests.js | 1229 ++++++++------ .../HistoryManager/HistoryManagerTests.js | 438 ++--- .../HistoryRedisManagerTests.js | 119 +- .../HttpController/HttpControllerTests.js | 1148 +++++++------ .../coffee/LockManager/CheckingTheLock.js | 89 +- .../coffee/LockManager/ReleasingTheLock.js | 119 +- .../unit/coffee/LockManager/getLockTests.js | 172 +- .../unit/coffee/LockManager/tryLockTests.js | 186 ++- .../PersistenceManagerTests.js | 426 +++-- .../ProjectHistoryRedisManagerTests.js | 230 +-- .../flushAndDeleteProjectTests.js | 185 +- .../ProjectManager/flushProjectTests.js | 167 +- .../ProjectManager/getProjectDocsTests.js | 245 +-- .../ProjectManager/updateProjectTests.js | 356 ++-- .../RangesManager/RangesManagerTests.js | 573 ++++--- .../RateLimitManager/RateLimitManager.js | 188 ++- .../RealTimeRedisManagerTests.js | 184 +- .../coffee/RedisManager/RedisManagerTests.js | 1481 ++++++++++------- .../unit/coffee/ShareJS/TextTransformTests.js | 553 +++--- .../unit/coffee/ShareJsDB/ShareJsDBTests.js | 186 ++- .../ShareJsUpdateManagerTests.js | 270 +-- .../UpdateManager/UpdateManagerTests.js | 708 ++++---- 24 files changed, 5566 insertions(+), 4009 deletions(-) diff --git a/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js index bcd07c0479..f3e6f5bbee 100644 --- a/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js +++ b/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js @@ -1,56 +1,76 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../../app/js/DiffCodec.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of 
implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../../app/js/DiffCodec.js"; +const SandboxedModule = require('sandboxed-module'); -describe "DiffCodec", -> - beforeEach -> - @callback = sinon.stub() - @DiffCodec = SandboxedModule.require modulePath +describe("DiffCodec", function() { + beforeEach(function() { + this.callback = sinon.stub(); + return this.DiffCodec = SandboxedModule.require(modulePath); + }); - describe "diffAsShareJsOps", -> - it "should insert new text correctly", (done) -> - @before = ["hello world"] - @after = ["hello beautiful world"] - @DiffCodec.diffAsShareJsOp @before, @after, (error, ops) -> - expect(ops).to.deep.equal [ - i: "beautiful " + return describe("diffAsShareJsOps", function() { + it("should insert new text correctly", function(done) { + this.before = ["hello world"]; + this.after = ["hello beautiful world"]; + return this.DiffCodec.diffAsShareJsOp(this.before, this.after, function(error, ops) { + expect(ops).to.deep.equal([{ + i: "beautiful ", p: 6 - ] - done() + } + ]); + return done(); + }); + }); - it "should shift later inserts by previous inserts", (done) -> - @before = ["the boy played with the ball"] - @after = ["the tall boy played with the red ball"] - @DiffCodec.diffAsShareJsOp @before, @after, (error, ops) -> - expect(ops).to.deep.equal [ - { i: "tall ", p: 4 } + it("should shift later inserts by previous inserts", function(done) { + this.before = ["the boy played with the ball"]; + this.after = ["the tall boy played with the red ball"]; + return this.DiffCodec.diffAsShareJsOp(this.before, this.after, function(error, ops) { + expect(ops).to.deep.equal([ + { i: "tall ", p: 4 }, { i: "red ", p: 29 } - ] - done() + ]); + return done(); + }); + }); - it "should delete text correctly", (done) -> - @before = ["hello beautiful world"] - @after = ["hello world"] - @DiffCodec.diffAsShareJsOp @before, @after, (error, ops) -> - expect(ops).to.deep.equal [ - d: "beautiful " + it("should delete text correctly", function(done) { + this.before = ["hello beautiful world"]; + this.after = ["hello world"]; + return this.DiffCodec.diffAsShareJsOp(this.before, this.after, function(error, ops) { + expect(ops).to.deep.equal([{ + d: "beautiful ", p: 6 - ] - done() + } + ]); + return done(); + }); + }); - it "should shift later deletes by the first deletes", (done) -> - @before = ["the tall boy played with the red ball"] - @after = ["the boy played with the ball"] - @DiffCodec.diffAsShareJsOp @before, @after, (error, ops) -> - expect(ops).to.deep.equal [ - { d: "tall ", p: 4 } + return it("should shift later deletes by the first deletes", function(done) { + this.before = ["the tall boy played with the red ball"]; + this.after = ["the boy played with the ball"]; + return this.DiffCodec.diffAsShareJsOp(this.before, this.after, function(error, ops) { + expect(ops).to.deep.equal([ + { d: "tall ", p: 4 }, { d: "red ", p: 24 } - ] - done() + ]); + return done(); + }); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js index 773a85afd1..4ba0e2d16c 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js +++ 
b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js @@ -1,110 +1,147 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/DispatchManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/DispatchManager.js"; +const SandboxedModule = require('sandboxed-module'); +const Errors = require("../../../../app/js/Errors.js"); -describe "DispatchManager", -> - beforeEach -> - @timeout(3000) - @DispatchManager = SandboxedModule.require modulePath, requires: - "./UpdateManager" : @UpdateManager = {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() } - "settings-sharelatex": @settings = - redis: +describe("DispatchManager", function() { + beforeEach(function() { + this.timeout(3000); + this.DispatchManager = SandboxedModule.require(modulePath, { requires: { + "./UpdateManager" : (this.UpdateManager = {}), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() }), + "settings-sharelatex": (this.settings = { + redis: { documentupdater: {} - "redis-sharelatex": @redis = {} - "./RateLimitManager": {} - "./Errors": Errors - "./Metrics": - Timer: -> - done: -> - @callback = sinon.stub() - @RateLimiter = { run: (task,cb) -> task(cb) } # run task without rate limit + } + }), + "redis-sharelatex": (this.redis = {}), + "./RateLimitManager": {}, + "./Errors": Errors, + "./Metrics": { + Timer() { + return {done() {}}; + } + } + } + } + ); + this.callback = sinon.stub(); + return this.RateLimiter = { run(task,cb) { return task(cb); } };}); // run task without rate limit - describe "each worker", -> - beforeEach -> - @client = - auth: sinon.stub() - @redis.createClient = sinon.stub().returns @client - @worker = @DispatchManager.createDispatcher(@RateLimiter) + return describe("each worker", function() { + beforeEach(function() { + this.client = + {auth: sinon.stub()}; + this.redis.createClient = sinon.stub().returns(this.client); + return this.worker = this.DispatchManager.createDispatcher(this.RateLimiter); + }); - it "should create a new redis client", -> - @redis.createClient.called.should.equal true + it("should create a new redis client", function() { + return this.redis.createClient.called.should.equal(true); + }); - describe "_waitForUpdateThenDispatchWorker", -> - beforeEach -> - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @doc_key = "#{@project_id}:#{@doc_id}" - @client.blpop = sinon.stub().callsArgWith(2, null, ["pending-updates-list", @doc_key]) + describe("_waitForUpdateThenDispatchWorker", function() { + beforeEach(function() { + this.project_id = "project-id-123"; + this.doc_id = "doc-id-123"; + this.doc_key = `${this.project_id}:${this.doc_id}`; + return this.client.blpop = sinon.stub().callsArgWith(2, null, ["pending-updates-list", this.doc_key]); + }); - describe "in the normal case", -> - beforeEach -> - @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) - @worker._waitForUpdateThenDispatchWorker @callback + describe("in the normal case", function() 
{ + beforeEach(function() { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2); + return this.worker._waitForUpdateThenDispatchWorker(this.callback); + }); - it "should call redis with BLPOP", -> - @client.blpop + it("should call redis with BLPOP", function() { + return this.client.blpop .calledWith("pending-updates-list", 0) - .should.equal true + .should.equal(true); + }); - it "should call processOutstandingUpdatesWithLock", -> - @UpdateManager.processOutstandingUpdatesWithLock - .calledWith(@project_id, @doc_id) - .should.equal true + it("should call processOutstandingUpdatesWithLock", function() { + return this.UpdateManager.processOutstandingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); - it "should not log any errors", -> - @logger.error.called.should.equal false - @logger.warn.called.should.equal false + it("should not log any errors", function() { + this.logger.error.called.should.equal(false); + return this.logger.warn.called.should.equal(false); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "with an error", -> - beforeEach -> - @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArgWith(2, new Error("a generic error")) - @worker._waitForUpdateThenDispatchWorker @callback + describe("with an error", function() { + beforeEach(function() { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArgWith(2, new Error("a generic error")); + return this.worker._waitForUpdateThenDispatchWorker(this.callback); + }); - it "should log an error", -> - @logger.error.called.should.equal true + it("should log an error", function() { + return this.logger.error.called.should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "with a 'Delete component' error", -> - beforeEach -> - @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArgWith(2, new Errors.DeleteMismatchError()) - @worker._waitForUpdateThenDispatchWorker @callback + return describe("with a 'Delete component' error", function() { + beforeEach(function() { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArgWith(2, new Errors.DeleteMismatchError()); + return this.worker._waitForUpdateThenDispatchWorker(this.callback); + }); - it "should log a warning", -> - @logger.warn.called.should.equal true + it("should log a warning", function() { + return this.logger.warn.called.should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); + }); - describe "run", -> - it "should call _waitForUpdateThenDispatchWorker until shutting down", (done) -> - callCount = 0 - @worker._waitForUpdateThenDispatchWorker = (callback = (error) ->) => - callCount++ - if callCount == 3 - @settings.shuttingDown = true - setTimeout () -> - callback() - , 10 - sinon.spy @worker, "_waitForUpdateThenDispatchWorker" - - - @worker.run() + return describe("run", () => it("should call _waitForUpdateThenDispatchWorker until shutting down", function(done) { + let callCount = 0; + this.worker._waitForUpdateThenDispatchWorker = callback => { + if (callback 
== null) { callback = function(error) {}; } + callCount++; + if (callCount === 3) { + this.settings.shuttingDown = true; + } + return setTimeout(() => callback() + , 10); + }; + sinon.spy(this.worker, "_waitForUpdateThenDispatchWorker"); + + + this.worker.run(); - checkStatus = () => - if not @settings.shuttingDown # retry until shutdown - setTimeout checkStatus, 100 - return - else - @worker._waitForUpdateThenDispatchWorker.callCount.should.equal 3 - done() + var checkStatus = () => { + if (!this.settings.shuttingDown) { // retry until shutdown + setTimeout(checkStatus, 100); + return; + } else { + this.worker._waitForUpdateThenDispatchWorker.callCount.should.equal(3); + return done(); + } + }; - checkStatus() + return checkStatus(); + })); + }); +}); diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js index a8520f7fc1..a338685948 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js @@ -1,538 +1,695 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/DocumentManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" -tk = require "timekeeper" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/DocumentManager.js"; +const SandboxedModule = require('sandboxed-module'); +const Errors = require("../../../../app/js/Errors"); +const tk = require("timekeeper"); -describe "DocumentManager", -> - beforeEach -> - tk.freeze(new Date()) - @DocumentManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} - "./PersistenceManager": @PersistenceManager = {} - "./HistoryManager": @HistoryManager = - flushDocChangesAsync: sinon.stub() +describe("DocumentManager", function() { + beforeEach(function() { + let Timer; + tk.freeze(new Date()); + this.DocumentManager = SandboxedModule.require(modulePath, { requires: { + "./RedisManager": (this.RedisManager = {}), + "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), + "./PersistenceManager": (this.PersistenceManager = {}), + "./HistoryManager": (this.HistoryManager = { + flushDocChangesAsync: sinon.stub(), flushProjectChangesAsync: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub(), warn: sinon.stub()} - "./DocOpsManager": @DocOpsManager = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - "./RealTimeRedisManager": @RealTimeRedisManager = {} - "./DiffCodec": @DiffCodec = {} - "./UpdateManager": @UpdateManager = {} - "./RangesManager": @RangesManager = {} - @project_id = "project-id-123" - @projectHistoryId = "history-id-123" - @projectHistoryType = "project-history" - @doc_id = "doc-id-123" - @user_id = 1234 - @callback = sinon.stub() - @lines = ["one", "two", "three"] - @version = 42 - @ranges = { comments: 
"mock", entries: "mock" } - @pathname = '/a/b/c.tex' - @unflushedTime = Date.now() - @lastUpdatedAt = Date.now() - @lastUpdatedBy = 'last-author-id' - - afterEach -> - tk.reset() - - describe "flushAndDeleteDoc", -> - describe "successfully", -> - beforeEach -> - @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) - @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2) - @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, {}, @callback - - it "should flush the doc", -> - @DocumentManager.flushDocIfLoaded - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should remove the doc from redis", -> - @RedisManager.removeDocFromMemory - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - it "should flush to the history api", -> - @HistoryManager.flushDocChangesAsync - .calledWithExactly(@project_id, @doc_id) - .should.equal true - - describe "when a flush error occurs", -> - beforeEach -> - @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2, new Error("boom!")) - @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) - - it "should not remove the doc from redis", (done) -> - @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, {}, (error) => - error.should.exist - @RedisManager.removeDocFromMemory.called.should.equal false - done() - - describe "when ignoring flush errors", -> - it "should remove the doc from redis", (done) -> - @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, { ignoreFlushErrors: true }, (error) => - if error? - return done(error) - @RedisManager.removeDocFromMemory.called.should.equal true - done() - - describe "flushDocIfLoaded", -> - describe "when the doc is in Redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, @lastUpdatedAt, @lastUpdatedBy) - @RedisManager.clearUnflushedTime = sinon.stub().callsArgWith(1, null) - @PersistenceManager.setDoc = sinon.stub().yields() - @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback - - it "should get the doc from redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should write the doc lines to the persistence layer", -> - @PersistenceManager.setDoc - .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy) - .should.equal true - - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the document is not in Redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null) - @PersistenceManager.setDoc = sinon.stub().yields() - @DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) - @DocumentManager.flushDocIfLoaded @project_id, @doc_id, @callback - - it "should get the doc from redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should not write anything to the persistence layer", -> - @PersistenceManager.setDoc.called.should.equal false - @DocOpsManager.flushDocOpsToMongo.called.should.equal false - - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - 
@Metrics.Timer::done.called.should.equal true - - describe "getDocAndRecentOps", -> - describe "with a previous version specified", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocumentManager.getDocAndRecentOps @project_id, @doc_id, @fromVersion, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should get the doc ops", -> - @RedisManager.getPreviousDocOps - .calledWith(@doc_id, @fromVersion, @version) - .should.equal true - - it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ops, @ranges, @pathname, @projectHistoryId).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "with no previous version specified", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) - @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops) - @DocumentManager.getDocAndRecentOps @project_id, @doc_id, -1, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should not need to get the doc ops", -> - @RedisManager.getPreviousDocOps.called.should.equal false - - it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, [], @ranges, @pathname, @projectHistoryId).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "getDoc", -> - describe "when the doc exists in Redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime) - @DocumentManager.getDoc @project_id, @doc_id, @callback - - it "should get the doc from Redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the callback with the doc info", -> - @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, true).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when the doc does not exist in Redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId, @projectHistoryType) - @RedisManager.putDocInMemory = sinon.stub().yields() - @RedisManager.setHistoryType = sinon.stub().yields() - @DocumentManager.getDoc @project_id, @doc_id, @callback - - it "should try to get the doc from Redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should get the doc from the PersistenceManager", -> - @PersistenceManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should set the doc in Redis", -> - @RedisManager.putDocInMemory - .calledWith(@project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId) - .should.equal true - - it "should set the history type in Redis", -> - @RedisManager.setHistoryType - .calledWith(@doc_id, @projectHistoryType) - .should.equal true - - it "should call the callback with the doc 
info", -> - @callback.calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId, null, false).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "setDoc", -> - describe "with plain tex lines", -> - beforeEach -> - @beforeLines = ["before", "lines"] - @afterLines = ["after", "lines"] - @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @ranges, @pathname, @projectHistoryId, @unflushedTime, true) - @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) - @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) - @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) - @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(3) - - describe "when already loaded", -> - beforeEach -> - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, false, @callback - - it "should get the current doc lines", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should return a diff of the old and new lines", -> - @DiffCodec.diffAsShareJsOp - .calledWith(@beforeLines, @afterLines) - .should.equal true - - it "should apply the diff as a ShareJS op", -> - @UpdateManager.applyUpdate - .calledWith( - @project_id, - @doc_id, - { - doc: @doc_id, - v: @version, - op: @ops, - meta: { - type: "external" - source: @source - user_id: @user_id - } - } - ) - .should.equal true - - it "should flush the doc to Mongo", -> - @DocumentManager.flushDocIfLoaded - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should not flush the project history", -> - @HistoryManager.flushProjectChangesAsync - .called.should.equal false - - it "should call the callback", -> - @callback.calledWith(null).should.equal true - - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true - - describe "when not already loaded", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @beforeLines, @version, @pathname, null, false) - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, false, @callback - - it "should flush and delete the doc from the doc updater", -> - @DocumentManager.flushAndDeleteDoc - .calledWith(@project_id, @doc_id, {}) - .should.equal true - - it "should not flush the project history", -> - @HistoryManager.flushProjectChangesAsync - .calledWithExactly(@project_id) - .should.equal true - - describe "without new lines", -> - beforeEach -> - @DocumentManager.setDoc @project_id, @doc_id, null, @source, @user_id, false, @callback - - it "should return the callback with an error", -> - @callback.calledWith(new Error("No lines were passed to setDoc")) - - it "should not try to get the doc lines", -> - @DocumentManager.getDoc.called.should.equal false - - describe "with the undoing flag", -> - beforeEach -> - # Copy ops so we don't interfere with other tests - @ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }] - @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops) - @DocumentManager.setDoc @project_id, @doc_id, @afterLines, @source, @user_id, true, @callback - - it "should set the undo flag on each op", -> - for op in @ops - op.u.should.equal true - - describe "acceptChanges", -> - beforeEach -> - @change_id = "mock-change-id" - @change_ids = [ "mock-change-id-1", "mock-change-id-2", "mock-change-id-3", "mock-change-id-4" ] - @version = 34 - @lines = ["original", "lines"] - 
@ranges = { entries: "mock", comments: "mock" } - @updated_ranges = { entries: "updated", comments: "updated" } - @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) - @RangesManager.acceptChanges = sinon.stub().yields(null, @updated_ranges) - @RedisManager.updateDocument = sinon.stub().yields() - - describe "successfully with a single change", -> - beforeEach -> - @DocumentManager.acceptChanges @project_id, @doc_id, [ @change_id ], @callback - - it "should get the document's current ranges", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should apply the accept change to the ranges", -> - @RangesManager.acceptChanges - .calledWith([ @change_id ], @ranges) - .should.equal true - - it "should save the updated ranges", -> - @RedisManager.updateDocument - .calledWith(@project_id, @doc_id, @lines, @version, [], @updated_ranges, {}) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - - describe "successfully with multiple changes", -> - beforeEach -> - @DocumentManager.acceptChanges @project_id, @doc_id, @change_ids, @callback - - it "should apply the accept change to the ranges", -> - @RangesManager.acceptChanges - .calledWith(@change_ids, @ranges) - .should.equal true - - describe "when the doc is not found", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().yields(null, null, null, null) - @DocumentManager.acceptChanges @project_id, @doc_id, [ @change_id ], @callback - - it "should not save anything", -> - @RedisManager.updateDocument.called.should.equal false - - it "should call the callback with a not found error", -> - error = new Errors.NotFoundError("document not found: #{@doc_id}") - @callback.calledWith(error).should.equal true - - describe "deleteComment", -> - beforeEach -> - @comment_id = "mock-comment-id" - @version = 34 - @lines = ["original", "lines"] - @ranges = { comments: ["one", "two", "three"] } - @updated_ranges = { comments: ["one", "three"] } - @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges) - @RangesManager.deleteComment = sinon.stub().yields(null, @updated_ranges) - @RedisManager.updateDocument = sinon.stub().yields() - - describe "successfully", -> - beforeEach -> - @DocumentManager.deleteComment @project_id, @doc_id, @comment_id, @callback - - it "should get the document's current ranges", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should delete the comment from the ranges", -> - @RangesManager.deleteComment - .calledWith(@comment_id, @ranges) - .should.equal true - - it "should save the updated ranges", -> - @RedisManager.updateDocument - .calledWith(@project_id, @doc_id, @lines, @version, [], @updated_ranges, {}) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - - describe "when the doc is not found", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().yields(null, null, null, null) - @DocumentManager.acceptChanges @project_id, @doc_id, [ @comment_id ], @callback - - it "should not save anything", -> - @RedisManager.updateDocument.called.should.equal false - - it "should call the callback with a not found error", -> - error = new Errors.NotFoundError("document not found: #{@doc_id}") - @callback.calledWith(error).should.equal true - - describe "getDocAndFlushIfOld", -> - beforeEach -> - @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) - - describe "when the doc is in Redis", -> - describe 
"and has changes to be flushed", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @projectHistoryId, @pathname, Date.now() - 1e9, true) - @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should flush the doc", -> - @DocumentManager.flushDocIfLoaded - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should call the callback with the lines and versions", -> - @callback.calledWith(null, @lines, @version).should.equal true - - describe "and has only changes that don't need to be flushed", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, Date.now() - 100, true) - @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should not flush the doc", -> - @DocumentManager.flushDocIfLoaded - .called.should.equal false - - it "should call the callback with the lines and versions", -> - @callback.calledWith(null, @lines, @version).should.equal true - - describe "when the doc is not in Redis", -> - beforeEach -> - @DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, null, false) - @DocumentManager.getDocAndFlushIfOld @project_id, @doc_id, @callback - - it "should get the doc", -> - @DocumentManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "should not flush the doc", -> - @DocumentManager.flushDocIfLoaded - .called.should.equal false - - it "should call the callback with the lines and versions", -> - @callback.calledWith(null, @lines, @version).should.equal true - - describe "renameDoc", -> - beforeEach -> - @update = 'some-update' - @RedisManager.renameDoc = sinon.stub().yields() - - describe "successfully", -> - beforeEach -> - @DocumentManager.renameDoc @project_id, @doc_id, @user_id, @update, @projectHistoryId, @callback - - it "should rename the document", -> - @RedisManager.renameDoc - .calledWith(@project_id, @doc_id, @user_id, @update, @projectHistoryId) - .should.equal true - - it "should call the callback", -> - @callback.called.should.equal true - - describe "resyncDocContents", -> - describe "when doc is loaded in redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) - @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() - @DocumentManager.resyncDocContents @project_id, @doc_id, @callback - - it "gets the doc contents from redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "queues a resync doc content update", -> - @ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(@project_id, @projectHistoryId, @doc_id, @lines, @version, @pathname, @callback) - .should.equal true - - describe "when doc is not loaded in redis", -> - beforeEach -> - @RedisManager.getDoc = sinon.stub().callsArgWith(2, null) - @PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname, @projectHistoryId) - @ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() - @DocumentManager.resyncDocContents @project_id, @doc_id, @callback - - it "tries to get the doc contents from redis", -> - @RedisManager.getDoc - .calledWith(@project_id, @doc_id) 
- .should.equal true - - it "gets the doc contents from web", -> - @PersistenceManager.getDoc - .calledWith(@project_id, @doc_id) - .should.equal true - - it "queues a resync doc content update", -> - @ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(@project_id, @projectHistoryId, @doc_id, @lines, @version, @pathname, @callback) - .should.equal true + }), + "logger-sharelatex": (this.logger = {log: sinon.stub(), warn: sinon.stub()}), + "./DocOpsManager": (this.DocOpsManager = {}), + "./Metrics": (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()) + }), + "./RealTimeRedisManager": (this.RealTimeRedisManager = {}), + "./DiffCodec": (this.DiffCodec = {}), + "./UpdateManager": (this.UpdateManager = {}), + "./RangesManager": (this.RangesManager = {}) + } + }); + this.project_id = "project-id-123"; + this.projectHistoryId = "history-id-123"; + this.projectHistoryType = "project-history"; + this.doc_id = "doc-id-123"; + this.user_id = 1234; + this.callback = sinon.stub(); + this.lines = ["one", "two", "three"]; + this.version = 42; + this.ranges = { comments: "mock", entries: "mock" }; + this.pathname = '/a/b/c.tex'; + this.unflushedTime = Date.now(); + this.lastUpdatedAt = Date.now(); + return this.lastUpdatedBy = 'last-author-id'; + }); + + afterEach(() => tk.reset()); + + describe("flushAndDeleteDoc", function() { + describe("successfully", function() { + beforeEach(function() { + this.RedisManager.removeDocFromMemory = sinon.stub().callsArg(2); + this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2); + return this.DocumentManager.flushAndDeleteDoc(this.project_id, this.doc_id, {}, this.callback); + }); + + it("should flush the doc", function() { + return this.DocumentManager.flushDocIfLoaded + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should remove the doc from redis", function() { + return this.RedisManager.removeDocFromMemory + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should call the callback without error", function() { + return this.callback.calledWith(null).should.equal(true); + }); + + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + + return it("should flush to the history api", function() { + return this.HistoryManager.flushDocChangesAsync + .calledWithExactly(this.project_id, this.doc_id) + .should.equal(true); + }); + }); + + return describe("when a flush error occurs", function() { + beforeEach(function() { + this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2, new Error("boom!")); + return this.RedisManager.removeDocFromMemory = sinon.stub().callsArg(2); + }); + + it("should not remove the doc from redis", function(done) { + return this.DocumentManager.flushAndDeleteDoc(this.project_id, this.doc_id, {}, error => { + error.should.exist; + this.RedisManager.removeDocFromMemory.called.should.equal(false); + return done(); + }); + }); + + return describe("when ignoring flush errors", () => it("should remove the doc from redis", function(done) { + return this.DocumentManager.flushAndDeleteDoc(this.project_id, this.doc_id, { ignoreFlushErrors: true }, error => { + if (error != null) { + return done(error); + } + this.RedisManager.removeDocFromMemory.called.should.equal(true); + return done(); + }); + })); + }); + }); + + describe("flushDocIfLoaded", function() 
{ + describe("when the doc is in Redis", function() { + beforeEach(function() { + this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushedTime, this.lastUpdatedAt, this.lastUpdatedBy); + this.RedisManager.clearUnflushedTime = sinon.stub().callsArgWith(1, null); + this.PersistenceManager.setDoc = sinon.stub().yields(); + return this.DocumentManager.flushDocIfLoaded(this.project_id, this.doc_id, this.callback); + }); + + it("should get the doc from redis", function() { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should write the doc lines to the persistence layer", function() { + return this.PersistenceManager.setDoc + .calledWith(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy) + .should.equal(true); + }); + + it("should call the callback without error", function() { + return this.callback.calledWith(null).should.equal(true); + }); + + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); + + return describe("when the document is not in Redis", function() { + beforeEach(function() { + this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null); + this.PersistenceManager.setDoc = sinon.stub().yields(); + this.DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2); + return this.DocumentManager.flushDocIfLoaded(this.project_id, this.doc_id, this.callback); + }); + + it("should get the doc from redis", function() { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should not write anything to the persistence layer", function() { + this.PersistenceManager.setDoc.called.should.equal(false); + return this.DocOpsManager.flushDocOpsToMongo.called.should.equal(false); + }); + + it("should call the callback without error", function() { + return this.callback.calledWith(null).should.equal(true); + }); + + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); + }); + + describe("getDocAndRecentOps", function() { + describe("with a previous version specified", function() { + beforeEach(function() { + this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); + this.RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, this.ops); + return this.DocumentManager.getDocAndRecentOps(this.project_id, this.doc_id, this.fromVersion, this.callback); + }); + + it("should get the doc", function() { + return this.DocumentManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should get the doc ops", function() { + return this.RedisManager.getPreviousDocOps + .calledWith(this.doc_id, this.fromVersion, this.version) + .should.equal(true); + }); + + it("should call the callback with the doc info", function() { + return this.callback.calledWith(null, this.lines, this.version, this.ops, this.ranges, this.pathname, this.projectHistoryId).should.equal(true); + }); + + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); + + return describe("with no previous version specified", function() { + beforeEach(function() { + 
this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); + this.RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, this.ops); + return this.DocumentManager.getDocAndRecentOps(this.project_id, this.doc_id, -1, this.callback); + }); + + it("should get the doc", function() { + return this.DocumentManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should not need to get the doc ops", function() { + return this.RedisManager.getPreviousDocOps.called.should.equal(false); + }); + + it("should call the callback with the doc info", function() { + return this.callback.calledWith(null, this.lines, this.version, [], this.ranges, this.pathname, this.projectHistoryId).should.equal(true); + }); + + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); + }); + + describe("getDoc", function() { + describe("when the doc exists in Redis", function() { + beforeEach(function() { + this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushedTime); + return this.DocumentManager.getDoc(this.project_id, this.doc_id, this.callback); + }); + + it("should get the doc from Redis", function() { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should call the callback with the doc info", function() { + return this.callback.calledWith(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushedTime, true).should.equal(true); + }); + + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); + + return describe("when the doc does not exist in Redis", function() { + beforeEach(function() { + this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null, null); + this.PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.projectHistoryType); + this.RedisManager.putDocInMemory = sinon.stub().yields(); + this.RedisManager.setHistoryType = sinon.stub().yields(); + return this.DocumentManager.getDoc(this.project_id, this.doc_id, this.callback); + }); + + it("should try to get the doc from Redis", function() { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should get the doc from the PersistenceManager", function() { + return this.PersistenceManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should set the doc in Redis", function() { + return this.RedisManager.putDocInMemory + .calledWith(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId) + .should.equal(true); + }); + + it("should set the history type in Redis", function() { + return this.RedisManager.setHistoryType + .calledWith(this.doc_id, this.projectHistoryType) + .should.equal(true); + }); + + it("should call the callback with the doc info", function() { + return this.callback.calledWith(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, null, false).should.equal(true); + }); + + return it("should time the execution", function() { + return 
this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
+  });
+
+  describe("setDoc", () => describe("with plain text lines", function() {
+    beforeEach(function() {
+      this.beforeLines = ["before", "lines"];
+      this.afterLines = ["after", "lines"];
+      this.ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }];
+      this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.beforeLines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushedTime, true);
+      this.DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, this.ops);
+      this.UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null);
+      this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2);
+      return this.DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(3);
+    });
+
+    describe("when already loaded", function() {
+      beforeEach(function() {
+        return this.DocumentManager.setDoc(this.project_id, this.doc_id, this.afterLines, this.source, this.user_id, false, this.callback);
+      });
+
+      it("should get the current doc lines", function() {
+        return this.DocumentManager.getDoc
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true);
+      });
+
+      it("should return a diff of the old and new lines", function() {
+        return this.DiffCodec.diffAsShareJsOp
+          .calledWith(this.beforeLines, this.afterLines)
+          .should.equal(true);
+      });
+
+      it("should apply the diff as a ShareJS op", function() {
+        return this.UpdateManager.applyUpdate
+          .calledWith(
+            this.project_id,
+            this.doc_id,
+            {
+              doc: this.doc_id,
+              v: this.version,
+              op: this.ops,
+              meta: {
+                type: "external",
+                source: this.source,
+                user_id: this.user_id
+              }
+            }
+          )
+          .should.equal(true);
+      });
+
+      it("should flush the doc to Mongo", function() {
+        return this.DocumentManager.flushDocIfLoaded
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true);
+      });
+
+      it("should not flush the project history", function() {
+        return this.HistoryManager.flushProjectChangesAsync
+          .called.should.equal(false);
+      });
+
+      it("should call the callback", function() {
+        return this.callback.calledWith(null).should.equal(true);
+      });
+
+      return it("should time the execution", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
+
+    describe("when not already loaded", function() {
+      beforeEach(function() {
+        this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.beforeLines, this.version, this.pathname, null, false);
+        return this.DocumentManager.setDoc(this.project_id, this.doc_id, this.afterLines, this.source, this.user_id, false, this.callback);
+      });
+
+      it("should flush and delete the doc from the doc updater", function() {
+        return this.DocumentManager.flushAndDeleteDoc
+          .calledWith(this.project_id, this.doc_id, {})
+          .should.equal(true);
+      });
+
+      return it("should flush the project history", function() {
+        return this.HistoryManager.flushProjectChangesAsync
+          .calledWithExactly(this.project_id)
+          .should.equal(true);
+      });
+    });
+
+    describe("without new lines", function() {
+      beforeEach(function() {
+        return this.DocumentManager.setDoc(this.project_id, this.doc_id, null, this.source, this.user_id, false, this.callback);
+      });
+
+      it("should return the callback with an error", function() {
+        return this.callback.calledWith(new Error("No lines were passed to setDoc")).should.equal(true);
+      });
+
+      return it("should not try to get the doc lines", function() {
+        return this.DocumentManager.getDoc.called.should.equal(false);
+      });
+    });
+
+    return describe("with the undoing flag",
function() { + beforeEach(function() { + // Copy ops so we don't interfere with other tests + this.ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }]; + this.DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, this.ops); + return this.DocumentManager.setDoc(this.project_id, this.doc_id, this.afterLines, this.source, this.user_id, true, this.callback); + }); + + return it("should set the undo flag on each op", function() { + return Array.from(this.ops).map((op) => + op.u.should.equal(true)); + }); + }); + })); + + describe("acceptChanges", function() { + beforeEach(function() { + this.change_id = "mock-change-id"; + this.change_ids = [ "mock-change-id-1", "mock-change-id-2", "mock-change-id-3", "mock-change-id-4" ]; + this.version = 34; + this.lines = ["original", "lines"]; + this.ranges = { entries: "mock", comments: "mock" }; + this.updated_ranges = { entries: "updated", comments: "updated" }; + this.DocumentManager.getDoc = sinon.stub().yields(null, this.lines, this.version, this.ranges); + this.RangesManager.acceptChanges = sinon.stub().yields(null, this.updated_ranges); + return this.RedisManager.updateDocument = sinon.stub().yields(); + }); + + describe("successfully with a single change", function() { + beforeEach(function() { + return this.DocumentManager.acceptChanges(this.project_id, this.doc_id, [ this.change_id ], this.callback); + }); + + it("should get the document's current ranges", function() { + return this.DocumentManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should apply the accept change to the ranges", function() { + return this.RangesManager.acceptChanges + .calledWith([ this.change_id ], this.ranges) + .should.equal(true); + }); + + it("should save the updated ranges", function() { + return this.RedisManager.updateDocument + .calledWith(this.project_id, this.doc_id, this.lines, this.version, [], this.updated_ranges, {}) + .should.equal(true); + }); + + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); + + describe("successfully with multiple changes", function() { + beforeEach(function() { + return this.DocumentManager.acceptChanges(this.project_id, this.doc_id, this.change_ids, this.callback); + }); + + return it("should apply the accept change to the ranges", function() { + return this.RangesManager.acceptChanges + .calledWith(this.change_ids, this.ranges) + .should.equal(true); + }); + }); + + return describe("when the doc is not found", function() { + beforeEach(function() { + this.DocumentManager.getDoc = sinon.stub().yields(null, null, null, null); + return this.DocumentManager.acceptChanges(this.project_id, this.doc_id, [ this.change_id ], this.callback); + }); + + it("should not save anything", function() { + return this.RedisManager.updateDocument.called.should.equal(false); + }); + + return it("should call the callback with a not found error", function() { + const error = new Errors.NotFoundError(`document not found: ${this.doc_id}`); + return this.callback.calledWith(error).should.equal(true); + }); + }); + }); + + describe("deleteComment", function() { + beforeEach(function() { + this.comment_id = "mock-comment-id"; + this.version = 34; + this.lines = ["original", "lines"]; + this.ranges = { comments: ["one", "two", "three"] }; + this.updated_ranges = { comments: ["one", "three"] }; + this.DocumentManager.getDoc = sinon.stub().yields(null, this.lines, this.version, this.ranges); + this.RangesManager.deleteComment = 
sinon.stub().yields(null, this.updated_ranges);
+      return this.RedisManager.updateDocument = sinon.stub().yields();
+    });
+
+    describe("successfully", function() {
+      beforeEach(function() {
+        return this.DocumentManager.deleteComment(this.project_id, this.doc_id, this.comment_id, this.callback);
+      });
+
+      it("should get the document's current ranges", function() {
+        return this.DocumentManager.getDoc
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true);
+      });
+
+      it("should delete the comment from the ranges", function() {
+        return this.RangesManager.deleteComment
+          .calledWith(this.comment_id, this.ranges)
+          .should.equal(true);
+      });
+
+      it("should save the updated ranges", function() {
+        return this.RedisManager.updateDocument
+          .calledWith(this.project_id, this.doc_id, this.lines, this.version, [], this.updated_ranges, {})
+          .should.equal(true);
+      });
+
+      return it("should call the callback", function() {
+        return this.callback.called.should.equal(true);
+      });
+    });
+
+    return describe("when the doc is not found", function() {
+      beforeEach(function() {
+        this.DocumentManager.getDoc = sinon.stub().yields(null, null, null, null);
+        return this.DocumentManager.deleteComment(this.project_id, this.doc_id, this.comment_id, this.callback);
+      });
+
+      it("should not save anything", function() {
+        return this.RedisManager.updateDocument.called.should.equal(false);
+      });
+
+      return it("should call the callback with a not found error", function() {
+        const error = new Errors.NotFoundError(`document not found: ${this.doc_id}`);
+        return this.callback.calledWith(error).should.equal(true);
+      });
+    });
+  });
+
+  describe("getDocAndFlushIfOld", function() {
+    beforeEach(function() {
+      return this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2);
+    });
+
+    describe("when the doc is in Redis", function() {
+      describe("and has changes to be flushed", function() {
+        beforeEach(function() {
+          this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.projectHistoryId, this.pathname, Date.now() - 1e9, true);
+          return this.DocumentManager.getDocAndFlushIfOld(this.project_id, this.doc_id, this.callback);
+        });
+
+        it("should get the doc", function() {
+          return this.DocumentManager.getDoc
+            .calledWith(this.project_id, this.doc_id)
+            .should.equal(true);
+        });
+
+        it("should flush the doc", function() {
+          return this.DocumentManager.flushDocIfLoaded
+            .calledWith(this.project_id, this.doc_id)
+            .should.equal(true);
+        });
+
+        return it("should call the callback with the lines and versions", function() {
+          return this.callback.calledWith(null, this.lines, this.version).should.equal(true);
+        });
+      });
+
+      return describe("and has only changes that don't need to be flushed", function() {
+        beforeEach(function() {
+          this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, Date.now() - 100, true);
+          return this.DocumentManager.getDocAndFlushIfOld(this.project_id, this.doc_id, this.callback);
+        });
+
+        it("should get the doc", function() {
+          return this.DocumentManager.getDoc
+            .calledWith(this.project_id, this.doc_id)
+            .should.equal(true);
+        });
+
+        it("should not flush the doc", function() {
+          return this.DocumentManager.flushDocIfLoaded
+            .called.should.equal(false);
+        });
+
+        return it("should call the callback with the lines and versions", function() {
+          return this.callback.calledWith(null, this.lines, this.version).should.equal(true);
+        });
+      });
+    });
+
+    return
describe("when the doc is not in Redis", function() { + beforeEach(function() { + this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, null, false); + return this.DocumentManager.getDocAndFlushIfOld(this.project_id, this.doc_id, this.callback); + }); + + it("should get the doc", function() { + return this.DocumentManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("should not flush the doc", function() { + return this.DocumentManager.flushDocIfLoaded + .called.should.equal(false); + }); + + return it("should call the callback with the lines and versions", function() { + return this.callback.calledWith(null, this.lines, this.version).should.equal(true); + }); + }); + }); + + describe("renameDoc", function() { + beforeEach(function() { + this.update = 'some-update'; + return this.RedisManager.renameDoc = sinon.stub().yields(); + }); + + return describe("successfully", function() { + beforeEach(function() { + return this.DocumentManager.renameDoc(this.project_id, this.doc_id, this.user_id, this.update, this.projectHistoryId, this.callback); + }); + + it("should rename the document", function() { + return this.RedisManager.renameDoc + .calledWith(this.project_id, this.doc_id, this.user_id, this.update, this.projectHistoryId) + .should.equal(true); + }); + + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); + }); + + return describe("resyncDocContents", function() { + describe("when doc is loaded in redis", function() { + beforeEach(function() { + this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); + this.ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub(); + return this.DocumentManager.resyncDocContents(this.project_id, this.doc_id, this.callback); + }); + + it("gets the doc contents from redis", function() { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + return it("queues a resync doc content update", function() { + return this.ProjectHistoryRedisManager.queueResyncDocContent + .calledWith(this.project_id, this.projectHistoryId, this.doc_id, this.lines, this.version, this.pathname, this.callback) + .should.equal(true); + }); + }); + + return describe("when doc is not loaded in redis", function() { + beforeEach(function() { + this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null); + this.PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); + this.ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub(); + return this.DocumentManager.resyncDocContents(this.project_id, this.doc_id, this.callback); + }); + + it("tries to get the doc contents from redis", function() { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + it("gets the doc contents from web", function() { + return this.PersistenceManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); + + return it("queues a resync doc content update", function() { + return this.ProjectHistoryRedisManager.queueResyncDocContent + .calledWith(this.project_id, this.projectHistoryId, this.doc_id, this.lines, this.version, this.pathname, this.callback) + .should.equal(true); + }); + }); + }); +}); diff --git 
a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js index 6cb6b1d8da..d02b8fd295 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js @@ -1,241 +1,307 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../../app/js/HistoryManager' +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../../app/js/HistoryManager'); -describe "HistoryManager", -> - beforeEach -> - @HistoryManager = SandboxedModule.require modulePath, requires: - "request": @request = {} - "settings-sharelatex": @Settings = { - apis: - project_history: - enabled: true +describe("HistoryManager", function() { + beforeEach(function() { + this.HistoryManager = SandboxedModule.require(modulePath, { requires: { + "request": (this.request = {}), + "settings-sharelatex": (this.Settings = { + apis: { + project_history: { + enabled: true, url: "http://project_history.example.com" - trackchanges: + }, + trackchanges: { url: "http://trackchanges.example.com" - } - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), debug: sinon.stub() } - "./DocumentManager": @DocumentManager = {} - "./HistoryRedisManager": @HistoryRedisManager = {} - "./RedisManager": @RedisManager = {} - "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} - "./Metrics": @metrics = {inc: sinon.stub()} - @project_id = "mock-project-id" - @doc_id = "mock-doc-id" - @callback = sinon.stub() + } + } + }), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), debug: sinon.stub() }), + "./DocumentManager": (this.DocumentManager = {}), + "./HistoryRedisManager": (this.HistoryRedisManager = {}), + "./RedisManager": (this.RedisManager = {}), + "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), + "./Metrics": (this.metrics = {inc: sinon.stub()}) + } + }); + this.project_id = "mock-project-id"; + this.doc_id = "mock-doc-id"; + return this.callback = sinon.stub(); + }); - describe "flushDocChangesAsync", -> - beforeEach -> - @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) + describe("flushDocChangesAsync", function() { + beforeEach(function() { + return this.request.post = sinon.stub().callsArgWith(1, null, {statusCode: 204}); + }); - describe "when the project uses track changes", -> - beforeEach -> - @RedisManager.getHistoryType = sinon.stub().yields(null, 'track-changes') - @HistoryManager.flushDocChangesAsync @project_id, @doc_id + describe("when the project uses track changes", function() { + beforeEach(function() { + this.RedisManager.getHistoryType = sinon.stub().yields(null, 'track-changes'); + return this.HistoryManager.flushDocChangesAsync(this.project_id, this.doc_id); + }); - it "should send a request to the track changes api", -> - @request.post - .calledWith("#{@Settings.apis.trackchanges.url}/project/#{@project_id}/doc/#{@doc_id}/flush") - .should.equal true + return it("should send a request to the track changes 
api", function() { + return this.request.post + .calledWith(`${this.Settings.apis.trackchanges.url}/project/${this.project_id}/doc/${this.doc_id}/flush`) + .should.equal(true); + }); + }); - describe "when the project uses project history and double flush is not disabled", -> - beforeEach -> - @RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history') - @HistoryManager.flushDocChangesAsync @project_id, @doc_id + describe("when the project uses project history and double flush is not disabled", function() { + beforeEach(function() { + this.RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history'); + return this.HistoryManager.flushDocChangesAsync(this.project_id, this.doc_id); + }); - it "should send a request to the track changes api", -> - @request.post + return it("should send a request to the track changes api", function() { + return this.request.post .called - .should.equal true + .should.equal(true); + }); + }); - describe "when the project uses project history and double flush is disabled", -> - beforeEach -> - @Settings.disableDoubleFlush = true - @RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history') - @HistoryManager.flushDocChangesAsync @project_id, @doc_id + return describe("when the project uses project history and double flush is disabled", function() { + beforeEach(function() { + this.Settings.disableDoubleFlush = true; + this.RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history'); + return this.HistoryManager.flushDocChangesAsync(this.project_id, this.doc_id); + }); - it "should not send a request to the track changes api", -> - @request.post + return it("should not send a request to the track changes api", function() { + return this.request.post .called - .should.equal false + .should.equal(false); + }); + }); + }); - describe "flushProjectChangesAsync", -> - beforeEach -> - @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) + describe("flushProjectChangesAsync", function() { + beforeEach(function() { + this.request.post = sinon.stub().callsArgWith(1, null, {statusCode: 204}); - @HistoryManager.flushProjectChangesAsync @project_id + return this.HistoryManager.flushProjectChangesAsync(this.project_id); + }); - it "should send a request to the project history api", -> - @request.post - .calledWith({url: "#{@Settings.apis.project_history.url}/project/#{@project_id}/flush", qs:{background:true}}) - .should.equal true + return it("should send a request to the project history api", function() { + return this.request.post + .calledWith({url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, qs:{background:true}}) + .should.equal(true); + }); + }); - describe "flushProjectChanges", -> + describe("flushProjectChanges", function() { - describe "in the normal case", -> - beforeEach -> - @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204) - @HistoryManager.flushProjectChanges @project_id, {background:true} + describe("in the normal case", function() { + beforeEach(function() { + this.request.post = sinon.stub().callsArgWith(1, null, {statusCode: 204}); + return this.HistoryManager.flushProjectChanges(this.project_id, {background:true});}); - it "should send a request to the project history api", -> - @request.post - .calledWith({url: "#{@Settings.apis.project_history.url}/project/#{@project_id}/flush", qs:{background:true}}) - .should.equal true + return it("should send a request to the project history api", function() { + return this.request.post + 
.calledWith({url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, qs:{background:true}}) + .should.equal(true); + }); + }); - describe "with the skip_history_flush option", -> - beforeEach -> - @request.post = sinon.stub() - @HistoryManager.flushProjectChanges @project_id, {skip_history_flush:true} + return describe("with the skip_history_flush option", function() { + beforeEach(function() { + this.request.post = sinon.stub(); + return this.HistoryManager.flushProjectChanges(this.project_id, {skip_history_flush:true});}); - it "should not send a request to the project history api", -> - @request.post + return it("should not send a request to the project history api", function() { + return this.request.post .called - .should.equal false + .should.equal(false); + }); + }); + }); - describe "recordAndFlushHistoryOps", -> - beforeEach -> - @ops = [ 'mock-ops' ] - @project_ops_length = 10 - @doc_ops_length = 5 + describe("recordAndFlushHistoryOps", function() { + beforeEach(function() { + this.ops = [ 'mock-ops' ]; + this.project_ops_length = 10; + this.doc_ops_length = 5; - @HistoryManager.flushProjectChangesAsync = sinon.stub() - @HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3) - @HistoryManager.flushDocChangesAsync = sinon.stub() + this.HistoryManager.flushProjectChangesAsync = sinon.stub(); + this.HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3); + return this.HistoryManager.flushDocChangesAsync = sinon.stub(); + }); - describe "with no ops", -> - beforeEach -> - @HistoryManager.recordAndFlushHistoryOps( - @project_id, @doc_id, [], @doc_ops_length, @project_ops_length, @callback - ) + describe("with no ops", function() { + beforeEach(function() { + return this.HistoryManager.recordAndFlushHistoryOps( + this.project_id, this.doc_id, [], this.doc_ops_length, this.project_ops_length, this.callback + ); + }); - it "should not flush project changes", -> - @HistoryManager.flushProjectChangesAsync.called.should.equal false + it("should not flush project changes", function() { + return this.HistoryManager.flushProjectChangesAsync.called.should.equal(false); + }); - it "should not record doc has history ops", -> - @HistoryRedisManager.recordDocHasHistoryOps.called.should.equal false + it("should not record doc has history ops", function() { + return this.HistoryRedisManager.recordDocHasHistoryOps.called.should.equal(false); + }); - it "should not flush doc changes", -> - @HistoryManager.flushDocChangesAsync.called.should.equal false + it("should not flush doc changes", function() { + return this.HistoryManager.flushDocChangesAsync.called.should.equal(false); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "with enough ops to flush project changes", -> - beforeEach -> - @HistoryManager.shouldFlushHistoryOps = sinon.stub() - @HistoryManager.shouldFlushHistoryOps.withArgs(@project_ops_length).returns(true) - @HistoryManager.shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(false) + describe("with enough ops to flush project changes", function() { + beforeEach(function() { + this.HistoryManager.shouldFlushHistoryOps = sinon.stub(); + this.HistoryManager.shouldFlushHistoryOps.withArgs(this.project_ops_length).returns(true); + this.HistoryManager.shouldFlushHistoryOps.withArgs(this.doc_ops_length).returns(false); - @HistoryManager.recordAndFlushHistoryOps( - @project_id, 
@doc_id, @ops, @doc_ops_length, @project_ops_length, @callback
-      )
+        return this.HistoryManager.recordAndFlushHistoryOps(
+          this.project_id, this.doc_id, this.ops, this.doc_ops_length, this.project_ops_length, this.callback
+        );
+      });

-      it "should flush project changes", ->
-        @HistoryManager.flushProjectChangesAsync
-          .calledWith(@project_id)
-          .should.equal true
+      it("should flush project changes", function() {
+        return this.HistoryManager.flushProjectChangesAsync
+          .calledWith(this.project_id)
+          .should.equal(true);
+      });

-      it "should record doc has history ops", ->
-        @HistoryRedisManager.recordDocHasHistoryOps
-          .calledWith(@project_id, @doc_id, @ops)
+      it("should record doc has history ops", function() {
+        return this.HistoryRedisManager.recordDocHasHistoryOps
+          .calledWith(this.project_id, this.doc_id, this.ops)
+          .should.equal(true);
+      });

-      it "should not flush doc changes", ->
-        @HistoryManager.flushDocChangesAsync.called.should.equal false
+      it("should not flush doc changes", function() {
+        return this.HistoryManager.flushDocChangesAsync.called.should.equal(false);
+      });

-      it "should call the callback", ->
-        @callback.called.should.equal true
+      return it("should call the callback", function() {
+        return this.callback.called.should.equal(true);
+      });
+    });

-    describe "with enough ops to flush doc changes", ->
-      beforeEach ->
-        @HistoryManager.shouldFlushHistoryOps = sinon.stub()
-        @HistoryManager.shouldFlushHistoryOps.withArgs(@project_ops_length).returns(false)
-        @HistoryManager.shouldFlushHistoryOps.withArgs(@doc_ops_length).returns(true)
+    describe("with enough ops to flush doc changes", function() {
+      beforeEach(function() {
+        this.HistoryManager.shouldFlushHistoryOps = sinon.stub();
+        this.HistoryManager.shouldFlushHistoryOps.withArgs(this.project_ops_length).returns(false);
+        this.HistoryManager.shouldFlushHistoryOps.withArgs(this.doc_ops_length).returns(true);

-        @HistoryManager.recordAndFlushHistoryOps(
-          @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback
-        )
+        return this.HistoryManager.recordAndFlushHistoryOps(
+          this.project_id, this.doc_id, this.ops, this.doc_ops_length, this.project_ops_length, this.callback
+        );
+      });

-      it "should not flush project changes", ->
-        @HistoryManager.flushProjectChangesAsync.called.should.equal false
+      it("should not flush project changes", function() {
+        return this.HistoryManager.flushProjectChangesAsync.called.should.equal(false);
+      });

-      it "should record doc has history ops", ->
-        @HistoryRedisManager.recordDocHasHistoryOps
-          .calledWith(@project_id, @doc_id, @ops)
+      it("should record doc has history ops", function() {
+        return this.HistoryRedisManager.recordDocHasHistoryOps
+          .calledWith(this.project_id, this.doc_id, this.ops)
+          .should.equal(true);
+      });

-      it "should flush doc changes", ->
-        @HistoryManager.flushDocChangesAsync
-          .calledWith(@project_id, @doc_id)
-          .should.equal true
+      it("should flush doc changes", function() {
+        return this.HistoryManager.flushDocChangesAsync
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true);
+      });

-      it "should call the callback", ->
-        @callback.called.should.equal true
+      return it("should call the callback", function() {
+        return this.callback.called.should.equal(true);
+      });
+    });

-    describe "when recording doc has history ops errors", ->
-      beforeEach ->
-        @error = new Error("error")
-        @HistoryRedisManager.recordDocHasHistoryOps =
-          sinon.stub().callsArgWith(3, @error)
+    describe("when recording doc has history ops errors", function() {
+      beforeEach(function() {
+        this.error = new Error("error");
+        this.HistoryRedisManager.recordDocHasHistoryOps =
+          sinon.stub().callsArgWith(3, this.error);

-        @HistoryManager.recordAndFlushHistoryOps(
-          @project_id, @doc_id, @ops, @doc_ops_length, @project_ops_length, @callback
-        )
+        return this.HistoryManager.recordAndFlushHistoryOps(
+          this.project_id, this.doc_id, this.ops, this.doc_ops_length, this.project_ops_length, this.callback
+        );
+      });

-      it "should not flush doc changes", ->
-        @HistoryManager.flushDocChangesAsync.called.should.equal false
+      it("should not flush doc changes", function() {
+        return this.HistoryManager.flushDocChangesAsync.called.should.equal(false);
+      });

-      it "should call the callback with the error", ->
-        @callback.calledWith(@error).should.equal true
+      return it("should call the callback with the error", function() {
+        return this.callback.calledWith(this.error).should.equal(true);
+      });
+    });

-    describe "shouldFlushHistoryOps", ->
-      it "should return false if the number of ops is not known", ->
-        @HistoryManager.shouldFlushHistoryOps(null, ['a', 'b', 'c'].length, 1).should.equal false
+    return describe("shouldFlushHistoryOps", function() {
+      it("should return false if the number of ops is not known", function() {
+        return this.HistoryManager.shouldFlushHistoryOps(null, ['a', 'b', 'c'].length, 1).should.equal(false);
+      });

-      it "should return false if the updates didn't take us past the threshold", ->
-        # Currently there are 14 ops
-        # Previously we were on 11 ops
-        # We didn't pass over a multiple of 5
-        @HistoryManager.shouldFlushHistoryOps(14, ['a', 'b', 'c'].length, 5).should.equal false
+      it("should return false if the updates didn't take us past the threshold", function() {
+        // Currently there are 14 ops
+        // Previously we were on 11 ops
+        // We didn't pass over a multiple of 5
+        return this.HistoryManager.shouldFlushHistoryOps(14, ['a', 'b', 'c'].length, 5).should.equal(false);
+      });

-      it "should return true if the updates took to the threshold", ->
-        # Currently there are 15 ops
-        # Previously we were on 12 ops
-        # We've reached a new multiple of 5
-        @HistoryManager.shouldFlushHistoryOps(15, ['a', 'b', 'c'].length, 5).should.equal true
+      it("should return true if the updates took us to the threshold", function() {
+        // Currently there are 15 ops
+        // Previously we were on 12 ops
+        // We've reached a new multiple of 5
+        return this.HistoryManager.shouldFlushHistoryOps(15, ['a', 'b', 'c'].length, 5).should.equal(true);
+      });

-      it "should return true if the updates took past the threshold", ->
-        # Currently there are 19 ops
-        # Previously we were on 16 ops
-        # We didn't pass over a multiple of 5
-        @HistoryManager.shouldFlushHistoryOps(17, ['a', 'b', 'c'].length, 5).should.equal true
+      return it("should return true if the updates took us past the threshold", function() {
+        // Currently there are 17 ops
+        // Previously we were on 14 ops
+        // We've passed over a multiple of 5
+        return this.HistoryManager.shouldFlushHistoryOps(17, ['a', 'b', 'c'].length, 5).should.equal(true);
+      });
+    });
+  });

-  describe "resyncProjectHistory", ->
-    beforeEach ->
-      @projectHistoryId = 'history-id-1234'
-      @docs = [
-        doc: @doc_id
+  return describe("resyncProjectHistory", function() {
+    beforeEach(function() {
+      this.projectHistoryId = 'history-id-1234';
+      this.docs = [{
+        doc: this.doc_id,
         path: 'main.tex'
-      ]
-      @files = [
-        file: 'mock-file-id'
-        path: 'universe.png'
-        url: "www.filestore.test/#{@project_id}/mock-file-id"
-      ]
-      @ProjectHistoryRedisManager.queueResyncProjectStructure = sinon.stub().yields()
-      @DocumentManager.resyncDocContentsWithLock = sinon.stub().yields()
-      @HistoryManager.resyncProjectHistory @project_id, @projectHistoryId, @docs, @files, @callback
+      }
+      ];
+      this.files = [{
+        file: 'mock-file-id',
+        path: 'universe.png',
+        url: `www.filestore.test/${this.project_id}/mock-file-id`
+      }
+      ];
+      this.ProjectHistoryRedisManager.queueResyncProjectStructure = sinon.stub().yields();
+      this.DocumentManager.resyncDocContentsWithLock = sinon.stub().yields();
+      return this.HistoryManager.resyncProjectHistory(this.project_id, this.projectHistoryId, this.docs, this.files, this.callback);
+    });

-    it "should queue a project structure reync", ->
-      @ProjectHistoryRedisManager.queueResyncProjectStructure
-        .calledWith(@project_id, @projectHistoryId, @docs, @files)
-        .should.equal true
+    it("should queue a project structure resync", function() {
+      return this.ProjectHistoryRedisManager.queueResyncProjectStructure
+        .calledWith(this.project_id, this.projectHistoryId, this.docs, this.files)
+        .should.equal(true);
+    });

-    it "should queue doc content reyncs", ->
-      @DocumentManager
+    it("should queue doc content resyncs", function() {
+      return this.DocumentManager
         .resyncDocContentsWithLock
-        .calledWith(@project_id, @doc_id)
-        .should.equal true
+        .calledWith(this.project_id, this.doc_id)
+        .should.equal(true);
+    });

-    it "should call the callback", ->
-      @callback.called.should.equal true
+    return it("should call the callback", function() {
+      return this.callback.called.should.equal(true);
+    });
+  });
+});
diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js
index ca3937d4c5..70e95769a7 100644
--- a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js
+++ b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js
@@ -1,55 +1,82 @@
-sinon = require('sinon')
-chai = require('chai')
-should = chai.should()
-modulePath = "../../../../app/js/HistoryRedisManager.js"
-SandboxedModule = require('sandboxed-module')
-Errors = require "../../../../app/js/Errors"
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon');
+const chai = require('chai');
+const should = chai.should();
+const modulePath = "../../../../app/js/HistoryRedisManager.js";
+const SandboxedModule = require('sandboxed-module');
+const Errors = require("../../../../app/js/Errors");

-describe "HistoryRedisManager", ->
-  beforeEach ->
-    @rclient =
-      auth: () ->
+describe("HistoryRedisManager", function() {
+  beforeEach(function() {
+    this.rclient = {
+      auth() {},
       exec: sinon.stub()
-    @rclient.multi = () => @rclient
-    @HistoryRedisManager = SandboxedModule.require modulePath, requires:
-      "redis-sharelatex": createClient: () => @rclient
-      "settings-sharelatex":
-        redis:
-          history: @settings =
-            key_schema:
-              uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
-              docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
-      "logger-sharelatex": { log: () -> }
-    @doc_id = "doc-id-123"
-    @project_id = "project-id-123"
-    @callback = sinon.stub()
+    };
+    this.rclient.multi = () => this.rclient;
+    this.HistoryRedisManager = SandboxedModule.require(modulePath, { requires: {
+      "redis-sharelatex": { createClient: () => this.rclient
+ }, + "settings-sharelatex": { + redis: { + history: (this.settings = { + key_schema: { + uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:${doc_id}`; }, + docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:${project_id}`; } + } + }) + } + }, + "logger-sharelatex": { log() {} } + } + }); + this.doc_id = "doc-id-123"; + this.project_id = "project-id-123"; + return this.callback = sinon.stub(); + }); - describe "recordDocHasHistoryOps", -> - beforeEach -> - @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }] - @rclient.sadd = sinon.stub().yields() + return describe("recordDocHasHistoryOps", function() { + beforeEach(function() { + this.ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }]; + return this.rclient.sadd = sinon.stub().yields(); + }); - describe "with ops", -> - beforeEach (done) -> - @HistoryRedisManager.recordDocHasHistoryOps @project_id, @doc_id, @ops, (args...) => - @callback(args...) - done() + describe("with ops", function() { + beforeEach(function(done) { + return this.HistoryRedisManager.recordDocHasHistoryOps(this.project_id, this.doc_id, this.ops, (...args) => { + this.callback(...Array.from(args || [])); + return done(); + }); + }); - it "should add the doc_id to the set of which records the project docs", -> - @rclient.sadd - .calledWith("DocsWithHistoryOps:#{@project_id}", @doc_id) - .should.equal true + return it("should add the doc_id to the set of which records the project docs", function() { + return this.rclient.sadd + .calledWith(`DocsWithHistoryOps:${this.project_id}`, this.doc_id) + .should.equal(true); + }); + }); - describe "with no ops", -> - beforeEach (done) -> - @HistoryRedisManager.recordDocHasHistoryOps @project_id, @doc_id, [], (args...) => - @callback(args...) 
- done() + return describe("with no ops", function() { + beforeEach(function(done) { + return this.HistoryRedisManager.recordDocHasHistoryOps(this.project_id, this.doc_id, [], (...args) => { + this.callback(...Array.from(args || [])); + return done(); + }); + }); - it "should not add the doc_id to the set of which records the project docs", -> - @rclient.sadd + it("should not add the doc_id to the set of which records the project docs", function() { + return this.rclient.sadd .called - .should.equal false + .should.equal(false); + }); - it "should call the callback with an error", -> - @callback.calledWith(new Error("cannot push no ops")).should.equal true + return it("should call the callback with an error", function() { + return this.callback.calledWith(new Error("cannot push no ops")).should.equal(true); + }); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js index 4f316a12ba..9acbfb4325 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js @@ -1,617 +1,809 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/HttpController.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/HttpController.js"; +const SandboxedModule = require('sandboxed-module'); +const Errors = require("../../../../app/js/Errors.js"); -describe "HttpController", -> - beforeEach -> - @HttpController = SandboxedModule.require modulePath, requires: - "./DocumentManager": @DocumentManager = {} - "./HistoryManager": @HistoryManager = - flushProjectChangesAsync: sinon.stub() - "./ProjectManager": @ProjectManager = {} - "logger-sharelatex" : @logger = { log: sinon.stub() } - "./ProjectFlusher": {flushAllProjects:->} - "./DeleteQueueManager": @DeleteQueueManager = {} - "./Metrics": @Metrics = {} +describe("HttpController", function() { + beforeEach(function() { + let Timer; + this.HttpController = SandboxedModule.require(modulePath, { requires: { + "./DocumentManager": (this.DocumentManager = {}), + "./HistoryManager": (this.HistoryManager = + {flushProjectChangesAsync: sinon.stub()}), + "./ProjectManager": (this.ProjectManager = {}), + "logger-sharelatex" : (this.logger = { log: sinon.stub() }), + "./ProjectFlusher": {flushAllProjects() {}}, + "./DeleteQueueManager": (this.DeleteQueueManager = {}), + "./Metrics": (this.Metrics = {}), "./Errors" : Errors - @Metrics.Timer = class Timer - done: sinon.stub() - @project_id = "project-id-123" - @doc_id = "doc-id-123" - @next = sinon.stub() - @res = - send: sinon.stub() - sendStatus: sinon.stub() + } + } + ); + this.Metrics.Timer = (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()); + this.project_id = "project-id-123"; + this.doc_id = "doc-id-123"; + this.next = sinon.stub(); + return this.res = { + send: 
sinon.stub(), + sendStatus: sinon.stub(), json: sinon.stub() + }; + }); - describe "getDoc", -> - beforeEach -> - @lines = ["one", "two", "three"] - @ops = ["mock-op-1", "mock-op-2"] - @version = 42 - @fromVersion = 42 - @ranges = { changes: "mock", comments: "mock" } - @pathname = '/a/b/c' - @req = - params: - project_id: @project_id - doc_id: @doc_id + describe("getDoc", function() { + beforeEach(function() { + this.lines = ["one", "two", "three"]; + this.ops = ["mock-op-1", "mock-op-2"]; + this.version = 42; + this.fromVersion = 42; + this.ranges = { changes: "mock", comments: "mock" }; + this.pathname = '/a/b/c'; + return this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id + } + }; + }); - describe "when the document exists and no recent ops are requested", -> - beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, [], @ranges, @pathname) - @HttpController.getDoc(@req, @res, @next) + describe("when the document exists and no recent ops are requested", function() { + beforeEach(function() { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, this.lines, this.version, [], this.ranges, this.pathname); + return this.HttpController.getDoc(this.req, this.res, this.next); + }); - it "should get the doc", -> - @DocumentManager.getDocAndRecentOpsWithLock - .calledWith(@project_id, @doc_id, -1) - .should.equal true + it("should get the doc", function() { + return this.DocumentManager.getDocAndRecentOpsWithLock + .calledWith(this.project_id, this.doc_id, -1) + .should.equal(true); + }); - it "should return the doc as JSON", -> - @res.json + it("should return the doc as JSON", function() { + return this.res.json .calledWith({ - id: @doc_id - lines: @lines - version: @version - ops: [] - ranges: @ranges - pathname: @pathname + id: this.doc_id, + lines: this.lines, + version: this.version, + ops: [], + ranges: this.ranges, + pathname: this.pathname }) - .should.equal true + .should.equal(true); + }); - it "should log the request", -> - @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") - .should.equal true + it("should log the request", function() { + return this.logger.log + .calledWith({doc_id: this.doc_id, project_id: this.project_id}, "getting doc via http") + .should.equal(true); + }); - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the request", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "when recent ops are requested", -> - beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, @lines, @version, @ops, @ranges, @pathname) - @req.query = fromVersion: "#{@fromVersion}" - @HttpController.getDoc(@req, @res, @next) + describe("when recent ops are requested", function() { + beforeEach(function() { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, this.lines, this.version, this.ops, this.ranges, this.pathname); + this.req.query = {fromVersion: `${this.fromVersion}`}; + return this.HttpController.getDoc(this.req, this.res, this.next); + }); - it "should get the doc", -> - @DocumentManager.getDocAndRecentOpsWithLock - .calledWith(@project_id, @doc_id, @fromVersion) - .should.equal true + it("should get the doc", function() { + return this.DocumentManager.getDocAndRecentOpsWithLock + .calledWith(this.project_id, this.doc_id, this.fromVersion) + 
.should.equal(true); + }); - it "should return the doc as JSON", -> - @res.json + it("should return the doc as JSON", function() { + return this.res.json .calledWith({ - id: @doc_id - lines: @lines - version: @version - ops: @ops - ranges: @ranges - pathname: @pathname + id: this.doc_id, + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + pathname: this.pathname }) - .should.equal true + .should.equal(true); + }); - it "should log the request", -> - @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http") - .should.equal true + it("should log the request", function() { + return this.logger.log + .calledWith({doc_id: this.doc_id, project_id: this.project_id}, "getting doc via http") + .should.equal(true); + }); - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the request", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "when the document does not exist", -> - beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, null, null) - @HttpController.getDoc(@req, @res, @next) + describe("when the document does not exist", function() { + beforeEach(function() { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, null, null); + return this.HttpController.getDoc(this.req, this.res, this.next); + }); - it "should call next with NotFoundError", -> - @next + return it("should call next with NotFoundError", function() { + return this.next .calledWith(new Errors.NotFoundError("not found")) - .should.equal true + .should.equal(true); + }); + }); - describe "when an errors occurs", -> - beforeEach -> - @DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, new Error("oops"), null, null) - @HttpController.getDoc(@req, @res, @next) + return describe("when an errors occurs", function() { + beforeEach(function() { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, new Error("oops"), null, null); + return this.HttpController.getDoc(this.req, this.res, this.next); + }); - it "should call next with the error", -> - @next + return it("should call next with the error", function() { + return this.next .calledWith(new Error("oops")) - .should.equal true + .should.equal(true); + }); + }); + }); - describe "setDoc", -> - beforeEach -> - @lines = ["one", "two", "three"] - @source = "dropbox" - @user_id = "user-id-123" - @req = - headers: {} - params: - project_id: @project_id - doc_id: @doc_id - body: - lines: @lines - source: @source - user_id: @user_id - undoing: @undoing = true + describe("setDoc", function() { + beforeEach(function() { + this.lines = ["one", "two", "three"]; + this.source = "dropbox"; + this.user_id = "user-id-123"; + return this.req = { + headers: {}, + params: { + project_id: this.project_id, + doc_id: this.doc_id + }, + body: { + lines: this.lines, + source: this.source, + user_id: this.user_id, + undoing: (this.undoing = true) + } + }; + }); - describe "successfully", -> - beforeEach -> - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6) - @HttpController.setDoc(@req, @res, @next) + describe("successfully", function() { + beforeEach(function() { + this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6); + return this.HttpController.setDoc(this.req, this.res, this.next); + }); - it "should set the doc", -> - @DocumentManager.setDocWithLock - 
.calledWith(@project_id, @doc_id, @lines, @source, @user_id, @undoing) - .should.equal true + it("should set the doc", function() { + return this.DocumentManager.setDocWithLock + .calledWith(this.project_id, this.doc_id, this.lines, this.source, this.user_id, this.undoing) + .should.equal(true); + }); - it "should return a successful No Content response", -> - @res.sendStatus + it("should return a successful No Content response", function() { + return this.res.sendStatus .calledWith(204) - .should.equal true + .should.equal(true); + }); - it "should log the request", -> - @logger.log - .calledWith(doc_id: @doc_id, project_id: @project_id, lines: @lines, source: @source, user_id: @user_id, undoing: @undoing, "setting doc via http") - .should.equal true + it("should log the request", function() { + return this.logger.log + .calledWith({doc_id: this.doc_id, project_id: this.project_id, lines: this.lines, source: this.source, user_id: this.user_id, undoing: this.undoing}, "setting doc via http") + .should.equal(true); + }); - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the request", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "when an errors occurs", -> - beforeEach -> - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6, new Error("oops")) - @HttpController.setDoc(@req, @res, @next) + describe("when an errors occurs", function() { + beforeEach(function() { + this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6, new Error("oops")); + return this.HttpController.setDoc(this.req, this.res, this.next); + }); - it "should call next with the error", -> - @next + return it("should call next with the error", function() { + return this.next .calledWith(new Error("oops")) - .should.equal true + .should.equal(true); + }); + }); - describe "when the payload is too large", -> - beforeEach -> - lines = [] - for _ in [0..200000] - lines.push "test test test" - @req.body.lines = lines - @DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6) - @HttpController.setDoc(@req, @res, @next) + return describe("when the payload is too large", function() { + beforeEach(function() { + const lines = []; + for (let _ = 0; _ <= 200000; _++) { + lines.push("test test test"); + } + this.req.body.lines = lines; + this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6); + return this.HttpController.setDoc(this.req, this.res, this.next); + }); - it 'should send back a 406 response', -> - @res.sendStatus.calledWith(406).should.equal true + it('should send back a 406 response', function() { + return this.res.sendStatus.calledWith(406).should.equal(true); + }); - it 'should not call setDocWithLock', -> - @DocumentManager.setDocWithLock.callCount.should.equal 0 + return it('should not call setDocWithLock', function() { + return this.DocumentManager.setDocWithLock.callCount.should.equal(0); + }); + }); + }); - describe "flushProject", -> - beforeEach -> - @req = - params: - project_id: @project_id + describe("flushProject", function() { + beforeEach(function() { + return this.req = { + params: { + project_id: this.project_id + } + }; + }); - describe "successfully", -> - beforeEach -> - @ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1) - @HttpController.flushProject(@req, @res, @next) + describe("successfully", function() { + beforeEach(function() { + this.ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1); + return 
this.HttpController.flushProject(this.req, this.res, this.next);
+      });
-      it "should flush the project", ->
-        @ProjectManager.flushProjectWithLocks
-          .calledWith(@project_id)
-          .should.equal true
+      it("should flush the project", function() {
+        return this.ProjectManager.flushProjectWithLocks
+          .calledWith(this.project_id)
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus
+      it("should return a successful No Content response", function() {
+        return this.res.sendStatus
          .calledWith(204)
-          .should.equal true
+          .should.equal(true);
+      });
-      it "should log the request", ->
-        @logger.log
-          .calledWith(project_id: @project_id, "flushing project via http")
-          .should.equal true
+      it("should log the request", function() {
+        return this.logger.log
+          .calledWith({project_id: this.project_id}, "flushing project via http")
+          .should.equal(true);
+      });
-      it "should time the request", ->
-        @Metrics.Timer::done.called.should.equal true
+      return it("should time the request", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops"))
-        @HttpController.flushProject(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops"));
+        return this.HttpController.flushProject(this.req, this.res, this.next);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
-  describe "flushDocIfLoaded", ->
-    beforeEach ->
-      @lines = ["one", "two", "three"]
-      @version = 42
-      @req =
-        params:
-          project_id: @project_id
-          doc_id: @doc_id
+  describe("flushDocIfLoaded", function() {
+    beforeEach(function() {
+      this.lines = ["one", "two", "three"];
+      this.version = 42;
+      return this.req = {
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id
+        }
+      };
+    });
-    describe "successfully", ->
-      beforeEach ->
-        @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2)
-        @HttpController.flushDocIfLoaded(@req, @res, @next)
+    describe("successfully", function() {
+      beforeEach(function() {
+        this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2);
+        return this.HttpController.flushDocIfLoaded(this.req, this.res, this.next);
+      });
-      it "should flush the doc", ->
-        @DocumentManager.flushDocIfLoadedWithLock
-          .calledWith(@project_id, @doc_id)
-          .should.equal true
+      it("should flush the doc", function() {
+        return this.DocumentManager.flushDocIfLoadedWithLock
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus
+      it("should return a successful No Content response", function() {
+        return this.res.sendStatus
          .calledWith(204)
-          .should.equal true
+          .should.equal(true);
+      });
-      it "should log the request", ->
-        @logger.log
-          .calledWith(doc_id: @doc_id, project_id: @project_id, "flushing doc via http")
-          .should.equal true
+      it("should log the request", function() {
+        return this.logger.log
+          .calledWith({doc_id: this.doc_id, project_id: this.project_id}, "flushing doc via http")
+          .should.equal(true);
+      });
-      it "should time the request", ->
-        @Metrics.Timer::done.called.should.equal true
+      return it("should time the request", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2, new Error("oops"))
-        @HttpController.flushDocIfLoaded(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2, new Error("oops"));
+        return this.HttpController.flushDocIfLoaded(this.req, this.res, this.next);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
-  describe "deleteDoc", ->
-    beforeEach ->
-      @req =
-        params:
-          project_id: @project_id
-          doc_id: @doc_id
+  describe("deleteDoc", function() {
+    beforeEach(function() {
+      return this.req = {
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id
+        }, query: {}
+      };});
-    describe "successfully", ->
-      beforeEach ->
-        @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3)
-        @HttpController.deleteDoc(@req, @res, @next)
+    describe("successfully", function() {
+      beforeEach(function() {
+        this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3);
+        return this.HttpController.deleteDoc(this.req, this.res, this.next);
+      });
-      it "should flush and delete the doc", ->
-        @DocumentManager.flushAndDeleteDocWithLock
-          .calledWith(@project_id, @doc_id, { ignoreFlushErrors: false })
-          .should.equal true
+      it("should flush and delete the doc", function() {
+        return this.DocumentManager.flushAndDeleteDocWithLock
+          .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: false })
+          .should.equal(true);
+      });
-      it "should flush project history", ->
-        @HistoryManager.flushProjectChangesAsync
-          .calledWithExactly(@project_id)
-          .should.equal true
+      it("should flush project history", function() {
+        return this.HistoryManager.flushProjectChangesAsync
+          .calledWithExactly(this.project_id)
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus
+      it("should return a successful No Content response", function() {
+        return this.res.sendStatus
          .calledWith(204)
-          .should.equal true
+          .should.equal(true);
+      });
-      it "should log the request", ->
-        @logger.log
-          .calledWith(doc_id: @doc_id, project_id: @project_id, "deleting doc via http")
-          .should.equal true
+      it("should log the request", function() {
+        return this.logger.log
+          .calledWith({doc_id: this.doc_id, project_id: this.project_id}, "deleting doc via http")
+          .should.equal(true);
+      });
-      it "should time the request", ->
-        @Metrics.Timer::done.called.should.equal true
+      return it("should time the request", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
-    describe "ignoring errors", ->
-      beforeEach ->
-        @req.query.ignore_flush_errors = 'true'
-        @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields()
-        @HttpController.deleteDoc(@req, @res, @next)
+    describe("ignoring errors", function() {
+      beforeEach(function() {
+        this.req.query.ignore_flush_errors = 'true';
+        this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields();
+        return this.HttpController.deleteDoc(this.req, this.res, this.next);
+      });
-      it "should delete the doc", ->
-        @DocumentManager.flushAndDeleteDocWithLock
-          .calledWith(@project_id, @doc_id, { ignoreFlushErrors: true })
-          .should.equal true
+      it("should delete the doc", function() {
+        return this.DocumentManager.flushAndDeleteDocWithLock
+          .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: true })
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus.calledWith(204).should.equal true
+      return it("should return a successful No Content response", function() {
+        return this.res.sendStatus.calledWith(204).should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3, new Error("oops"))
-        @HttpController.deleteDoc(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3, new Error("oops"));
+        return this.HttpController.deleteDoc(this.req, this.res, this.next);
+      });
-      it "should flush project history", ->
-        @HistoryManager.flushProjectChangesAsync
-          .calledWithExactly(@project_id)
-          .should.equal true
+      it("should flush project history", function() {
+        return this.HistoryManager.flushProjectChangesAsync
+          .calledWithExactly(this.project_id)
+          .should.equal(true);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
-  describe "deleteProject", ->
-    beforeEach ->
-      @req =
-        params:
-          project_id: @project_id
+  describe("deleteProject", function() {
+    beforeEach(function() {
+      return this.req = {
+        params: {
+          project_id: this.project_id
+        }
+      };
+    });
-    describe "successfully", ->
-      beforeEach ->
-        @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2)
-        @HttpController.deleteProject(@req, @res, @next)
+    describe("successfully", function() {
+      beforeEach(function() {
+        this.ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2);
+        return this.HttpController.deleteProject(this.req, this.res, this.next);
+      });
-      it "should delete the project", ->
-        @ProjectManager.flushAndDeleteProjectWithLocks
-          .calledWith(@project_id)
-          .should.equal true
+      it("should delete the project", function() {
+        return this.ProjectManager.flushAndDeleteProjectWithLocks
+          .calledWith(this.project_id)
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus
+      it("should return a successful No Content response", function() {
+        return this.res.sendStatus
          .calledWith(204)
-          .should.equal true
+          .should.equal(true);
+      });
-      it "should log the request", ->
-        @logger.log
-          .calledWith(project_id: @project_id, "deleting project via http")
-          .should.equal true
+      it("should log the request", function() {
+        return this.logger.log
+          .calledWith({project_id: this.project_id}, "deleting project via http")
+          .should.equal(true);
+      });
-      it "should time the request", ->
-        @Metrics.Timer::done.called.should.equal true
+      return it("should time the request", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
-    describe "with the background=true option from realtime", ->
-      beforeEach ->
-        @ProjectManager.queueFlushAndDeleteProject = sinon.stub().callsArgWith(1)
-        @req.query = {background:true, shutdown:true}
-        @HttpController.deleteProject(@req, @res, @next)
+    describe("with the background=true option from realtime", function() {
+      beforeEach(function() {
+        this.ProjectManager.queueFlushAndDeleteProject = sinon.stub().callsArgWith(1);
+        this.req.query = {background:true, shutdown:true};
+        return this.HttpController.deleteProject(this.req, this.res, this.next);
+      });
-      it "should queue the flush and delete", ->
-        @ProjectManager.queueFlushAndDeleteProject
-          .calledWith(@project_id)
-          .should.equal true
+      return it("should queue the flush and delete", function() {
+        return this.ProjectManager.queueFlushAndDeleteProject
+          .calledWith(this.project_id)
+          .should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2, new Error("oops"))
-        @HttpController.deleteProject(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2, new Error("oops"));
+        return this.HttpController.deleteProject(this.req, this.res, this.next);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
-  describe "acceptChanges", ->
-    beforeEach ->
-      @req =
-        params:
-          project_id: @project_id
-          doc_id: @doc_id
-          change_id: @change_id = "mock-change-od-1"
+  describe("acceptChanges", function() {
+    beforeEach(function() {
+      return this.req = {
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id,
+          change_id: (this.change_id = "mock-change-od-1")
+        }
+      };
+    });
-    describe "successfully with a single change", ->
-      beforeEach ->
-        @DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3)
-        @HttpController.acceptChanges(@req, @res, @next)
+    describe("successfully with a single change", function() {
+      beforeEach(function() {
+        this.DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3);
+        return this.HttpController.acceptChanges(this.req, this.res, this.next);
+      });
-      it "should accept the change", ->
-        @DocumentManager.acceptChangesWithLock
-          .calledWith(@project_id, @doc_id, [ @change_id ])
-          .should.equal true
+      it("should accept the change", function() {
+        return this.DocumentManager.acceptChangesWithLock
+          .calledWith(this.project_id, this.doc_id, [ this.change_id ])
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus
+      it("should return a successful No Content response", function() {
+        return this.res.sendStatus
          .calledWith(204)
-          .should.equal true
+          .should.equal(true);
+      });
-      it "should log the request", ->
-        @logger.log
-          .calledWith({@project_id, @doc_id}, "accepting 1 changes via http")
-          .should.equal true
+      it("should log the request", function() {
+        return this.logger.log
+          .calledWith({project_id: this.project_id, doc_id: this.doc_id}, "accepting 1 changes via http")
+          .should.equal(true);
+      });
-      it "should time the request", ->
-        @Metrics.Timer::done.called.should.equal true
+      return it("should time the request", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
-    describe "successfully with multiple changes", ->
-      beforeEach ->
-        @change_ids = [ "mock-change-od-1", "mock-change-od-2", "mock-change-od-3", "mock-change-od-4" ]
-        @req.body =
-          change_ids: @change_ids
-        @DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3)
-        @HttpController.acceptChanges(@req, @res, @next)
+    describe("successfully with multiple changes", function() {
+      beforeEach(function() {
+        this.change_ids = [ "mock-change-od-1", "mock-change-od-2", "mock-change-od-3", "mock-change-od-4" ];
+        this.req.body =
+          {change_ids: this.change_ids};
+        this.DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3);
+        return this.HttpController.acceptChanges(this.req, this.res, this.next);
+      });
-      it "should accept the changes in the body payload", ->
-        @DocumentManager.acceptChangesWithLock
-          .calledWith(@project_id, @doc_id, @change_ids)
-          .should.equal true
+      it("should accept the changes in the body payload", function() {
+        return this.DocumentManager.acceptChangesWithLock
+          .calledWith(this.project_id, this.doc_id, this.change_ids)
+          .should.equal(true);
+      });
-      it "should log the request with the correct number of changes", ->
-        @logger.log
-          .calledWith({@project_id, @doc_id}, "accepting #{ @change_ids.length } changes via http")
-          .should.equal true
+      return it("should log the request with the correct number of changes", function() {
+        return this.logger.log
+          .calledWith({project_id: this.project_id, doc_id: this.doc_id}, `accepting ${ this.change_ids.length } changes via http`)
+          .should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3, new Error("oops"))
-        @HttpController.acceptChanges(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3, new Error("oops"));
+        return this.HttpController.acceptChanges(this.req, this.res, this.next);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
-  describe "deleteComment", ->
-    beforeEach ->
-      @req =
-        params:
-          project_id: @project_id
-          doc_id: @doc_id
-          comment_id: @comment_id = "mock-comment-id"
+  describe("deleteComment", function() {
+    beforeEach(function() {
+      return this.req = {
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id,
+          comment_id: (this.comment_id = "mock-comment-id")
+        }
+      };
+    });
-    describe "successfully", ->
-      beforeEach ->
-        @DocumentManager.deleteCommentWithLock = sinon.stub().callsArgWith(3)
-        @HttpController.deleteComment(@req, @res, @next)
+    describe("successfully", function() {
+      beforeEach(function() {
+        this.DocumentManager.deleteCommentWithLock = sinon.stub().callsArgWith(3);
+        return this.HttpController.deleteComment(this.req, this.res, this.next);
+      });
-      it "should delete the comment", ->
-        @DocumentManager.deleteCommentWithLock
-          .calledWith(@project_id, @doc_id, @comment_id)
-          .should.equal true
+      it("should delete the comment", function() {
+        return this.DocumentManager.deleteCommentWithLock
+          .calledWith(this.project_id, this.doc_id, this.comment_id)
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus
+      it("should return a successful No Content response", function() {
+        return this.res.sendStatus
          .calledWith(204)
-          .should.equal true
+          .should.equal(true);
+      });
-      it "should log the request", ->
-        @logger.log
-          .calledWith({@project_id, @doc_id, @comment_id}, "deleting comment via http")
-          .should.equal true
+      it("should log the request", function() {
+        return this.logger.log
+          .calledWith({project_id: this.project_id, doc_id: this.doc_id, comment_id: this.comment_id}, "deleting comment via http")
+          .should.equal(true);
+      });
-      it "should time the request", ->
-        @Metrics.Timer::done.called.should.equal true
+      return it("should time the request", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @DocumentManager.deleteCommentWithLock = sinon.stub().callsArgWith(3, new Error("oops"))
-        @HttpController.deleteComment(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.DocumentManager.deleteCommentWithLock = sinon.stub().callsArgWith(3, new Error("oops"));
+        return this.HttpController.deleteComment(this.req, this.res, this.next);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
-  describe "getProjectDocsAndFlushIfOld", ->
-    beforeEach ->
-      @state = "01234567890abcdef"
-      @docs = [{_id: "1234", lines: "hello", v: 23}, {_id: "4567", lines: "world", v: 45}]
-      @req =
-        params:
-          project_id: @project_id
-        query:
-          state: @state
+  describe("getProjectDocsAndFlushIfOld", function() {
+    beforeEach(function() {
+      this.state = "01234567890abcdef";
+      this.docs = [{_id: "1234", lines: "hello", v: 23}, {_id: "4567", lines: "world", v: 45}];
+      return this.req = {
+        params: {
+          project_id: this.project_id
+        },
+        query: {
+          state: this.state
+        }
+      };
+    });
-    describe "successfully", ->
-      beforeEach ->
-        @ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3,null, @docs)
-        @HttpController.getProjectDocsAndFlushIfOld(@req, @res, @next)
+    describe("successfully", function() {
+      beforeEach(function() {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3,null, this.docs);
+        return this.HttpController.getProjectDocsAndFlushIfOld(this.req, this.res, this.next);
+      });
-      it "should get docs from the project manager", ->
-        @ProjectManager.getProjectDocsAndFlushIfOld
-          .calledWith(@project_id, @state, {})
-          .should.equal true
+      it("should get docs from the project manager", function() {
+        return this.ProjectManager.getProjectDocsAndFlushIfOld
+          .calledWith(this.project_id, this.state, {})
+          .should.equal(true);
+      });
-      it "should return a successful response", ->
-        @res.send
-          .calledWith(@docs)
-          .should.equal true
+      it("should return a successful response", function() {
+        return this.res.send
+          .calledWith(this.docs)
+          .should.equal(true);
+      });
-      it "should log the request", ->
-        @logger.log
-          .calledWith({project_id: @project_id, exclude: []}, "getting docs via http")
-          .should.equal true
+      it("should log the request", function() {
+        return this.logger.log
+          .calledWith({project_id: this.project_id, exclude: []}, "getting docs via http")
+          .should.equal(true);
+      });
-      it "should log the response", ->
-        @logger.log
-          .calledWith({project_id: @project_id, result: ["1234:23", "4567:45"]}, "got docs via http")
-          .should.equal true
+      it("should log the response", function() {
+        return this.logger.log
+          .calledWith({project_id: this.project_id, result: ["1234:23", "4567:45"]}, "got docs via http")
+          .should.equal(true);
+      });
-      it "should time the request", ->
-        @Metrics.Timer::done.called.should.equal true
+      return it("should time the request", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
-    describe "when there is a conflict", ->
-      beforeEach ->
-        @ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3, new Errors.ProjectStateChangedError("project state changed"))
-        @HttpController.getProjectDocsAndFlushIfOld(@req, @res, @next)
+    describe("when there is a conflict", function() {
+      beforeEach(function() {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3, new Errors.ProjectStateChangedError("project state changed"));
+        return this.HttpController.getProjectDocsAndFlushIfOld(this.req, this.res, this.next);
+      });
-      it "should return an HTTP 409 Conflict response", ->
-        @res.sendStatus
+      return it("should return an HTTP 409 Conflict response", function() {
+        return this.res.sendStatus
          .calledWith(409)
-          .should.equal true
+          .should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3, new Error("oops"))
-        @HttpController.getProjectDocsAndFlushIfOld(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3, new Error("oops"));
+        return this.HttpController.getProjectDocsAndFlushIfOld(this.req, this.res, this.next);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
-  describe "updateProject", ->
-    beforeEach ->
-      @projectHistoryId = "history-id-123"
-      @userId = "user-id-123"
-      @docUpdates = sinon.stub()
-      @fileUpdates = sinon.stub()
-      @version = 1234567
-      @req =
-        body: {@projectHistoryId, @userId, @docUpdates, @fileUpdates, @version}
-        params:
-          project_id: @project_id
+  describe("updateProject", function() {
+    beforeEach(function() {
+      this.projectHistoryId = "history-id-123";
+      this.userId = "user-id-123";
+      this.docUpdates = sinon.stub();
+      this.fileUpdates = sinon.stub();
+      this.version = 1234567;
+      return this.req = {
+        body: {projectHistoryId: this.projectHistoryId, userId: this.userId, docUpdates: this.docUpdates, fileUpdates: this.fileUpdates, version: this.version},
+        params: {
+          project_id: this.project_id
+        }
+      };
+    });
-    describe "successfully", ->
-      beforeEach ->
-        @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6)
-        @HttpController.updateProject(@req, @res, @next)
+    describe("successfully", function() {
+      beforeEach(function() {
+        this.ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6);
+        return this.HttpController.updateProject(this.req, this.res, this.next);
+      });
-      it "should update the project", ->
-        @ProjectManager.updateProjectWithLocks
-          .calledWith(@project_id, @projectHistoryId, @userId, @docUpdates, @fileUpdates, @version)
-          .should.equal true
+      it("should update the project", function() {
+        return this.ProjectManager.updateProjectWithLocks
+          .calledWith(this.project_id, this.projectHistoryId, this.userId, this.docUpdates, this.fileUpdates, this.version)
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus
+      it("should return a successful No Content response", function() {
+        return this.res.sendStatus
          .calledWith(204)
-          .should.equal true
+          .should.equal(true);
+      });
-      it "should time the request", ->
-        @Metrics.Timer::done.called.should.equal true
+      return it("should time the request", function() {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6, new Error("oops"))
-        @HttpController.updateProject(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6, new Error("oops"));
+        return this.HttpController.updateProject(this.req, this.res, this.next);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
-  describe "resyncProjectHistory", ->
-    beforeEach ->
-      @projectHistoryId = "history-id-123"
-      @docs = sinon.stub()
-      @files = sinon.stub()
-      @fileUpdates = sinon.stub()
-      @req =
+  return describe("resyncProjectHistory", function() {
+    beforeEach(function() {
+      this.projectHistoryId = "history-id-123";
+      this.docs = sinon.stub();
+      this.files = sinon.stub();
+      this.fileUpdates = sinon.stub();
+      return this.req = {
        body:
-          {@projectHistoryId, @docs, @files}
-        params:
-          project_id: @project_id
+          {projectHistoryId: this.projectHistoryId, docs: this.docs, files: this.files},
+        params: {
+          project_id: this.project_id
+        }
+      };
+    });
-    describe "successfully", ->
-      beforeEach ->
-        @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4)
-        @HttpController.resyncProjectHistory(@req, @res, @next)
+    describe("successfully", function() {
+      beforeEach(function() {
+        this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4);
+        return this.HttpController.resyncProjectHistory(this.req, this.res, this.next);
+      });
-      it "should resync the project history", ->
-        @HistoryManager.resyncProjectHistory
-          .calledWith(@project_id, @projectHistoryId, @docs, @files)
-          .should.equal true
+      it("should resync the project history", function() {
+        return this.HistoryManager.resyncProjectHistory
+          .calledWith(this.project_id, this.projectHistoryId, this.docs, this.files)
+          .should.equal(true);
+      });
-      it "should return a successful No Content response", ->
-        @res.sendStatus
+      return it("should return a successful No Content response", function() {
+        return this.res.sendStatus
          .calledWith(204)
-          .should.equal true
+          .should.equal(true);
+      });
+    });
-    describe "when an error occurs", ->
-      beforeEach ->
-        @HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4, new Error("oops"))
-        @HttpController.resyncProjectHistory(@req, @res, @next)
+    return describe("when an error occurs", function() {
+      beforeEach(function() {
+        this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4, new Error("oops"));
+        return this.HttpController.resyncProjectHistory(this.req, this.res, this.next);
+      });
-      it "should call next with the error", ->
-        @next
+      return it("should call next with the error", function() {
+        return this.next
          .calledWith(new Error("oops"))
-          .should.equal true
+          .should.equal(true);
+      });
+    });
+  });
+});
diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js
index a080e563f1..4c9c3f5f70 100644
--- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js
+++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js
@@ -1,37 +1,62 @@
-require('coffee-script')
-sinon = require('sinon')
-assert = require('assert')
-path = require('path')
-modulePath = path.join __dirname, '../../../../app/js/LockManager.js'
-project_id = 1234
-doc_id = 5678
-blockingKey = "Blocking:#{doc_id}"
-SandboxedModule = require('sandboxed-module')
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+require('coffee-script');
+const sinon = require('sinon');
+const assert = require('assert');
+const path = require('path');
+const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js');
+const project_id = 1234;
+const doc_id = 5678;
+const blockingKey = `Blocking:${doc_id}`;
+const SandboxedModule = require('sandboxed-module');
-describe 'LockManager - checking the lock', ()->
+describe('LockManager - checking the lock', function(){
-  existsStub = sinon.stub()
+  let Profiler;
+  const existsStub = sinon.stub();
-  mocks =
-    "logger-sharelatex": log:->
-    "redis-sharelatex":
-      createClient : ()->
-        auth:->
-        exists: existsStub
-    "./Metrics": {inc: () ->}
-    "./Profiler": class Profiler
-      log: sinon.stub().returns { end: sinon.stub() }
-      end: sinon.stub()
-  LockManager = SandboxedModule.require(modulePath, requires: mocks)
+  const mocks = {
+    "logger-sharelatex": { log() {}
+    },
+    "redis-sharelatex": {
+      createClient(){
+        return {
+          auth() {},
+          exists: existsStub
+        };
+      }
+    },
+    "./Metrics": {inc() {}},
+    "./Profiler": (Profiler = (function() {
+      Profiler = class Profiler {
+        static initClass() {
+          this.prototype.log = sinon.stub().returns({ end: sinon.stub() });
+          this.prototype.end = sinon.stub();
+        }
+      };
+      Profiler.initClass();
+      return Profiler;
+    })())
+  };
+  const LockManager = SandboxedModule.require(modulePath, {requires: mocks});
-  it 'should return true if the key does not exist', (done)->
-    existsStub.yields(null, "0")
-    LockManager.checkLock doc_id, (err, free)->
-      free.should.equal true
-      done()
+  it('should return true if the key does not exist', function(done){
+    existsStub.yields(null, "0");
+    return LockManager.checkLock(doc_id, function(err, free){
+      free.should.equal(true);
+      return done();
+    });
+  });
-  it 'should return false if the key does exist', (done)->
-    existsStub.yields(null, "1")
-    LockManager.checkLock doc_id, (err, free)->
-      free.should.equal false
-      done()
+  return it('should return false if the key does exist', function(done){
+    existsStub.yields(null, "1");
+    return LockManager.checkLock(doc_id, function(err, free){
+      free.should.equal(false);
+      return done();
+    });
+  });
+});
diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js
index 28fb02059e..67616c062c 100644
--- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js
+++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js
@@ -1,53 +1,82 @@
-require('coffee-script')
-sinon = require('sinon')
-assert = require('assert')
-path = require('path')
-modulePath = path.join __dirname, '../../../../app/js/LockManager.js'
-project_id = 1234
-doc_id = 5678
-SandboxedModule = require('sandboxed-module')
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
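+// These specs cover LockManager.releaseLock, which must only release the
+// per-doc Redis lock while the caller still holds it. Judging from the eval
+// call asserted below (unlockScript, one key `Blocking:${doc_id}`, and the
+// lock value), unlockScript is presumably the standard Redis
+// compare-and-delete Lua pattern, along the lines of:
+//
+//   if redis.call("get", KEYS[1]) == ARGV[1] then
+//     return redis.call("del", KEYS[1])
+//   else
+//     return 0
+//   end
+//
+// A reply of 1 means the lock was deleted; 0 means it had already timed out,
+// which releaseLock is expected to surface as an error.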
+require('coffee-script'); +const sinon = require('sinon'); +const assert = require('assert'); +const path = require('path'); +const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js'); +const project_id = 1234; +const doc_id = 5678; +const SandboxedModule = require('sandboxed-module'); -describe 'LockManager - releasing the lock', ()-> - beforeEach -> - @client = { - auth: -> +describe('LockManager - releasing the lock', function(){ + beforeEach(function() { + let Profiler; + this.client = { + auth() {}, eval: sinon.stub() - } - mocks = - "logger-sharelatex": - log:-> - error:-> - "redis-sharelatex": - createClient : () => @client + }; + const mocks = { + "logger-sharelatex": { + log() {}, + error() {} + }, + "redis-sharelatex": { + createClient : () => this.client + }, "settings-sharelatex": { - redis: - lock: - key_schema: - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - } - "./Metrics": {inc: () ->} - "./Profiler": class Profiler - log: sinon.stub().returns { end: sinon.stub() } - end: sinon.stub() - @LockManager = SandboxedModule.require(modulePath, requires: mocks) - @lockValue = "lock-value-stub" - @callback = sinon.stub() + redis: { + lock: { + key_schema: { + blockingKey({doc_id}) { return `Blocking:${doc_id}`; } + } + } + } + }, + "./Metrics": {inc() {}}, + "./Profiler": (Profiler = (function() { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }); + this.prototype.end = sinon.stub(); + } + }; + Profiler.initClass(); + return Profiler; + })()) + }; + this.LockManager = SandboxedModule.require(modulePath, {requires: mocks}); + this.lockValue = "lock-value-stub"; + return this.callback = sinon.stub(); + }); - describe "when the lock is current", -> - beforeEach -> - @client.eval = sinon.stub().yields(null, 1) - @LockManager.releaseLock doc_id, @lockValue, @callback + describe("when the lock is current", function() { + beforeEach(function() { + this.client.eval = sinon.stub().yields(null, 1); + return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback); + }); - it 'should clear the data from redis', -> - @client.eval.calledWith(@LockManager.unlockScript, 1, "Blocking:#{doc_id}", @lockValue).should.equal true + it('should clear the data from redis', function() { + return this.client.eval.calledWith(this.LockManager.unlockScript, 1, `Blocking:${doc_id}`, this.lockValue).should.equal(true); + }); - it 'should call the callback', -> - @callback.called.should.equal true + return it('should call the callback', function() { + return this.callback.called.should.equal(true); + }); + }); - describe "when the lock has expired", -> - beforeEach -> - @client.eval = sinon.stub().yields(null, 0) - @LockManager.releaseLock doc_id, @lockValue, @callback + return describe("when the lock has expired", function() { + beforeEach(function() { + this.client.eval = sinon.stub().yields(null, 0); + return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback); + }); - it 'should return an error if the lock has expired', -> - @callback.calledWith(new Error("tried to release timed out lock")).should.equal true + return it('should return an error if the lock has expired', function() { + return this.callback.calledWith(new Error("tried to release timed out lock")).should.equal(true); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.js b/services/document-updater/test/unit/coffee/LockManager/getLockTests.js index 7093ab223a..bdb301b8d5 100644 --- 
a/services/document-updater/test/unit/coffee/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.js @@ -1,79 +1,121 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/LockManager.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/LockManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe 'LockManager - getting the lock', -> - beforeEach -> - @LockManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": log:-> - "redis-sharelatex": - createClient : () => - auth:-> - "./Metrics": {inc: () ->} - "./Profiler": class Profiler - log: sinon.stub().returns { end: sinon.stub() } - end: sinon.stub() - @callback = sinon.stub() - @doc_id = "doc-id-123" +describe('LockManager - getting the lock', function() { + beforeEach(function() { + let Profiler; + this.LockManager = SandboxedModule.require(modulePath, { requires: { + "logger-sharelatex": { log() {} + }, + "redis-sharelatex": { + createClient : () => { + return {auth() {}}; + } + }, + "./Metrics": {inc() {}}, + "./Profiler": (Profiler = (function() { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }); + this.prototype.end = sinon.stub(); + } + }; + Profiler.initClass(); + return Profiler; + })()) + } + } + ); + this.callback = sinon.stub(); + return this.doc_id = "doc-id-123"; + }); - describe "when the lock is not set", -> - beforeEach (done) -> - @lockValue = "mock-lock-value" - @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, @lockValue) - @LockManager.getLock @doc_id, (args...) => - @callback(args...) 
- done() + describe("when the lock is not set", function() { + beforeEach(function(done) { + this.lockValue = "mock-lock-value"; + this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, this.lockValue); + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])); + return done(); + }); + }); - it "should try to get the lock", -> - @LockManager.tryLock - .calledWith(@doc_id) - .should.equal true + it("should try to get the lock", function() { + return this.LockManager.tryLock + .calledWith(this.doc_id) + .should.equal(true); + }); - it "should only need to try once", -> - @LockManager.tryLock.callCount.should.equal 1 + it("should only need to try once", function() { + return this.LockManager.tryLock.callCount.should.equal(1); + }); - it "should return the callback with the lock value", -> - @callback.calledWith(null, @lockValue).should.equal true + return it("should return the callback with the lock value", function() { + return this.callback.calledWith(null, this.lockValue).should.equal(true); + }); + }); - describe "when the lock is initially set", -> - beforeEach (done) -> - @lockValue = "mock-lock-value" - startTime = Date.now() - tries = 0 - @LockManager.LOCK_TEST_INTERVAL = 5 - @LockManager.tryLock = (doc_id, callback = (error, isFree) ->) => - if (Date.now() - startTime < 20) or (tries < 2) - tries = tries + 1 - callback null, false - else - callback null, true, @lockValue - sinon.spy @LockManager, "tryLock" + describe("when the lock is initially set", function() { + beforeEach(function(done) { + this.lockValue = "mock-lock-value"; + const startTime = Date.now(); + let tries = 0; + this.LockManager.LOCK_TEST_INTERVAL = 5; + this.LockManager.tryLock = (doc_id, callback) => { + if (callback == null) { callback = function(error, isFree) {}; } + if (((Date.now() - startTime) < 20) || (tries < 2)) { + tries = tries + 1; + return callback(null, false); + } else { + return callback(null, true, this.lockValue); + } + }; + sinon.spy(this.LockManager, "tryLock"); - @LockManager.getLock @doc_id, (args...) => - @callback(args...) - done() + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])); + return done(); + }); + }); - it "should call tryLock multiple times until free", -> - (@LockManager.tryLock.callCount > 1).should.equal true + it("should call tryLock multiple times until free", function() { + return (this.LockManager.tryLock.callCount > 1).should.equal(true); + }); - it "should return the callback with the lock value", -> - @callback.calledWith(null, @lockValue).should.equal true + return it("should return the callback with the lock value", function() { + return this.callback.calledWith(null, this.lockValue).should.equal(true); + }); + }); - describe "when the lock times out", -> - beforeEach (done) -> - time = Date.now() - @LockManager.MAX_LOCK_WAIT_TIME = 5 - @LockManager.tryLock = sinon.stub().callsArgWith(1, null, false) - @LockManager.getLock @doc_id, (args...) => - @callback(args...) 
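+      // Timing behaviour under test here: getLock appears to poll tryLock
+      // every LOCK_TEST_INTERVAL ms and to give up once MAX_LOCK_WAIT_TIME
+      // has elapsed, calling back with a "Timeout" error tagged with the
+      // doc_id (see the assertion below). Both constants are shrunk in these
+      // tests so the timeout path runs in milliseconds, roughly:
+      //
+      //   if (Date.now() - startTime > MAX_LOCK_WAIT_TIME) return callback(timeoutError);
+      //   setTimeout(tryAgain, LOCK_TEST_INTERVAL);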
- done() + return describe("when the lock times out", function() { + beforeEach(function(done) { + const time = Date.now(); + this.LockManager.MAX_LOCK_WAIT_TIME = 5; + this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false); + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])); + return done(); + }); + }); - it "should return the callback with an error", -> - e = new Error("Timeout") - e.doc_id = @doc_id - @callback.calledWith(e).should.equal true + return it("should return the callback with an error", function() { + const e = new Error("Timeout"); + e.doc_id = this.doc_id; + return this.callback.calledWith(e).should.equal(true); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js index 82de2f45b8..cdc3e34f1e 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js @@ -1,86 +1,132 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/LockManager.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/LockManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe 'LockManager - trying the lock', -> - beforeEach -> - @LockManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": log:-> - "redis-sharelatex": - createClient : () => - auth:-> - set: @set = sinon.stub() - "./Metrics": {inc: () ->} +describe('LockManager - trying the lock', function() { + beforeEach(function() { + let Profiler; + this.LockManager = SandboxedModule.require(modulePath, { requires: { + "logger-sharelatex": { log() {} + }, + "redis-sharelatex": { + createClient : () => { + return { + auth() {}, + set: (this.set = sinon.stub()) + }; + } + }, + "./Metrics": {inc() {}}, "settings-sharelatex": { - redis: - lock: - key_schema: - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - } - "./Profiler": @Profiler = class Profiler - log: sinon.stub().returns { end: sinon.stub() } - end: sinon.stub() + redis: { + lock: { + key_schema: { + blockingKey({doc_id}) { return `Blocking:${doc_id}`; } + } + } + } + }, + "./Profiler": (this.Profiler = (Profiler = (function() { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }); + this.prototype.end = sinon.stub(); + } + }; + Profiler.initClass(); + return Profiler; + })())) + } + } + ); - @callback = sinon.stub() - @doc_id = "doc-id-123" + this.callback = sinon.stub(); + return this.doc_id = "doc-id-123"; + }); - describe "when the lock is not set", -> - beforeEach -> - @lockValue = "mock-lock-value" - @LockManager.randomLock = sinon.stub().returns @lockValue - @set.callsArgWith(5, null, "OK") - @LockManager.tryLock @doc_id, @callback + describe("when the lock is not set", function() { + beforeEach(function() { + this.lockValue = "mock-lock-value"; + this.LockManager.randomLock = sinon.stub().returns(this.lockValue); + this.set.callsArgWith(5, null, "OK"); + 
return this.LockManager.tryLock(this.doc_id, this.callback); + }); - it "should set the lock key with an expiry if it is not set", -> - @set.calledWith("Blocking:#{@doc_id}", @lockValue, "EX", 30, "NX") - .should.equal true + it("should set the lock key with an expiry if it is not set", function() { + return this.set.calledWith(`Blocking:${this.doc_id}`, this.lockValue, "EX", 30, "NX") + .should.equal(true); + }); - it "should return the callback with true and the lock value", -> - @callback.calledWith(null, true, @lockValue).should.equal true + return it("should return the callback with true and the lock value", function() { + return this.callback.calledWith(null, true, this.lockValue).should.equal(true); + }); + }); - describe "when the lock is already set", -> - beforeEach -> - @set.callsArgWith(5, null, null) - @LockManager.tryLock @doc_id, @callback + describe("when the lock is already set", function() { + beforeEach(function() { + this.set.callsArgWith(5, null, null); + return this.LockManager.tryLock(this.doc_id, this.callback); + }); - it "should return the callback with false", -> - @callback.calledWith(null, false).should.equal true + return it("should return the callback with false", function() { + return this.callback.calledWith(null, false).should.equal(true); + }); + }); - describe "when it takes a long time for redis to set the lock", -> - beforeEach -> - @Profiler.prototype.end = () -> 7000 # take a long time - @Profiler.prototype.log = sinon.stub().returns { end: @Profiler.prototype.end } - @lockValue = "mock-lock-value" - @LockManager.randomLock = sinon.stub().returns @lockValue - @LockManager.releaseLock = sinon.stub().callsArgWith(2,null) - @set.callsArgWith(5, null, "OK") + return describe("when it takes a long time for redis to set the lock", function() { + beforeEach(function() { + this.Profiler.prototype.end = () => 7000; // take a long time + this.Profiler.prototype.log = sinon.stub().returns({ end: this.Profiler.prototype.end }); + this.lockValue = "mock-lock-value"; + this.LockManager.randomLock = sinon.stub().returns(this.lockValue); + this.LockManager.releaseLock = sinon.stub().callsArgWith(2,null); + return this.set.callsArgWith(5, null, "OK"); + }); - describe "in all cases", -> - beforeEach -> - @LockManager.tryLock @doc_id, @callback + describe("in all cases", function() { + beforeEach(function() { + return this.LockManager.tryLock(this.doc_id, this.callback); + }); - it "should set the lock key with an expiry if it is not set", -> - @set.calledWith("Blocking:#{@doc_id}", @lockValue, "EX", 30, "NX") - .should.equal true + it("should set the lock key with an expiry if it is not set", function() { + return this.set.calledWith(`Blocking:${this.doc_id}`, this.lockValue, "EX", 30, "NX") + .should.equal(true); + }); - it "should try to release the lock", -> - @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true + return it("should try to release the lock", function() { + return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); + }); + }); - describe "if the lock is released successfully", -> - beforeEach -> - @LockManager.releaseLock = sinon.stub().callsArgWith(2,null) - @LockManager.tryLock @doc_id, @callback + describe("if the lock is released successfully", function() { + beforeEach(function() { + this.LockManager.releaseLock = sinon.stub().callsArgWith(2,null); + return this.LockManager.tryLock(this.doc_id, this.callback); + }); - it "should return the callback with false", -> - 
@callback.calledWith(null, false).should.equal true + return it("should return the callback with false", function() { + return this.callback.calledWith(null, false).should.equal(true); + }); + }); - describe "if the lock has already timed out", -> - beforeEach -> - @LockManager.releaseLock = sinon.stub().callsArgWith(2, new Error("tried to release timed out lock")) - @LockManager.tryLock @doc_id, @callback + return describe("if the lock has already timed out", function() { + beforeEach(function() { + this.LockManager.releaseLock = sinon.stub().callsArgWith(2, new Error("tried to release timed out lock")); + return this.LockManager.tryLock(this.doc_id, this.callback); + }); - it "should return the callback with an error", -> - e = new Error("tried to release timed out lock") - @callback.calledWith(e).should.equal true + return it("should return the callback with an error", function() { + const e = new Error("tried to release timed out lock"); + return this.callback.calledWith(e).should.equal(true); + }); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js index 0ad69c3885..cbef821472 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js @@ -1,226 +1,304 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/PersistenceManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/PersistenceManager.js"; +const SandboxedModule = require('sandboxed-module'); +const Errors = require("../../../../app/js/Errors"); -describe "PersistenceManager", -> - beforeEach -> - @request = sinon.stub() - @request.defaults = () => @request - @PersistenceManager = SandboxedModule.require modulePath, requires: - "requestretry": @request - "settings-sharelatex": @Settings = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() +describe("PersistenceManager", function() { + beforeEach(function() { + let Timer; + this.request = sinon.stub(); + this.request.defaults = () => this.request; + this.PersistenceManager = SandboxedModule.require(modulePath, { requires: { + "requestretry": this.request, + "settings-sharelatex": (this.Settings = {}), + "./Metrics": (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()), inc: sinon.stub() - "logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()} - @project_id = "project-id-123" - @projectHistoryId = "history-id-123" - @doc_id = "doc-id-123" - @lines = ["one", "two", "three"] - @version = 42 - @callback = sinon.stub() - @ranges = { comments: "mock", entries: "mock" } - @pathname = '/a/b/c.tex' - @lastUpdatedAt = Date.now() - @lastUpdatedBy = 'last-author-id' - @Settings.apis = - web: - url: @url = "www.example.com" - user: @user = 
"sharelatex" - pass: @pass = "password" - - describe "getDoc", -> - beforeEach -> - @webResponse = { - lines: @lines, - version: @version, - ranges: @ranges - pathname: @pathname, - projectHistoryId: @projectHistoryId + }), + "logger-sharelatex": (this.logger = {log: sinon.stub(), err: sinon.stub()}) + } + }); + this.project_id = "project-id-123"; + this.projectHistoryId = "history-id-123"; + this.doc_id = "doc-id-123"; + this.lines = ["one", "two", "three"]; + this.version = 42; + this.callback = sinon.stub(); + this.ranges = { comments: "mock", entries: "mock" }; + this.pathname = '/a/b/c.tex'; + this.lastUpdatedAt = Date.now(); + this.lastUpdatedBy = 'last-author-id'; + return this.Settings.apis = { + web: { + url: (this.url = "www.example.com"), + user: (this.user = "sharelatex"), + pass: (this.pass = "password") } + }; + }); - describe "with a successful response from the web api", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(@webResponse)) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + describe("getDoc", function() { + beforeEach(function() { + return this.webResponse = { + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId + };}); - it "should call the web api", -> - @request + describe("with a successful response from the web api", function() { + beforeEach(function() { + this.request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(this.webResponse)); + return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); + }); + + it("should call the web api", function() { + return this.request .calledWith({ - url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" - method: "GET" - headers: + url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`, + method: "GET", + headers: { "accept": "application/json" - auth: - user: @user - pass: @pass + }, + auth: { + user: this.user, + pass: this.pass, sendImmediately: true - jar: false + }, + jar: false, timeout: 5000 }) - .should.equal true + .should.equal(true); + }); - it "should call the callback with the doc lines, version and ranges", -> - @callback - .calledWith(null, @lines, @version, @ranges, @pathname, @projectHistoryId) - .should.equal true + it("should call the callback with the doc lines, version and ranges", function() { + return this.callback + .calledWith(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId) + .should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); - it "should increment the metric", -> - @Metrics.inc.calledWith("getDoc", 1, {status: 200}).should.equal true + return it("should increment the metric", function() { + return this.Metrics.inc.calledWith("getDoc", 1, {status: 200}).should.equal(true); + }); + }); - describe "when request returns an error", -> - beforeEach -> - @error = new Error("oops") - @error.code = "EOOPS" - @request.callsArgWith(1, @error, null, null) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + describe("when request returns an error", function() { + beforeEach(function() { + this.error = new Error("oops"); + this.error.code = "EOOPS"; + this.request.callsArgWith(1, this.error, null, null); + return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it "should 
return the error", -> - @callback.calledWith(@error).should.equal true + it("should return the error", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); - it "should increment the metric", -> - @Metrics.inc.calledWith("getDoc", 1, {status: "EOOPS"}).should.equal true + return it("should increment the metric", function() { + return this.Metrics.inc.calledWith("getDoc", 1, {status: "EOOPS"}).should.equal(true); + }); + }); - describe "when the request returns 404", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + describe("when the request returns 404", function() { + beforeEach(function() { + this.request.callsArgWith(1, null, {statusCode: 404}, ""); + return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it "should return a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + it("should return a NotFoundError", function() { + return this.callback.calledWith(new Errors.NotFoundError("not found")).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); - it "should increment the metric", -> - @Metrics.inc.calledWith("getDoc", 1, {status: 404}).should.equal true + return it("should increment the metric", function() { + return this.Metrics.inc.calledWith("getDoc", 1, {status: 404}).should.equal(true); + }); + }); - describe "when the request returns an error status code", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + describe("when the request returns an error status code", function() { + beforeEach(function() { + this.request.callsArgWith(1, null, {statusCode: 500}, ""); + return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it "should return an error", -> - @callback.calledWith(new Error("web api error")).should.equal true + it("should return an error", function() { + return this.callback.calledWith(new Error("web api error")).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); - it "should increment the metric", -> - @Metrics.inc.calledWith("getDoc", 1, {status: 500}).should.equal true + return it("should increment the metric", function() { + return this.Metrics.inc.calledWith("getDoc", 1, {status: 500}).should.equal(true); + }); + }); - describe "when request returns an doc without lines", -> - beforeEach -> - delete @webResponse.lines - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(@webResponse)) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + describe("when request returns an doc without lines", function() { + beforeEach(function() { + delete this.webResponse.lines; + this.request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(this.webResponse)); + return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it "should 
return and error", -> - @callback.calledWith(new Error("web API response had no doc lines")).should.equal true + return it("should return and error", function() { + return this.callback.calledWith(new Error("web API response had no doc lines")).should.equal(true); + }); + }); - describe "when request returns an doc without a version", -> - beforeEach -> - delete @webResponse.version - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(@webResponse)) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + describe("when request returns an doc without a version", function() { + beforeEach(function() { + delete this.webResponse.version; + this.request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(this.webResponse)); + return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it "should return and error", -> - @callback.calledWith(new Error("web API response had no valid doc version")).should.equal true + return it("should return and error", function() { + return this.callback.calledWith(new Error("web API response had no valid doc version")).should.equal(true); + }); + }); - describe "when request returns an doc without a pathname", -> - beforeEach -> - delete @webResponse.pathname - @request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(@webResponse)) - @PersistenceManager.getDoc(@project_id, @doc_id, @callback) + return describe("when request returns an doc without a pathname", function() { + beforeEach(function() { + delete this.webResponse.pathname; + this.request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(this.webResponse)); + return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it "should return and error", -> - @callback.calledWith(new Error("web API response had no valid doc pathname")).should.equal true + return it("should return and error", function() { + return this.callback.calledWith(new Error("web API response had no valid doc pathname")).should.equal(true); + }); + }); + }); - describe "setDoc", -> - describe "with a successful response from the web api", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 200}) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback) + return describe("setDoc", function() { + describe("with a successful response from the web api", function() { + beforeEach(function() { + this.request.callsArgWith(1, null, {statusCode: 200}); + return this.PersistenceManager.setDoc(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy, this.callback); + }); - it "should call the web api", -> - @request + it("should call the web api", function() { + return this.request .calledWith({ - url: "#{@url}/project/#{@project_id}/doc/#{@doc_id}" - json: - lines: @lines - version: @version - ranges: @ranges - lastUpdatedAt: @lastUpdatedAt - lastUpdatedBy: @lastUpdatedBy - method: "POST" - auth: - user: @user - pass: @pass + url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`, + json: { + lines: this.lines, + version: this.version, + ranges: this.ranges, + lastUpdatedAt: this.lastUpdatedAt, + lastUpdatedBy: this.lastUpdatedBy + }, + method: "POST", + auth: { + user: this.user, + pass: this.pass, sendImmediately: true - jar: false + }, + jar: false, timeout: 5000 }) - .should.equal true + .should.equal(true); + }); - it "should call the callback without error", -> - @callback.calledWith(null).should.equal 
true + it("should call the callback without error", function() { + return this.callback.calledWith(null).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); - it "should increment the metric", -> - @Metrics.inc.calledWith("setDoc", 1, {status: 200}).should.equal true + return it("should increment the metric", function() { + return this.Metrics.inc.calledWith("setDoc", 1, {status: 200}).should.equal(true); + }); + }); - describe "when request returns an error", -> - beforeEach -> - @error = new Error("oops") - @error.code = "EOOPS" - @request.callsArgWith(1, @error, null, null) - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback) + describe("when request returns an error", function() { + beforeEach(function() { + this.error = new Error("oops"); + this.error.code = "EOOPS"; + this.request.callsArgWith(1, this.error, null, null); + return this.PersistenceManager.setDoc(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy, this.callback); + }); - it "should return the error", -> - @callback.calledWith(@error).should.equal true + it("should return the error", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); - it "should increment the metric", -> - @Metrics.inc.calledWith("setDoc", 1, {status: "EOOPS"}).should.equal true + return it("should increment the metric", function() { + return this.Metrics.inc.calledWith("setDoc", 1, {status: "EOOPS"}).should.equal(true); + }); + }); - describe "when the request returns 404", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 404}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback) + describe("when the request returns 404", function() { + beforeEach(function() { + this.request.callsArgWith(1, null, {statusCode: 404}, ""); + return this.PersistenceManager.setDoc(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy, this.callback); + }); - it "should return a NotFoundError", -> - @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true + it("should return a NotFoundError", function() { + return this.callback.calledWith(new Errors.NotFoundError("not found")).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); - it "should increment the metric", -> - @Metrics.inc.calledWith("setDoc", 1, {status: 404}).should.equal true + return it("should increment the metric", function() { + return this.Metrics.inc.calledWith("setDoc", 1, {status: 404}).should.equal(true); + }); + }); - describe "when the request returns an error status code", -> - beforeEach -> - @request.callsArgWith(1, null, {statusCode: 500}, "") - @PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback) + return describe("when the request returns an error status 
code", function() { + beforeEach(function() { + this.request.callsArgWith(1, null, {statusCode: 500}, ""); + return this.PersistenceManager.setDoc(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy, this.callback); + }); - it "should return an error", -> - @callback.calledWith(new Error("web api error")).should.equal true + it("should return an error", function() { + return this.callback.calledWith(new Error("web api error")).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); - it "should increment the metric", -> - @Metrics.inc.calledWith("setDoc", 1, {status: 500}).should.equal true + return it("should increment the metric", function() { + return this.Metrics.inc.calledWith("setDoc", 1, {status: 500}).should.equal(true); + }); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 9810b77d5f..6187749c18 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -1,122 +1,152 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/ProjectHistoryRedisManager.js" -SandboxedModule = require('sandboxed-module') -tk = require "timekeeper" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/ProjectHistoryRedisManager.js"; +const SandboxedModule = require('sandboxed-module'); +const tk = require("timekeeper"); -describe "ProjectHistoryRedisManager", -> - beforeEach -> - @project_id = "project-id-123" - @projectHistoryId = "history-id-123" - @user_id = "user-id-123" - @callback = sinon.stub() - @rclient = {} - tk.freeze(new Date()) - @ProjectHistoryRedisManager = SandboxedModule.require modulePath, - requires: - "settings-sharelatex": @settings = { - redis: - project_history: - key_schema: - projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}" - projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}" - } - "redis-sharelatex": - createClient: () => @rclient - "logger-sharelatex": - log:-> - "./Metrics": @metrics = { summary: sinon.stub()} - globals: - JSON: @JSON = JSON +describe("ProjectHistoryRedisManager", function() { + beforeEach(function() { + this.project_id = "project-id-123"; + this.projectHistoryId = "history-id-123"; + this.user_id = "user-id-123"; + this.callback = sinon.stub(); + this.rclient = {}; + tk.freeze(new Date()); + return this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, { + requires: { + "settings-sharelatex": (this.settings = { + redis: { + project_history: { + key_schema: { + projectHistoryOps({project_id}) { return `ProjectHistory:Ops:${project_id}`; }, + projectHistoryFirstOpTimestamp({project_id}) { return 
`ProjectHistory:FirstOpTimestamp:${project_id}`; } + } + } + } + }), + "redis-sharelatex": { + createClient: () => this.rclient + }, + "logger-sharelatex": { + log() {} + }, + "./Metrics": (this.metrics = { summary: sinon.stub()}) + }, + globals: { + JSON: (this.JSON = JSON) + } + } + ); + }); - afterEach -> - tk.reset() + afterEach(() => tk.reset()); - describe "queueOps", -> - beforeEach -> - @ops = ["mock-op-1", "mock-op-2"] - @multi = exec: sinon.stub() - @multi.rpush = sinon.stub() - @multi.setnx = sinon.stub() - @rclient.multi = () => @multi - # @rclient = multi: () => @multi - @ProjectHistoryRedisManager.queueOps @project_id, @ops..., @callback + describe("queueOps", function() { + beforeEach(function() { + this.ops = ["mock-op-1", "mock-op-2"]; + this.multi = {exec: sinon.stub()}; + this.multi.rpush = sinon.stub(); + this.multi.setnx = sinon.stub(); + this.rclient.multi = () => this.multi; + // @rclient = multi: () => @multi + return this.ProjectHistoryRedisManager.queueOps(this.project_id, ...Array.from(this.ops), this.callback); + }); - it "should queue an update", -> - @multi.rpush + it("should queue an update", function() { + return this.multi.rpush .calledWithExactly( - "ProjectHistory:Ops:#{@project_id}" - @ops[0] - @ops[1] - ).should.equal true + `ProjectHistory:Ops:${this.project_id}`, + this.ops[0], + this.ops[1] + ).should.equal(true); + }); - it "should set the queue timestamp if not present", -> - @multi.setnx + return it("should set the queue timestamp if not present", function() { + return this.multi.setnx .calledWithExactly( - "ProjectHistory:FirstOpTimestamp:#{@project_id}" + `ProjectHistory:FirstOpTimestamp:${this.project_id}`, Date.now() - ).should.equal true + ).should.equal(true); + }); + }); - describe "queueRenameEntity", -> - beforeEach () -> - @file_id = 1234 + describe("queueRenameEntity", function() { + beforeEach(function() { + this.file_id = 1234; - @rawUpdate = - pathname: @pathname = '/old' - newPathname: @newPathname = '/new' - version: @version = 2 + this.rawUpdate = { + pathname: (this.pathname = '/old'), + newPathname: (this.newPathname = '/new'), + version: (this.version = 2) + }; - @ProjectHistoryRedisManager.queueOps = sinon.stub() - @ProjectHistoryRedisManager.queueRenameEntity @project_id, @projectHistoryId, 'file', @file_id, @user_id, @rawUpdate, @callback + this.ProjectHistoryRedisManager.queueOps = sinon.stub(); + return this.ProjectHistoryRedisManager.queueRenameEntity(this.project_id, this.projectHistoryId, 'file', this.file_id, this.user_id, this.rawUpdate, this.callback); + }); - it "should queue an update", -> - update = - pathname: @pathname - new_pathname: @newPathname - meta: - user_id: @user_id + return it("should queue an update", function() { + const update = { + pathname: this.pathname, + new_pathname: this.newPathname, + meta: { + user_id: this.user_id, ts: new Date() - version: @version - projectHistoryId: @projectHistoryId - file: @file_id + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + file: this.file_id + }; - @ProjectHistoryRedisManager.queueOps - .calledWithExactly(@project_id, @JSON.stringify(update), @callback) - .should.equal true + return this.ProjectHistoryRedisManager.queueOps + .calledWithExactly(this.project_id, this.JSON.stringify(update), this.callback) + .should.equal(true); + }); + }); - describe "queueAddEntity", -> - beforeEach () -> - @rclient.rpush = sinon.stub().yields() - @doc_id = 1234 + return describe("queueAddEntity", function() { + beforeEach(function() { + 
this.rclient.rpush = sinon.stub().yields(); + this.doc_id = 1234; - @rawUpdate = - pathname: @pathname = '/old' - docLines: @docLines = 'a\nb' - version: @version = 2 - url: @url = 'filestore.example.com' + this.rawUpdate = { + pathname: (this.pathname = '/old'), + docLines: (this.docLines = 'a\nb'), + version: (this.version = 2), + url: (this.url = 'filestore.example.com') + }; - @ProjectHistoryRedisManager.queueOps = sinon.stub() - @ProjectHistoryRedisManager.queueAddEntity @project_id, @projectHistoryId, 'doc', @doc_id, @user_id, @rawUpdate, @callback + this.ProjectHistoryRedisManager.queueOps = sinon.stub(); + return this.ProjectHistoryRedisManager.queueAddEntity(this.project_id, this.projectHistoryId, 'doc', this.doc_id, this.user_id, this.rawUpdate, this.callback); + }); - it "should queue an update", -> - update = - pathname: @pathname - docLines: @docLines - url: @url - meta: - user_id: @user_id + it("should queue an update", function() { + const update = { + pathname: this.pathname, + docLines: this.docLines, + url: this.url, + meta: { + user_id: this.user_id, ts: new Date() - version: @version - projectHistoryId: @projectHistoryId - doc: @doc_id + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + doc: this.doc_id + }; - @ProjectHistoryRedisManager.queueOps - .calledWithExactly(@project_id, @JSON.stringify(update), @callback) - .should.equal true + return this.ProjectHistoryRedisManager.queueOps + .calledWithExactly(this.project_id, this.JSON.stringify(update), this.callback) + .should.equal(true); + }); - describe "queueResyncProjectStructure", -> - it "should queue an update", -> + describe("queueResyncProjectStructure", () => it("should queue an update", function() {})); - describe "queueResyncDocContent", -> - it "should queue an update", -> + return describe("queueResyncDocContent", () => it("should queue an update", function() {})); + }); +}); diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js index 596d827726..ec572d7715 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js @@ -1,86 +1,125 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/ProjectManager.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/ProjectManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe "ProjectManager - flushAndDeleteProject", -> - beforeEach -> - @ProjectManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} - "./DocumentManager": @DocumentManager = {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./HistoryManager": @HistoryManager = - flushProjectChanges: sinon.stub().callsArg(2) - "./Metrics": @Metrics = - Timer: class Timer - done: 
sinon.stub() - @project_id = "project-id-123" - @callback = sinon.stub() +describe("ProjectManager - flushAndDeleteProject", function() { + beforeEach(function() { + let Timer; + this.ProjectManager = SandboxedModule.require(modulePath, { requires: { + "./RedisManager": (this.RedisManager = {}), + "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), + "./DocumentManager": (this.DocumentManager = {}), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), + "./HistoryManager": (this.HistoryManager = + {flushProjectChanges: sinon.stub().callsArg(2)}), + "./Metrics": (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()) + }) + } + } + ); + this.project_id = "project-id-123"; + return this.callback = sinon.stub(); + }); - describe "successfully", -> - beforeEach (done) -> - @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] - @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(3) - @ProjectManager.flushAndDeleteProjectWithLocks @project_id, {}, (error) => - @callback(error) - done() + describe("successfully", function() { + beforeEach(function(done) { + this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; + this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); + this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(3); + return this.ProjectManager.flushAndDeleteProjectWithLocks(this.project_id, {}, error => { + this.callback(error); + return done(); + }); + }); - it "should get the doc ids in the project", -> - @RedisManager.getDocIdsInProject - .calledWith(@project_id) - .should.equal true + it("should get the doc ids in the project", function() { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true); + }); - it "should delete each doc in the project", -> - for doc_id in @doc_ids - @DocumentManager.flushAndDeleteDocWithLock - .calledWith(@project_id, doc_id, {}) - .should.equal true + it("should delete each doc in the project", function() { + return Array.from(this.doc_ids).map((doc_id) => + this.DocumentManager.flushAndDeleteDocWithLock + .calledWith(this.project_id, doc_id, {}) + .should.equal(true)); + }); - it "should flush project history", -> - @HistoryManager.flushProjectChanges - .calledWith(@project_id, {}) - .should.equal true + it("should flush project history", function() { + return this.HistoryManager.flushProjectChanges + .calledWith(this.project_id, {}) + .should.equal(true); + }); - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true + it("should call the callback without error", function() { + return this.callback.calledWith(null).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "when a doc errors", -> - beforeEach (done) -> - @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] - @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @DocumentManager.flushAndDeleteDocWithLock = sinon.spy (project_id, doc_id, options, callback) => - if doc_id == "doc-id-1" - callback(@error = new Error("oops, something went wrong")) - else - callback() - 
@ProjectManager.flushAndDeleteProjectWithLocks @project_id, {}, (error) => - @callback(error) - done() + return describe("when a doc errors", function() { + beforeEach(function(done) { + this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; + this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); + this.DocumentManager.flushAndDeleteDocWithLock = sinon.spy((project_id, doc_id, options, callback) => { + if (doc_id === "doc-id-1") { + return callback(this.error = new Error("oops, something went wrong")); + } else { + return callback(); + } + }); + return this.ProjectManager.flushAndDeleteProjectWithLocks(this.project_id, {}, error => { + this.callback(error); + return done(); + }); + }); - it "should still flush each doc in the project", -> - for doc_id in @doc_ids - @DocumentManager.flushAndDeleteDocWithLock - .calledWith(@project_id, doc_id, {}) - .should.equal true + it("should still flush each doc in the project", function() { + return Array.from(this.doc_ids).map((doc_id) => + this.DocumentManager.flushAndDeleteDocWithLock + .calledWith(this.project_id, doc_id, {}) + .should.equal(true)); + }); - it "should still flush project history", -> - @HistoryManager.flushProjectChanges - .calledWith(@project_id, {}) - .should.equal true + it("should still flush project history", function() { + return this.HistoryManager.flushProjectChanges + .calledWith(this.project_id, {}) + .should.equal(true); + }); - it "should record the error", -> - @logger.error - .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-1", "error deleting doc") - .should.equal true + it("should record the error", function() { + return this.logger.error + .calledWith({err: this.error, project_id: this.project_id, doc_id: "doc-id-1"}, "error deleting doc") + .should.equal(true); + }); - it "should call the callback with an error", -> - @callback.calledWith(new Error()).should.equal true + it("should call the callback with an error", function() { + return this.callback.calledWith(new Error()).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js index a5fe3805d5..7160bbca10 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js @@ -1,75 +1,114 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/ProjectManager.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/ProjectManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe "ProjectManager - flushProject", -> - beforeEach -> - @ProjectManager = SandboxedModule.require modulePath, 
requires: - "./RedisManager": @RedisManager = {} - "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} - "./DocumentManager": @DocumentManager = {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./HistoryManager": @HistoryManager = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - @project_id = "project-id-123" - @callback = sinon.stub() +describe("ProjectManager - flushProject", function() { + beforeEach(function() { + let Timer; + this.ProjectManager = SandboxedModule.require(modulePath, { requires: { + "./RedisManager": (this.RedisManager = {}), + "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), + "./DocumentManager": (this.DocumentManager = {}), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), + "./HistoryManager": (this.HistoryManager = {}), + "./Metrics": (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()) + }) + } + } + ); + this.project_id = "project-id-123"; + return this.callback = sinon.stub(); + }); - describe "successfully", -> - beforeEach (done) -> - @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] - @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2) - @ProjectManager.flushProjectWithLocks @project_id, (error) => - @callback(error) - done() + describe("successfully", function() { + beforeEach(function(done) { + this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; + this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); + this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2); + return this.ProjectManager.flushProjectWithLocks(this.project_id, error => { + this.callback(error); + return done(); + }); + }); - it "should get the doc ids in the project", -> - @RedisManager.getDocIdsInProject - .calledWith(@project_id) - .should.equal true + it("should get the doc ids in the project", function() { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true); + }); - it "should flush each doc in the project", -> - for doc_id in @doc_ids - @DocumentManager.flushDocIfLoadedWithLock - .calledWith(@project_id, doc_id) - .should.equal true + it("should flush each doc in the project", function() { + return Array.from(this.doc_ids).map((doc_id) => + this.DocumentManager.flushDocIfLoadedWithLock + .calledWith(this.project_id, doc_id) + .should.equal(true)); + }); - it "should call the callback without error", -> - @callback.calledWith(null).should.equal true + it("should call the callback without error", function() { + return this.callback.calledWith(null).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "when a doc errors", -> - beforeEach (done) -> - @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] - @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @DocumentManager.flushDocIfLoadedWithLock = sinon.spy (project_id, doc_id, callback = (error) ->) => - if doc_id == "doc-id-1" - callback(@error = new Error("oops, something went wrong")) - else - callback() - @ProjectManager.flushProjectWithLocks @project_id, 
(error) => - @callback(error) - done() + return describe("when a doc errors", function() { + beforeEach(function(done) { + this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; + this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); + this.DocumentManager.flushDocIfLoadedWithLock = sinon.spy((project_id, doc_id, callback) => { + if (callback == null) { callback = function(error) {}; } + if (doc_id === "doc-id-1") { + return callback(this.error = new Error("oops, something went wrong")); + } else { + return callback(); + } + }); + return this.ProjectManager.flushProjectWithLocks(this.project_id, error => { + this.callback(error); + return done(); + }); + }); - it "should still flush each doc in the project", -> - for doc_id in @doc_ids - @DocumentManager.flushDocIfLoadedWithLock - .calledWith(@project_id, doc_id) - .should.equal true + it("should still flush each doc in the project", function() { + return Array.from(this.doc_ids).map((doc_id) => + this.DocumentManager.flushDocIfLoadedWithLock + .calledWith(this.project_id, doc_id) + .should.equal(true)); + }); - it "should record the error", -> - @logger.error - .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-1", "error flushing doc") - .should.equal true + it("should record the error", function() { + return this.logger.error + .calledWith({err: this.error, project_id: this.project_id, doc_id: "doc-id-1"}, "error flushing doc") + .should.equal(true); + }); - it "should call the callback with an error", -> - @callback.calledWith(new Error()).should.equal true + it("should call the callback with an error", function() { + return this.callback.calledWith(new Error()).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js index 11f23bbd4c..98fd1e825b 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js @@ -1,118 +1,161 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/ProjectManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors.js" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/ProjectManager.js"; +const SandboxedModule = require('sandboxed-module'); +const Errors = require("../../../../app/js/Errors.js"); -describe "ProjectManager - getProjectDocsAndFlushIfOld", -> - beforeEach -> - @ProjectManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} - "./DocumentManager": @DocumentManager = {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./HistoryManager": @HistoryManager = {} - "./Metrics": @Metrics = - Timer: class 
Timer - done: sinon.stub() - @project_id = "project-id-123" - @callback = sinon.stub() - @doc_versions = [111, 222, 333] +describe("ProjectManager - getProjectDocsAndFlushIfOld", function() { + beforeEach(function() { + let Timer; + this.ProjectManager = SandboxedModule.require(modulePath, { requires: { + "./RedisManager": (this.RedisManager = {}), + "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), + "./DocumentManager": (this.DocumentManager = {}), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), + "./HistoryManager": (this.HistoryManager = {}), + "./Metrics": (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()) + }) + } + } + ); + this.project_id = "project-id-123"; + this.callback = sinon.stub(); + return this.doc_versions = [111, 222, 333];}); - describe "successfully", -> - beforeEach (done) -> - @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] - @doc_lines = [["aaa","aaa"],["bbb","bbb"],["ccc","ccc"]] - @docs = [ - {_id: @doc_ids[0], lines: @doc_lines[0], v: @doc_versions[0]} - {_id: @doc_ids[1], lines: @doc_lines[1], v: @doc_versions[1]} - {_id: @doc_ids[2], lines: @doc_lines[2], v: @doc_versions[2]} - ] - @RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null) - @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub() - @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, @doc_ids[0]) - .callsArgWith(2, null, @doc_lines[0], @doc_versions[0]) - @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, @doc_ids[1]) - .callsArgWith(2, null, @doc_lines[1], @doc_versions[1]) - @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, @doc_ids[2]) - .callsArgWith(2, null, @doc_lines[2], @doc_versions[2]) - @ProjectManager.getProjectDocsAndFlushIfOld @project_id, @projectStateHash, @excludeVersions, (error, docs) => - @callback(error, docs) - done() + describe("successfully", function() { + beforeEach(function(done) { + this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; + this.doc_lines = [["aaa","aaa"],["bbb","bbb"],["ccc","ccc"]]; + this.docs = [ + {_id: this.doc_ids[0], lines: this.doc_lines[0], v: this.doc_versions[0]}, + {_id: this.doc_ids[1], lines: this.doc_lines[1], v: this.doc_versions[1]}, + {_id: this.doc_ids[2], lines: this.doc_lines[2], v: this.doc_versions[2]} + ]; + this.RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null); + this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); + this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub(); + this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, this.doc_ids[0]) + .callsArgWith(2, null, this.doc_lines[0], this.doc_versions[0]); + this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, this.doc_ids[1]) + .callsArgWith(2, null, this.doc_lines[1], this.doc_versions[1]); + this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, this.doc_ids[2]) + .callsArgWith(2, null, this.doc_lines[2], this.doc_versions[2]); + return this.ProjectManager.getProjectDocsAndFlushIfOld(this.project_id, this.projectStateHash, this.excludeVersions, (error, docs) => { + this.callback(error, docs); + return done(); + }); + }); - it "should check the project state", -> - @RedisManager.checkOrSetProjectState - 
.calledWith(@project_id, @projectStateHash) - .should.equal true + it("should check the project state", function() { + return this.RedisManager.checkOrSetProjectState + .calledWith(this.project_id, this.projectStateHash) + .should.equal(true); + }); - it "should get the doc ids in the project", -> - @RedisManager.getDocIdsInProject - .calledWith(@project_id) - .should.equal true + it("should get the doc ids in the project", function() { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true); + }); - it "should call the callback without error", -> - @callback.calledWith(null, @docs).should.equal true + it("should call the callback without error", function() { + return this.callback.calledWith(null, this.docs).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "when the state does not match", -> - beforeEach (done) -> - @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] - @RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null, true) - @ProjectManager.getProjectDocsAndFlushIfOld @project_id, @projectStateHash, @excludeVersions, (error, docs) => - @callback(error, docs) - done() + describe("when the state does not match", function() { + beforeEach(function(done) { + this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; + this.RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null, true); + return this.ProjectManager.getProjectDocsAndFlushIfOld(this.project_id, this.projectStateHash, this.excludeVersions, (error, docs) => { + this.callback(error, docs); + return done(); + }); + }); - it "should check the project state", -> - @RedisManager.checkOrSetProjectState - .calledWith(@project_id, @projectStateHash) - .should.equal true + it("should check the project state", function() { + return this.RedisManager.checkOrSetProjectState + .calledWith(this.project_id, this.projectStateHash) + .should.equal(true); + }); - it "should call the callback with an error", -> - @callback.calledWith(new Errors.ProjectStateChangedError("project state changed")).should.equal true + it("should call the callback with an error", function() { + return this.callback.calledWith(new Errors.ProjectStateChangedError("project state changed")).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "when a doc errors", -> - beforeEach (done) -> - @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"] - @RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null) - @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids) - @DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub() - @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, "doc-id-1") - .callsArgWith(2, null, ["test doc content"], @doc_versions[1]) - @DocumentManager.getDocAndFlushIfOldWithLock.withArgs(@project_id, "doc-id-2") - .callsArgWith(2, @error = new Error("oops")) # trigger an error - @ProjectManager.getProjectDocsAndFlushIfOld @project_id, @projectStateHash, @excludeVersions, (error, docs) => - @callback(error) - done() + describe("when a doc errors", function() { + beforeEach(function(done) { + this.doc_ids = ["doc-id-1", "doc-id-2", 
"doc-id-3"]; + this.RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null); + this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); + this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub(); + this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, "doc-id-1") + .callsArgWith(2, null, ["test doc content"], this.doc_versions[1]); + this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, "doc-id-2") + .callsArgWith(2, (this.error = new Error("oops"))); // trigger an error + return this.ProjectManager.getProjectDocsAndFlushIfOld(this.project_id, this.projectStateHash, this.excludeVersions, (error, docs) => { + this.callback(error); + return done(); + }); + }); - it "should record the error", -> - @logger.error - .calledWith(err: @error, project_id: @project_id, doc_id: "doc-id-2", "error getting project doc lines in getProjectDocsAndFlushIfOld") - .should.equal true + it("should record the error", function() { + return this.logger.error + .calledWith({err: this.error, project_id: this.project_id, doc_id: "doc-id-2"}, "error getting project doc lines in getProjectDocsAndFlushIfOld") + .should.equal(true); + }); - it "should call the callback with an error", -> - @callback.calledWith(new Error("oops")).should.equal true + it("should call the callback with an error", function() { + return this.callback.calledWith(new Error("oops")).should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "clearing the project state with clearProjectState", -> - beforeEach (done) -> - @RedisManager.clearProjectState = sinon.stub().callsArg(1) - @ProjectManager.clearProjectState @project_id, (error) => - @callback(error) - done() + return describe("clearing the project state with clearProjectState", function() { + beforeEach(function(done) { + this.RedisManager.clearProjectState = sinon.stub().callsArg(1); + return this.ProjectManager.clearProjectState(this.project_id, error => { + this.callback(error); + return done(); + }); + }); - it "should clear the project state", -> - @RedisManager.clearProjectState - .calledWith(@project_id) - .should.equal true + it("should clear the project state", function() { + return this.RedisManager.clearProjectState + .calledWith(this.project_id) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js index 635e562669..2c20c7322f 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js @@ -1,180 +1,242 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/ProjectManager.js" -SandboxedModule = require('sandboxed-module') -_ = require('lodash') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: 
https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/ProjectManager.js"; +const SandboxedModule = require('sandboxed-module'); +const _ = require('lodash'); -describe "ProjectManager", -> - beforeEach -> - @ProjectManager = SandboxedModule.require modulePath, requires: - "./RedisManager": @RedisManager = {} - "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} - "./DocumentManager": @DocumentManager = {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./HistoryManager": @HistoryManager = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() +describe("ProjectManager", function() { + beforeEach(function() { + let Timer; + this.ProjectManager = SandboxedModule.require(modulePath, { requires: { + "./RedisManager": (this.RedisManager = {}), + "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), + "./DocumentManager": (this.DocumentManager = {}), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), + "./HistoryManager": (this.HistoryManager = {}), + "./Metrics": (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()) + }) + } + } + ); - @project_id = "project-id-123" - @projectHistoryId = 'history-id-123' - @user_id = "user-id-123" - @version = 1234567 - @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false) - @HistoryManager.flushProjectChangesAsync = sinon.stub() - @callback = sinon.stub() + this.project_id = "project-id-123"; + this.projectHistoryId = 'history-id-123'; + this.user_id = "user-id-123"; + this.version = 1234567; + this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false); + this.HistoryManager.flushProjectChangesAsync = sinon.stub(); + return this.callback = sinon.stub(); + }); - describe "updateProjectWithLocks", -> - describe "rename operations", -> - beforeEach -> - @firstDocUpdate = - id: 1 - pathname: 'foo' + return describe("updateProjectWithLocks", function() { + describe("rename operations", function() { + beforeEach(function() { + this.firstDocUpdate = { + id: 1, + pathname: 'foo', newPathname: 'foo' - @secondDocUpdate = - id: 2 - pathname: 'bar' + }; + this.secondDocUpdate = { + id: 2, + pathname: 'bar', newPathname: 'bar2' - @docUpdates = [ @firstDocUpdate, @secondDocUpdate ] - @firstFileUpdate = - id: 2 - pathname: 'bar' + }; + this.docUpdates = [ this.firstDocUpdate, this.secondDocUpdate ]; + this.firstFileUpdate = { + id: 2, + pathname: 'bar', newPathname: 'bar2' - @fileUpdates = [ @firstFileUpdate ] - @DocumentManager.renameDocWithLock = sinon.stub().yields() - @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields() + }; + this.fileUpdates = [ this.firstFileUpdate ]; + this.DocumentManager.renameDocWithLock = sinon.stub().yields(); + return this.ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(); + }); - describe "successfully", -> - beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + describe("successfully", function() { + beforeEach(function() { + return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, 
this.callback); + }); - it "should rename the docs in the updates", -> - firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) - secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) - @DocumentManager.renameDocWithLock - .calledWith(@project_id, @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion, @projectHistoryId) - .should.equal true - @DocumentManager.renameDocWithLock - .calledWith(@project_id, @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion, @projectHistoryId) - .should.equal true + it("should rename the docs in the updates", function() { + const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {version: `${this.version}.0`}); + const secondDocUpdateWithVersion = _.extend({}, this.secondDocUpdate, {version: `${this.version}.1`}); + this.DocumentManager.renameDocWithLock + .calledWith(this.project_id, this.firstDocUpdate.id, this.user_id, firstDocUpdateWithVersion, this.projectHistoryId) + .should.equal(true); + return this.DocumentManager.renameDocWithLock + .calledWith(this.project_id, this.secondDocUpdate.id, this.user_id, secondDocUpdateWithVersion, this.projectHistoryId) + .should.equal(true); + }); - it "should rename the files in the updates", -> - firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) - @ProjectHistoryRedisManager.queueRenameEntity - .calledWith(@project_id, @projectHistoryId, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) - .should.equal true + it("should rename the files in the updates", function() { + const firstFileUpdateWithVersion = _.extend({}, this.firstFileUpdate, {version: `${this.version}.2`}); + return this.ProjectHistoryRedisManager.queueRenameEntity + .calledWith(this.project_id, this.projectHistoryId, 'file', this.firstFileUpdate.id, this.user_id, firstFileUpdateWithVersion) + .should.equal(true); + }); - it "should not flush the history", -> - @HistoryManager.flushProjectChangesAsync - .calledWith(@project_id) - .should.equal false + it("should not flush the history", function() { + return this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(false); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "when renaming a doc fails", -> - beforeEach -> - @error = new Error('error') - @DocumentManager.renameDocWithLock = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + describe("when renaming a doc fails", function() { + beforeEach(function() { + this.error = new Error('error'); + this.DocumentManager.renameDocWithLock = sinon.stub().yields(this.error); + return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); + }); - it "should call the callback with the error", -> - @callback.calledWith(@error).should.equal true + return it("should call the callback with the error", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); - describe "when renaming a file fails", -> - beforeEach -> - @error = new Error('error') - @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, 
@user_id, @docUpdates, @fileUpdates, @version, @callback + describe("when renaming a file fails", function() { + beforeEach(function() { + this.error = new Error('error'); + this.ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(this.error); + return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); + }); - it "should call the callback with the error", -> - @callback.calledWith(@error).should.equal true + return it("should call the callback with the error", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); - describe "with enough ops to flush", -> - beforeEach -> - @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + return describe("with enough ops to flush", function() { + beforeEach(function() { + this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true); + return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); + }); - it "should flush the history", -> - @HistoryManager.flushProjectChangesAsync - .calledWith(@project_id) - .should.equal true + return it("should flush the history", function() { + return this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(true); + }); + }); + }); - describe "add operations", -> - beforeEach -> - @firstDocUpdate = - id: 1 + return describe("add operations", function() { + beforeEach(function() { + this.firstDocUpdate = { + id: 1, docLines: "a\nb" - @secondDocUpdate = - id: 2 + }; + this.secondDocUpdate = { + id: 2, docLines: "a\nb" - @docUpdates = [ @firstDocUpdate, @secondDocUpdate ] - @firstFileUpdate = - id: 3 + }; + this.docUpdates = [ this.firstDocUpdate, this.secondDocUpdate ]; + this.firstFileUpdate = { + id: 3, url: 'filestore.example.com/2' - @secondFileUpdate = - id: 4 + }; + this.secondFileUpdate = { + id: 4, url: 'filestore.example.com/3' - @fileUpdates = [ @firstFileUpdate, @secondFileUpdate ] - @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields() + }; + this.fileUpdates = [ this.firstFileUpdate, this.secondFileUpdate ]; + return this.ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(); + }); - describe "successfully", -> - beforeEach -> - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + describe("successfully", function() { + beforeEach(function() { + return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); + }); - it "should add the docs in the updates", -> - firstDocUpdateWithVersion = _.extend({}, @firstDocUpdate, {version: "#{@version}.0"}) - secondDocUpdateWithVersion = _.extend({}, @secondDocUpdate, {version: "#{@version}.1"}) - @ProjectHistoryRedisManager.queueAddEntity.getCall(0) - .calledWith(@project_id, @projectHistoryId, 'doc', @firstDocUpdate.id, @user_id, firstDocUpdateWithVersion) - .should.equal true - @ProjectHistoryRedisManager.queueAddEntity.getCall(1) - .calledWith(@project_id, @projectHistoryId, 'doc', @secondDocUpdate.id, @user_id, secondDocUpdateWithVersion) - .should.equal true + it("should add the docs in the updates", 
function() { + const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {version: `${this.version}.0`}); + const secondDocUpdateWithVersion = _.extend({}, this.secondDocUpdate, {version: `${this.version}.1`}); + this.ProjectHistoryRedisManager.queueAddEntity.getCall(0) + .calledWith(this.project_id, this.projectHistoryId, 'doc', this.firstDocUpdate.id, this.user_id, firstDocUpdateWithVersion) + .should.equal(true); + return this.ProjectHistoryRedisManager.queueAddEntity.getCall(1) + .calledWith(this.project_id, this.projectHistoryId, 'doc', this.secondDocUpdate.id, this.user_id, secondDocUpdateWithVersion) + .should.equal(true); + }); - it "should add the files in the updates", -> - firstFileUpdateWithVersion = _.extend({}, @firstFileUpdate, {version: "#{@version}.2"}) - secondFileUpdateWithVersion = _.extend({}, @secondFileUpdate, {version: "#{@version}.3"}) - @ProjectHistoryRedisManager.queueAddEntity.getCall(2) - .calledWith(@project_id, @projectHistoryId, 'file', @firstFileUpdate.id, @user_id, firstFileUpdateWithVersion) - .should.equal true - @ProjectHistoryRedisManager.queueAddEntity.getCall(3) - .calledWith(@project_id, @projectHistoryId, 'file', @secondFileUpdate.id, @user_id, secondFileUpdateWithVersion) - .should.equal true + it("should add the files in the updates", function() { + const firstFileUpdateWithVersion = _.extend({}, this.firstFileUpdate, {version: `${this.version}.2`}); + const secondFileUpdateWithVersion = _.extend({}, this.secondFileUpdate, {version: `${this.version}.3`}); + this.ProjectHistoryRedisManager.queueAddEntity.getCall(2) + .calledWith(this.project_id, this.projectHistoryId, 'file', this.firstFileUpdate.id, this.user_id, firstFileUpdateWithVersion) + .should.equal(true); + return this.ProjectHistoryRedisManager.queueAddEntity.getCall(3) + .calledWith(this.project_id, this.projectHistoryId, 'file', this.secondFileUpdate.id, this.user_id, secondFileUpdateWithVersion) + .should.equal(true); + }); - it "should not flush the history", -> - @HistoryManager.flushProjectChangesAsync - .calledWith(@project_id) - .should.equal false + it("should not flush the history", function() { + return this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(false); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "when adding a doc fails", -> - beforeEach -> - @error = new Error('error') - @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + describe("when adding a doc fails", function() { + beforeEach(function() { + this.error = new Error('error'); + this.ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(this.error); + return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); + }); - it "should call the callback with the error", -> - @callback.calledWith(@error).should.equal true + return it("should call the callback with the error", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); - describe "when adding a file fails", -> - beforeEach -> - @error = new Error('error') - @ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error) - 
@ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + describe("when adding a file fails", function() { + beforeEach(function() { + this.error = new Error('error'); + this.ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(this.error); + return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); + }); - it "should call the callback with the error", -> - @callback.calledWith(@error).should.equal true + return it("should call the callback with the error", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); - describe "with enough ops to flush", -> - beforeEach -> - @HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - @ProjectManager.updateProjectWithLocks @project_id, @projectHistoryId, @user_id, @docUpdates, @fileUpdates, @version, @callback + return describe("with enough ops to flush", function() { + beforeEach(function() { + this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true); + return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); + }); - it "should flush the history", -> - @HistoryManager.flushProjectChangesAsync - .calledWith(@project_id) - .should.equal true + return it("should flush the history", function() { + return this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(true); + }); + }); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js index 93d5d26e2f..df6a146d9b 100644 --- a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js @@ -1,316 +1,395 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../../app/js/RangesManager.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../../app/js/RangesManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe "RangesManager", -> - beforeEach -> - @RangesManager = SandboxedModule.require modulePath, - requires: - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } +describe("RangesManager", function() { + beforeEach(function() { + this.RangesManager = SandboxedModule.require(modulePath, { + requires: { + "logger-sharelatex": (this.logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }) + } + }); - @doc_id = "doc-id-123" - @project_id = "project-id-123" - @user_id = "user-id-123" - @callback = sinon.stub() + this.doc_id = "doc-id-123"; + this.project_id = "project-id-123"; + this.user_id = "user-id-123"; + return this.callback = sinon.stub(); + }); - describe "applyUpdate", -> - beforeEach -> - @updates = [{ - meta: - user_id: @user_id + 
describe("applyUpdate", function() { + beforeEach(function() { + this.updates = [{ + meta: { + user_id: this.user_id + }, op: [{ - i: "two " + i: "two ", p: 4 }] - }] - @entries = { + }]; + this.entries = { comments: [{ - op: - c: "three " + op: { + c: "three ", p: 4 - metadata: - user_id: @user_id - }] + }, + metadata: { + user_id: this.user_id + } + }], changes: [{ - op: - i: "five" + op: { + i: "five", p: 15 - metadata: - user_id: @user_id + }, + metadata: { + user_id: this.user_id + } }] - } - @newDocLines = ["one two three four five"] # old is "one three four five" + }; + return this.newDocLines = ["one two three four five"];}); // old is "one three four five" - describe "successfully", -> - beforeEach -> - @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + describe("successfully", function() { + beforeEach(function() { + return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); + }); - it "should return the modified the comments and changes", -> - @callback.called.should.equal true - [error, entries, ranges_were_collapsed] = @callback.args[0] - expect(error).to.be.null - expect(ranges_were_collapsed).to.equal false - entries.comments[0].op.should.deep.equal { - c: "three " + return it("should return the modified the comments and changes", function() { + this.callback.called.should.equal(true); + const [error, entries, ranges_were_collapsed] = Array.from(this.callback.args[0]); + expect(error).to.be.null; + expect(ranges_were_collapsed).to.equal(false); + entries.comments[0].op.should.deep.equal({ + c: "three ", p: 8 - } - entries.changes[0].op.should.deep.equal { - i: "five" + }); + return entries.changes[0].op.should.deep.equal({ + i: "five", p: 19 - } + }); + }); + }); - describe "with empty comments", -> - beforeEach -> - @entries.comments = [] - @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + describe("with empty comments", function() { + beforeEach(function() { + this.entries.comments = []; + return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); + }); - it "should return an object with no comments", -> - # Save space in redis and don't store just {} - @callback.called.should.equal true - [error, entries] = @callback.args[0] - expect(error).to.be.null - expect(entries.comments).to.be.undefined + return it("should return an object with no comments", function() { + // Save space in redis and don't store just {} + this.callback.called.should.equal(true); + const [error, entries] = Array.from(this.callback.args[0]); + expect(error).to.be.null; + return expect(entries.comments).to.be.undefined; + }); + }); - describe "with empty changes", -> - beforeEach -> - @entries.changes = [] - @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + describe("with empty changes", function() { + beforeEach(function() { + this.entries.changes = []; + return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); + }); - it "should return an object with no changes", -> - # Save space in redis and don't store just {} - @callback.called.should.equal true - [error, entries] = @callback.args[0] - expect(error).to.be.null - expect(entries.changes).to.be.undefined + return it("should return an object with no changes", function() { + // Save space in redis and don't 
store just {} + this.callback.called.should.equal(true); + const [error, entries] = Array.from(this.callback.args[0]); + expect(error).to.be.null; + return expect(entries.changes).to.be.undefined; + }); + }); - describe "with too many comments", -> - beforeEach -> - @RangesManager.MAX_COMMENTS = 2 - @updates = [{ - meta: - user_id: @user_id + describe("with too many comments", function() { + beforeEach(function() { + this.RangesManager.MAX_COMMENTS = 2; + this.updates = [{ + meta: { + user_id: this.user_id + }, op: [{ - c: "one" - p: 0 + c: "one", + p: 0, t: "thread-id-1" }] - }] - @entries = { + }]; + this.entries = { comments: [{ - op: - c: "three " - p: 4 + op: { + c: "three ", + p: 4, t: "thread-id-2" - metadata: - user_id: @user_id + }, + metadata: { + user_id: this.user_id + } }, { - op: - c: "four " - p: 10 + op: { + c: "four ", + p: 10, t: "thread-id-3" - metadata: - user_id: @user_id - }] + }, + metadata: { + user_id: this.user_id + } + }], changes: [] - } - @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + }; + return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); + }); - it "should return an error", -> - @callback.called.should.equal true - [error, entries] = @callback.args[0] - expect(error).to.not.be.null - expect(error.message).to.equal("too many comments or tracked changes") + return it("should return an error", function() { + this.callback.called.should.equal(true); + const [error, entries] = Array.from(this.callback.args[0]); + expect(error).to.not.be.null; + return expect(error.message).to.equal("too many comments or tracked changes"); + }); + }); - describe "with too many changes", -> - beforeEach -> - @RangesManager.MAX_CHANGES = 2 - @updates = [{ - meta: - user_id: @user_id + describe("with too many changes", function() { + beforeEach(function() { + this.RangesManager.MAX_CHANGES = 2; + this.updates = [{ + meta: { + user_id: this.user_id, tc: "track-changes-id-yes" + }, op: [{ - i: "one " + i: "one ", p: 0 }] - }] - @entries = { + }]; + this.entries = { changes: [{ - op: - i: "three" + op: { + i: "three", p: 4 - metadata: - user_id: @user_id + }, + metadata: { + user_id: this.user_id + } }, { - op: - i: "four" + op: { + i: "four", p: 10 - metadata: - user_id: @user_id - }] + }, + metadata: { + user_id: this.user_id + } + }], comments: [] - } - @newDocLines = ["one two three four"] - @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + }; + this.newDocLines = ["one two three four"]; + return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); + }); - it "should return an error", -> - # Save space in redis and don't store just {} - @callback.called.should.equal true - [error, entries] = @callback.args[0] - expect(error).to.not.be.null - expect(error.message).to.equal("too many comments or tracked changes") + return it("should return an error", function() { + // Save space in redis and don't store just {} + this.callback.called.should.equal(true); + const [error, entries] = Array.from(this.callback.args[0]); + expect(error).to.not.be.null; + return expect(error.message).to.equal("too many comments or tracked changes"); + }); + }); - describe "inconsistent changes", -> - beforeEach -> - @updates = [{ - meta: - user_id: @user_id + describe("inconsistent changes", function() { + beforeEach(function() { + this.updates = [{ + meta: { + user_id: 
this.user_id + }, op: [{ - c: "doesn't match" + c: "doesn't match", p: 0 }] - }] - @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback + }]; + return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); + }); - it "should return an error", -> - # Save space in redis and don't store just {} - @callback.called.should.equal true - [error, entries] = @callback.args[0] - expect(error).to.not.be.null - expect(error.message).to.equal("Change ({\"op\":{\"i\":\"five\",\"p\":15},\"metadata\":{\"user_id\":\"user-id-123\"}}) doesn't match text (\"our \")") + return it("should return an error", function() { + // Save space in redis and don't store just {} + this.callback.called.should.equal(true); + const [error, entries] = Array.from(this.callback.args[0]); + expect(error).to.not.be.null; + return expect(error.message).to.equal("Change ({\"op\":{\"i\":\"five\",\"p\":15},\"metadata\":{\"user_id\":\"user-id-123\"}}) doesn't match text (\"our \")"); + }); + }); - describe "with an update that collapses a range", -> - beforeEach -> - @updates = [{ - meta: - user_id: @user_id + return describe("with an update that collapses a range", function() { + beforeEach(function() { + this.updates = [{ + meta: { + user_id: this.user_id + }, op: [{ - d: "one" - p: 0 + d: "one", + p: 0, t: "thread-id-1" }] - }] - @entries = { + }]; + this.entries = { comments: [{ - op: - c: "n" - p: 1 + op: { + c: "n", + p: 1, t: "thread-id-2" - metadata: - user_id: @user_id - }] + }, + metadata: { + user_id: this.user_id + } + }], changes: [] + }; + return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); + }); + + return it("should return ranges_were_collapsed == true", function() { + this.callback.called.should.equal(true); + const [error, entries, ranges_were_collapsed] = Array.from(this.callback.args[0]); + return expect(ranges_were_collapsed).to.equal(true); + }); + }); + }); + + return describe("acceptChanges", function() { + beforeEach(function() { + this.RangesManager = SandboxedModule.require(modulePath, { + requires: { + "logger-sharelatex": (this.logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }), + "./RangesTracker":(this.RangesTracker = SandboxedModule.require("../../../../app/js/RangesTracker.js")) } - @RangesManager.applyUpdate @project_id, @doc_id, @entries, @updates, @newDocLines, @callback - - it "should return ranges_were_collapsed == true", -> - @callback.called.should.equal true - [error, entries, ranges_were_collapsed] = @callback.args[0] - expect(ranges_were_collapsed).to.equal true - - describe "acceptChanges", -> - beforeEach -> - @RangesManager = SandboxedModule.require modulePath, - requires: - "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - "./RangesTracker":@RangesTracker = SandboxedModule.require "../../../../app/js/RangesTracker.js" - - @ranges = { - comments: [] - changes: [{ - id: "a1" - op: - i: "lorem" - p: 0 - }, { - id: "a2" - op: - i: "ipsum" - p: 10 - }, { - id: "a3" - op: - i: "dolor" - p: 20 - }, { - id: "a4" - op: - i: "sit" - p: 30 - }, { - id: "a5" - op: - i: "amet" - p: 40 - }] } - @removeChangeIdsSpy = sinon.spy @RangesTracker.prototype, "removeChangeIds" + ); - describe "successfully with a single change", -> - beforeEach (done) -> - @change_ids = [ @ranges.changes[1].id ] - @RangesManager.acceptChanges @change_ids, @ranges, (err, ranges) 
=> - @rangesResponse = ranges - done() + this.ranges = { + comments: [], + changes: [{ + id: "a1", + op: { + i: "lorem", + p: 0 + } + }, { + id: "a2", + op: { + i: "ipsum", + p: 10 + } + }, { + id: "a3", + op: { + i: "dolor", + p: 20 + } + }, { + id: "a4", + op: { + i: "sit", + p: 30 + } + }, { + id: "a5", + op: { + i: "amet", + p: 40 + } + }] + }; + return this.removeChangeIdsSpy = sinon.spy(this.RangesTracker.prototype, "removeChangeIds"); + }); - it "should log the call with the correct number of changes", -> - @logger.log + describe("successfully with a single change", function() { + beforeEach(function(done) { + this.change_ids = [ this.ranges.changes[1].id ]; + return this.RangesManager.acceptChanges(this.change_ids, this.ranges, (err, ranges) => { + this.rangesResponse = ranges; + return done(); + }); + }); + + it("should log the call with the correct number of changes", function() { + return this.logger.log .calledWith("accepting 1 changes in ranges") - .should.equal true + .should.equal(true); + }); - it "should delegate the change removal to the ranges tracker", -> - @removeChangeIdsSpy - .calledWith(@change_ids) - .should.equal true + it("should delegate the change removal to the ranges tracker", function() { + return this.removeChangeIdsSpy + .calledWith(this.change_ids) + .should.equal(true); + }); - it "should remove the change", -> - expect(@rangesResponse.changes - .find((change) => change.id == @ranges.changes[1].id)) - .to.be.undefined + it("should remove the change", function() { + return expect(this.rangesResponse.changes + .find(change => change.id === this.ranges.changes[1].id)) + .to.be.undefined; + }); - it "should return the original number of changes minus 1", -> - @rangesResponse.changes.length - .should.equal @ranges.changes.length - 1 + it("should return the original number of changes minus 1", function() { + return this.rangesResponse.changes.length + .should.equal(this.ranges.changes.length - 1); + }); - it "should not touch other changes", -> - for i in [ 0, 2, 3, 4] - expect(@rangesResponse.changes - .find((change) => change.id == @ranges.changes[i].id)) - .to.deep.equal @ranges.changes[i] + return it("should not touch other changes", function() { + return [ 0, 2, 3, 4].map((i) => + expect(this.rangesResponse.changes + .find(change => change.id === this.ranges.changes[i].id)) + .to.deep.equal(this.ranges.changes[i])); + }); + }); - describe "successfully with multiple changes", -> - beforeEach (done) -> - @change_ids = [ @ranges.changes[1].id, @ranges.changes[3].id, @ranges.changes[4].id ] - @RangesManager.acceptChanges @change_ids, @ranges, (err, ranges) => - @rangesResponse = ranges - done() + return describe("successfully with multiple changes", function() { + beforeEach(function(done) { + this.change_ids = [ this.ranges.changes[1].id, this.ranges.changes[3].id, this.ranges.changes[4].id ]; + return this.RangesManager.acceptChanges(this.change_ids, this.ranges, (err, ranges) => { + this.rangesResponse = ranges; + return done(); + }); + }); - it "should log the call with the correct number of changes", -> - @logger.log - .calledWith("accepting #{ @change_ids.length } changes in ranges") - .should.equal true + it("should log the call with the correct number of changes", function() { + return this.logger.log + .calledWith(`accepting ${ this.change_ids.length } changes in ranges`) + .should.equal(true); + }); - it "should delegate the change removal to the ranges tracker", -> - @removeChangeIdsSpy - .calledWith(@change_ids) - .should.equal true + it("should 
delegate the change removal to the ranges tracker", function() { + return this.removeChangeIdsSpy + .calledWith(this.change_ids) + .should.equal(true); + }); - it "should remove the changes", -> - for i in [ 1, 3, 4] - expect(@rangesResponse.changes - .find((change) => change.id == @ranges.changes[1].id)) - .to.be.undefined + it("should remove the changes", function() { + return [ 1, 3, 4].map((i) => + expect(this.rangesResponse.changes + .find(change => change.id === this.ranges.changes[i].id)) + .to.be.undefined); + }); - it "should return the original number of changes minus the number of accepted changes", -> - @rangesResponse.changes.length - .should.equal @ranges.changes.length - 3 + it("should return the original number of changes minus the number of accepted changes", function() { + return this.rangesResponse.changes.length + .should.equal(this.ranges.changes.length - 3); + }); - it "should not touch other changes", -> - for i in [ 0, 2 ] - expect(@rangesResponse.changes - .find((change) => change.id == @ranges.changes[i].id)) - .to.deep.equal @ranges.changes[i] + return it("should not touch other changes", function() { + return [ 0, 2 ].map((i) => + expect(this.rangesResponse.changes + .find(change => change.id === this.ranges.changes[i].id)) + .to.deep.equal(this.ranges.changes[i])); + }); + }); +}); +}); diff --git a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js index fe5dc95327..1e5cd6a71c 100644 --- a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js +++ b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js @@ -1,88 +1,132 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../../app/js/RateLimitManager.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../../app/js/RateLimitManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe "RateLimitManager", -> - beforeEach -> - @RateLimitManager = SandboxedModule.require modulePath, requires: - "logger-sharelatex": @logger = { log: sinon.stub() } - "settings-sharelatex": @settings = {} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() +describe("RateLimitManager", function() { + beforeEach(function() { + let Timer; + this.RateLimitManager = SandboxedModule.require(modulePath, { requires: { + "logger-sharelatex": (this.logger = { log: sinon.stub() }), + "settings-sharelatex": (this.settings = {}), + "./Metrics": (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()), gauge: sinon.stub() - @callback = sinon.stub() - @RateLimiter = new @RateLimitManager(1) + }) + } + } + ); + this.callback = sinon.stub(); + return this.RateLimiter = new this.RateLimitManager(1); + }); - describe "for a single task", -> - beforeEach -> - @task = sinon.stub() - @RateLimiter.run @task, @callback + describe("for a single task", function() { + 
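// run() kicks each task off in the background and calls back immediately; the stub below never signals completion, so one worker stays active. + 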
beforeEach(function() { + this.task = sinon.stub(); + return this.RateLimiter.run(this.task, this.callback); + }); - it "should execute the task in the background", -> - @task.called.should.equal true + it("should execute the task in the background", function() { + return this.task.called.should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); - it "should finish with a worker count of one", -> - # because it's in the background - expect(@RateLimiter.ActiveWorkerCount).to.equal 1 + return it("should finish with a worker count of one", function() { + // because it's in the background + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(1); + }); + }); - describe "for multiple tasks", -> - beforeEach (done) -> - @task = sinon.stub() - @finalTask = sinon.stub() - task = (cb) => - @task() - setTimeout cb, 100 - finalTask = (cb) => - @finalTask() - setTimeout cb, 100 - @RateLimiter.run task, @callback - @RateLimiter.run task, @callback - @RateLimiter.run task, @callback - @RateLimiter.run finalTask, (err) => - @callback(err) - done() + describe("for multiple tasks", function() { + beforeEach(function(done) { + this.task = sinon.stub(); + this.finalTask = sinon.stub(); + const task = cb => { + this.task(); + return setTimeout(cb, 100); + }; + const finalTask = cb => { + this.finalTask(); + return setTimeout(cb, 100); + }; + this.RateLimiter.run(task, this.callback); + this.RateLimiter.run(task, this.callback); + this.RateLimiter.run(task, this.callback); + return this.RateLimiter.run(finalTask, err => { + this.callback(err); + return done(); + }); + }); - it "should execute the first three tasks", -> - @task.calledThrice.should.equal true + it("should execute the first three tasks", function() { + return this.task.calledThrice.should.equal(true); + }); - it "should execute the final task", -> - @finalTask.called.should.equal true + it("should execute the final task", function() { + return this.finalTask.called.should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); - it "should finish with worker count of zero", -> - expect(@RateLimiter.ActiveWorkerCount).to.equal 0 + return it("should finish with worker count of zero", function() { + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(0); + }); + }); - describe "for a mixture of long-running tasks", -> - beforeEach (done) -> - @task = sinon.stub() - @finalTask = sinon.stub() - finalTask = (cb) => - @finalTask() - setTimeout cb, 100 - @RateLimiter.run @task, @callback - @RateLimiter.run @task, @callback - @RateLimiter.run @task, @callback - @RateLimiter.run finalTask, (err) => - @callback(err) - done() + return describe("for a mixture of long-running tasks", function() { + beforeEach(function(done) { + this.task = sinon.stub(); + this.finalTask = sinon.stub(); + const finalTask = cb => { + this.finalTask(); + return setTimeout(cb, 100); + }; + this.RateLimiter.run(this.task, this.callback); + this.RateLimiter.run(this.task, this.callback); + this.RateLimiter.run(this.task, this.callback); + return this.RateLimiter.run(finalTask, err => { + this.callback(err); + return done(); + }); + }); - it "should execute the first three tasks", -> - @task.calledThrice.should.equal true + it("should execute the first three tasks", function() { + return 
this.task.calledThrice.should.equal(true); + }); - it "should execute the final task", -> - @finalTask.called.should.equal true + it("should execute the final task", function() { + return this.finalTask.called.should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); - it "should finish with worker count of three", -> - expect(@RateLimiter.ActiveWorkerCount).to.equal 3 + return it("should finish with worker count of three", function() { + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(3); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js index 429a03b971..135452f5a3 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -1,95 +1,129 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RealTimeRedisManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/RealTimeRedisManager.js"; +const SandboxedModule = require('sandboxed-module'); +const Errors = require("../../../../app/js/Errors"); -describe "RealTimeRedisManager", -> - beforeEach -> - @rclient = - auth: () -> +describe("RealTimeRedisManager", function() { + beforeEach(function() { + this.rclient = { + auth() {}, exec: sinon.stub() - @rclient.multi = () => @rclient - @pubsubClient = - publish: sinon.stub() - @RealTimeRedisManager = SandboxedModule.require modulePath, requires: - "redis-sharelatex": createClient: (config) => if (config.name is 'pubsub') then @pubsubClient else @rclient - "settings-sharelatex": - redis: - documentupdater: @settings = - key_schema: - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - pubsub: + }; + this.rclient.multi = () => this.rclient; + this.pubsubClient = + {publish: sinon.stub()}; + this.RealTimeRedisManager = SandboxedModule.require(modulePath, { requires: { + "redis-sharelatex": { createClient: config => (config.name === 'pubsub') ? 
this.pubsubClient : this.rclient + }, + "settings-sharelatex": { + redis: { + documentupdater: (this.settings = { + key_schema: { + pendingUpdates({doc_id}) { return `PendingUpdates:${doc_id}`; } + } + }), + pubsub: { name: "pubsub" - "logger-sharelatex": { log: () -> } - "crypto": @crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) } - "os": @os = {hostname: sinon.stub().returns("somehost")} - "./Metrics": @metrics = { summary: sinon.stub()} + } + } + }, + "logger-sharelatex": { log() {} }, + "crypto": (this.crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) }), + "os": (this.os = {hostname: sinon.stub().returns("somehost")}), + "./Metrics": (this.metrics = { summary: sinon.stub()}) + } + }); - @doc_id = "doc-id-123" - @project_id = "project-id-123" - @callback = sinon.stub() + this.doc_id = "doc-id-123"; + this.project_id = "project-id-123"; + return this.callback = sinon.stub(); + }); - describe "getPendingUpdatesForDoc", -> - beforeEach -> - @rclient.lrange = sinon.stub() - @rclient.ltrim = sinon.stub() + describe("getPendingUpdatesForDoc", function() { + beforeEach(function() { + this.rclient.lrange = sinon.stub(); + return this.rclient.ltrim = sinon.stub(); + }); - describe "successfully", -> - beforeEach -> - @updates = [ + describe("successfully", function() { + beforeEach(function() { + this.updates = [ + { op: [{ i: "foo", p: 4 }] }, { op: [{ i: "foo", p: 4 }] } - { op: [{ i: "foo", p: 4 }] } - ] - @jsonUpdates = @updates.map (update) -> JSON.stringify update - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) - @RealTimeRedisManager.getPendingUpdatesForDoc @doc_id, @callback + ]; + this.jsonUpdates = this.updates.map(update => JSON.stringify(update)); + this.rclient.exec = sinon.stub().callsArgWith(0, null, [this.jsonUpdates]); + return this.RealTimeRedisManager.getPendingUpdatesForDoc(this.doc_id, this.callback); + }); - it "should get the pending updates", -> - @rclient.lrange - .calledWith("PendingUpdates:#{@doc_id}", 0, 7) - .should.equal true + it("should get the pending updates", function() { + return this.rclient.lrange + .calledWith(`PendingUpdates:${this.doc_id}`, 0, 7) + .should.equal(true); + }); - it "should delete the pending updates", -> - @rclient.ltrim - .calledWith("PendingUpdates:#{@doc_id}", 8, -1) - .should.equal true + it("should delete the pending updates", function() { + return this.rclient.ltrim + .calledWith(`PendingUpdates:${this.doc_id}`, 8, -1) + .should.equal(true); + }); - it "should call the callback with the updates", -> - @callback.calledWith(null, @updates).should.equal true + return it("should call the callback with the updates", function() { + return this.callback.calledWith(null, this.updates).should.equal(true); + }); + }); - describe "when the JSON doesn't parse", -> - beforeEach -> - @jsonUpdates = [ - JSON.stringify { op: [{ i: "foo", p: 4 }] } + return describe("when the JSON doesn't parse", function() { + beforeEach(function() { + this.jsonUpdates = [ + JSON.stringify({ op: [{ i: "foo", p: 4 }] }), "broken json" - ] - @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates]) - @RealTimeRedisManager.getPendingUpdatesForDoc @doc_id, @callback + ]; + this.rclient.exec = sinon.stub().callsArgWith(0, null, [this.jsonUpdates]); + return this.RealTimeRedisManager.getPendingUpdatesForDoc(this.doc_id, this.callback); + }); - it "should return an error to the callback", -> - @callback.calledWith(new Error("JSON parse error")).should.equal 
true + return it("should return an error to the callback", function() { + return this.callback.calledWith(new Error("JSON parse error")).should.equal(true); + }); + }); + }); - describe "getUpdatesLength", -> - beforeEach -> - @rclient.llen = sinon.stub().yields(null, @length = 3) - @RealTimeRedisManager.getUpdatesLength @doc_id, @callback + describe("getUpdatesLength", function() { + beforeEach(function() { + this.rclient.llen = sinon.stub().yields(null, (this.length = 3)); + return this.RealTimeRedisManager.getUpdatesLength(this.doc_id, this.callback); + }); - it "should look up the length", -> - @rclient.llen.calledWith("PendingUpdates:#{@doc_id}").should.equal true + it("should look up the length", function() { + return this.rclient.llen.calledWith(`PendingUpdates:${this.doc_id}`).should.equal(true); + }); - it "should return the length", -> - @callback.calledWith(null, @length).should.equal true + return it("should return the length", function() { + return this.callback.calledWith(null, this.length).should.equal(true); + }); + }); - describe "sendData", -> - beforeEach -> - @message_id = "doc:somehost:01020304-0" - @RealTimeRedisManager.sendData({op: "thisop"}) + return describe("sendData", function() { + beforeEach(function() { + this.message_id = "doc:somehost:01020304-0"; + return this.RealTimeRedisManager.sendData({op: "thisop"}); + }); - it "should send the op with a message id", -> - @pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true + it("should send the op with a message id", function() { + return this.pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:this.message_id})).should.equal(true); + }); - it "should track the payload size", -> - @metrics.summary.calledWith("redis.publish.applied-ops", JSON.stringify({op:"thisop",_id:@message_id}).length).should.equal true + return it("should track the payload size", function() { + return this.metrics.summary.calledWith("redis.publish.applied-ops", JSON.stringify({op:"thisop",_id:this.message_id}).length).should.equal(true); + }); + }); +}); diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js index 254de8d0a7..cf22958b05 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js @@ -1,803 +1,998 @@ -sinon = require('sinon') -chai = require('chai') -should = chai.should() -modulePath = "../../../../app/js/RedisManager.js" -SandboxedModule = require('sandboxed-module') -Errors = require "../../../../app/js/Errors" -crypto = require "crypto" -tk = require "timekeeper" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const modulePath = "../../../../app/js/RedisManager.js"; +const SandboxedModule = require('sandboxed-module'); +const Errors = require("../../../../app/js/Errors"); +const crypto = require("crypto"); +const tk = require("timekeeper"); -describe "RedisManager", -> - beforeEach -> - @multi = exec: sinon.stub() - @rclient = multi: () => @multi - tk.freeze(new Date()) - @RedisManager = SandboxedModule.require modulePath, - requires: - "logger-sharelatex": @logger = { 
error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {} - "settings-sharelatex": @settings = { - documentupdater: {logHashErrors: {write:true, read:true}} - apis: +describe("RedisManager", function() { + beforeEach(function() { + let Timer; + this.multi = {exec: sinon.stub()}; + this.rclient = {multi: () => this.multi}; + tk.freeze(new Date()); + this.RedisManager = SandboxedModule.require(modulePath, { + requires: { + "logger-sharelatex": (this.logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }), + "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), + "settings-sharelatex": (this.settings = { + documentupdater: {logHashErrors: {write:true, read:true}}, + apis: { project_history: {enabled: true} - redis: - documentupdater: - key_schema: - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - docLines: ({doc_id}) -> "doclines:#{doc_id}" - docOps: ({doc_id}) -> "DocOps:#{doc_id}" - docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" - docHash: ({doc_id}) -> "DocHash:#{doc_id}" - projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - ranges: ({doc_id}) -> "Ranges:#{doc_id}" - pathname: ({doc_id}) -> "Pathname:#{doc_id}" - projectHistoryId: ({doc_id}) -> "ProjectHistoryId:#{doc_id}" - projectHistoryType: ({doc_id}) -> "ProjectHistoryType:#{doc_id}" - projectState: ({project_id}) -> "ProjectState:#{project_id}" - unflushedTime: ({doc_id}) -> "UnflushedTime:#{doc_id}" - lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:#{doc_id}" - lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:#{doc_id}" - history: - key_schema: - uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" - docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" - } - "redis-sharelatex": - createClient: () => @rclient - "./Metrics": @metrics = - inc: sinon.stub() - summary: sinon.stub() - Timer: class Timer - constructor: () -> - this.start = new Date() - done: () -> - timeSpan = new Date - this.start - return timeSpan + }, + redis: { + documentupdater: { + key_schema: { + blockingKey({doc_id}) { return `Blocking:${doc_id}`; }, + docLines({doc_id}) { return `doclines:${doc_id}`; }, + docOps({doc_id}) { return `DocOps:${doc_id}`; }, + docVersion({doc_id}) { return `DocVersion:${doc_id}`; }, + docHash({doc_id}) { return `DocHash:${doc_id}`; }, + projectKey({doc_id}) { return `ProjectId:${doc_id}`; }, + pendingUpdates({doc_id}) { return `PendingUpdates:${doc_id}`; }, + docsInProject({project_id}) { return `DocsIn:${project_id}`; }, + ranges({doc_id}) { return `Ranges:${doc_id}`; }, + pathname({doc_id}) { return `Pathname:${doc_id}`; }, + projectHistoryId({doc_id}) { return `ProjectHistoryId:${doc_id}`; }, + projectHistoryType({doc_id}) { return `ProjectHistoryType:${doc_id}`; }, + projectState({project_id}) { return `ProjectState:${project_id}`; }, + unflushedTime({doc_id}) { return `UnflushedTime:${doc_id}`; }, + lastUpdatedBy({doc_id}) { return `lastUpdatedBy:${doc_id}`; }, + lastUpdatedAt({doc_id}) { return `lastUpdatedAt:${doc_id}`; } + } + }, + history: { + key_schema: { + uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:${doc_id}`; }, + docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:${project_id}`; } + } + } + } + }), + "redis-sharelatex": { + createClient: () => this.rclient + }, + "./Metrics": (this.metrics = { + inc: sinon.stub(), + summary: sinon.stub(), 
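+ // Stubbed Metrics double: the Timer class records its construction time, and done() returns the elapsed milliseconds.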
+ Timer: (Timer = class Timer { + constructor() { + this.start = new Date(); + } + done() { + const timeSpan = new Date - this.start; + return timeSpan; + } + }) + }), "./Errors": Errors - globals: - JSON: @JSON = JSON + }, + globals: { + JSON: (this.JSON = JSON) + } + } + ); - @doc_id = "doc-id-123" - @project_id = "project-id-123" - @projectHistoryId = 123 - @callback = sinon.stub() + this.doc_id = "doc-id-123"; + this.project_id = "project-id-123"; + this.projectHistoryId = 123; + return this.callback = sinon.stub(); + }); - afterEach -> - tk.reset() + afterEach(() => tk.reset()); - describe "getDoc", -> - beforeEach -> - @lines = ["one", "two", "three", "これは"] # include some utf8 - @jsonlines = JSON.stringify @lines - @version = 42 - @hash = crypto.createHash('sha1').update(@jsonlines,'utf8').digest('hex') - @ranges = { comments: "mock", entries: "mock" } - @json_ranges = JSON.stringify @ranges - @unflushed_time = 12345 - @pathname = '/a/b/c.tex' - @multi.get = sinon.stub() - @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @project_id, @json_ranges, @pathname, @projectHistoryId.toString(), @unflushed_time]) - @rclient.sadd = sinon.stub().yields(null, 0) + describe("getDoc", function() { + beforeEach(function() { + this.lines = ["one", "two", "three", "これは"]; // include some utf8 + this.jsonlines = JSON.stringify(this.lines); + this.version = 42; + this.hash = crypto.createHash('sha1').update(this.jsonlines,'utf8').digest('hex'); + this.ranges = { comments: "mock", entries: "mock" }; + this.json_ranges = JSON.stringify(this.ranges); + this.unflushed_time = 12345; + this.pathname = '/a/b/c.tex'; + this.multi.get = sinon.stub(); + this.multi.exec = sinon.stub().callsArgWith(0, null, [this.jsonlines, this.version, this.hash, this.project_id, this.json_ranges, this.pathname, this.projectHistoryId.toString(), this.unflushed_time]); + return this.rclient.sadd = sinon.stub().yields(null, 0); + }); - describe "successfully", -> - beforeEach -> - @RedisManager.getDoc @project_id, @doc_id, @callback + describe("successfully", function() { + beforeEach(function() { + return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it "should get the lines from redis", -> - @multi.get - .calledWith("doclines:#{@doc_id}") - .should.equal true + it("should get the lines from redis", function() { + return this.multi.get + .calledWith(`doclines:${this.doc_id}`) + .should.equal(true); + }); - it "should get the version from", -> - @multi.get - .calledWith("DocVersion:#{@doc_id}") - .should.equal true + it("should get the version from redis", function() { + return this.multi.get + .calledWith(`DocVersion:${this.doc_id}`) + .should.equal(true); + }); - it 'should get the hash', -> - @multi.get - .calledWith("DocHash:#{@doc_id}") - .should.equal true + it('should get the hash', function() { + return this.multi.get + .calledWith(`DocHash:${this.doc_id}`) + .should.equal(true); + }); - it "should get the ranges", -> - @multi.get - .calledWith("Ranges:#{@doc_id}") - .should.equal true + it("should get the ranges", function() { + return this.multi.get + .calledWith(`Ranges:${this.doc_id}`) + .should.equal(true); + }); - it "should get the unflushed time", -> - @multi.get - .calledWith("UnflushedTime:#{@doc_id}") - .should.equal true + it("should get the unflushed time", function() { + return this.multi.get + .calledWith(`UnflushedTime:${this.doc_id}`) + .should.equal(true); + }); - it "should get the pathname", -> - @multi.get - .calledWith("Pathname:#{@doc_id}") - 
.should.equal true + it("should get the pathname", function() { + return this.multi.get + .calledWith(`Pathname:${this.doc_id}`) + .should.equal(true); + }); - it "should get the projectHistoryId as an integer", -> - @multi.get - .calledWith("ProjectHistoryId:#{@doc_id}") - .should.equal true + it("should get the projectHistoryId as an integer", function() { + return this.multi.get + .calledWith(`ProjectHistoryId:${this.doc_id}`) + .should.equal(true); + }); - it "should get lastUpdatedAt", -> - @multi.get - .calledWith("lastUpdatedAt:#{@doc_id}") - .should.equal true + it("should get lastUpdatedAt", function() { + return this.multi.get + .calledWith(`lastUpdatedAt:${this.doc_id}`) + .should.equal(true); + }); - it "should get lastUpdatedBy", -> - @multi.get - .calledWith("lastUpdatedBy:#{@doc_id}") - .should.equal true + it("should get lastUpdatedBy", function() { + return this.multi.get + .calledWith(`lastUpdatedBy:${this.doc_id}`) + .should.equal(true); + }); - it "should check if the document is in the DocsIn set", -> - @rclient.sadd - .calledWith("DocsIn:#{@project_id}") - .should.equal true + it("should check if the document is in the DocsIn set", function() { + return this.rclient.sadd + .calledWith(`DocsIn:${this.project_id}`) + .should.equal(true); + }); - it 'should return the document', -> - @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time, @lastUpdatedAt, @lastUpdatedBy) - .should.equal true + it('should return the document', function() { + return this.callback + .calledWithExactly(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushed_time, this.lastUpdatedAt, this.lastUpdatedBy) + .should.equal(true); + }); - it 'should not log any errors', -> - @logger.error.calledWith() - .should.equal false + return it('should not log any errors', function() { + return this.logger.error.calledWith() + .should.equal(false); + }); + }); - describe "when the document is not present", -> - beforeEach -> - @multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null, null, null, null]) - @rclient.sadd = sinon.stub().yields() - @RedisManager.getDoc @project_id, @doc_id, @callback + describe("when the document is not present", function() { + beforeEach(function() { + this.multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null, null, null, null]); + this.rclient.sadd = sinon.stub().yields(); + return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it "should not check if the document is in the DocsIn set", -> - @rclient.sadd - .calledWith("DocsIn:#{@project_id}") - .should.equal false + it("should not check if the document is in the DocsIn set", function() { + return this.rclient.sadd + .calledWith(`DocsIn:${this.project_id}`) + .should.equal(false); + }); - it 'should return an empty result', -> - @callback + it('should return an empty result', function() { + return this.callback .calledWithExactly(null, null, 0, {}, null, null, null, null, null) - .should.equal true + .should.equal(true); + }); - it 'should not log any errors', -> - @logger.error.calledWith() - .should.equal false + return it('should not log any errors', function() { + return this.logger.error.calledWith() + .should.equal(false); + }); + }); - describe "when the document is missing from the DocsIn set", -> - beforeEach -> - @rclient.sadd = sinon.stub().yields(null, 1) - @RedisManager.getDoc @project_id, @doc_id, @callback + 
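// rclient.sadd yields 1 when the doc id had to be added to DocsIn, i.e. it was missing; getDoc logs the inconsistency but still returns the doc. + 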
describe("when the document is missing from the DocsIn set", function() { + beforeEach(function() { + this.rclient.sadd = sinon.stub().yields(null, 1); + return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it 'should log an error', -> - @logger.error.calledWith() - .should.equal true + it('should log an error', function() { + return this.logger.error.calledWith() + .should.equal(true); + }); - it 'should return the document', -> - @callback - .calledWithExactly(null, @lines, @version, @ranges, @pathname, @projectHistoryId, @unflushed_time, @lastUpdatedAt, @lastUpdatedBy) - .should.equal true + return it('should return the document', function() { + return this.callback + .calledWithExactly(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushed_time, this.lastUpdatedAt, this.lastUpdatedBy) + .should.equal(true); + }); + }); - describe "with a corrupted document", -> - beforeEach -> - @badHash = "INVALID-HASH-VALUE" - @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges]) - @RedisManager.getDoc @project_id, @doc_id, @callback + describe("with a corrupted document", function() { + beforeEach(function() { + this.badHash = "INVALID-HASH-VALUE"; + this.multi.exec = sinon.stub().callsArgWith(0, null, [this.jsonlines, this.version, this.badHash, this.project_id, this.json_ranges]); + return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it 'should log a hash error', -> - @logger.error.calledWith() - .should.equal true + it('should log a hash error', function() { + return this.logger.error.calledWith() + .should.equal(true); + }); - it 'should return the document', -> - @callback - .calledWith(null, @lines, @version, @ranges) - .should.equal true + return it('should return the document', function() { + return this.callback + .calledWith(null, this.lines, this.version, this.ranges) + .should.equal(true); + }); + }); - describe "with a slow request to redis", -> - beforeEach -> - @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @badHash, @project_id, @json_ranges, @pathname, @unflushed_time]) - @clock = sinon.useFakeTimers(); - @multi.exec = (cb) => - @clock.tick(6000); - cb(null, [@jsonlines, @version, @another_project_id, @json_ranges, @pathname, @unflushed_time]) + describe("with a slow request to redis", function() { + beforeEach(function() { + this.multi.exec = sinon.stub().callsArgWith(0, null, [this.jsonlines, this.version, this.badHash, this.project_id, this.json_ranges, this.pathname, this.unflushed_time]); + this.clock = sinon.useFakeTimers(); + this.multi.exec = cb => { + this.clock.tick(6000); + return cb(null, [this.jsonlines, this.version, this.another_project_id, this.json_ranges, this.pathname, this.unflushed_time]); + }; - @RedisManager.getDoc @project_id, @doc_id, @callback + return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - afterEach -> - @clock.restore() + afterEach(function() { + return this.clock.restore(); + }); - it 'should return an error', -> - @callback + return it('should return an error', function() { + return this.callback .calledWith(new Error("redis getDoc exceeded timeout")) - .should.equal true + .should.equal(true); + }); + }); - describe "getDoc with an invalid project id", -> - beforeEach -> - @another_project_id = "project-id-456" - @multi.exec = sinon.stub().callsArgWith(0, null, [@jsonlines, @version, @hash, @another_project_id, @json_ranges, 
@pathname, @unflushed_time]) - @RedisManager.getDoc @project_id, @doc_id, @callback + return describe("getDoc with an invalid project id", function() { + beforeEach(function() { + this.another_project_id = "project-id-456"; + this.multi.exec = sinon.stub().callsArgWith(0, null, [this.jsonlines, this.version, this.hash, this.another_project_id, this.json_ranges, this.pathname, this.unflushed_time]); + return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); + }); - it 'should return an error', -> - @callback + return it('should return an error', function() { + return this.callback .calledWith(new Errors.NotFoundError("not found")) - .should.equal true + .should.equal(true); + }); + }); + }); - describe "getPreviousDocOpsTests", -> - describe "with a start and an end value", -> - beforeEach -> - @first_version_in_redis = 30 - @version = 70 - @length = @version - @first_version_in_redis - @start = 50 - @end = 60 - @ops = [ + describe("getPreviousDocOpsTests", function() { + describe("with a start and an end value", function() { + beforeEach(function() { + this.first_version_in_redis = 30; + this.version = 70; + this.length = this.version - this.first_version_in_redis; + this.start = 50; + this.end = 60; + this.ops = [ { "mock": "op-1" }, { "mock": "op-2" } - ] - @jsonOps = @ops.map (op) -> JSON.stringify op - @rclient.llen = sinon.stub().callsArgWith(1, null, @length) - @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) - @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) - @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + ]; + this.jsonOps = this.ops.map(op => JSON.stringify(op)); + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length); + this.rclient.get = sinon.stub().callsArgWith(1, null, this.version.toString()); + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps); + return this.RedisManager.getPreviousDocOps(this.doc_id, this.start, this.end, this.callback); + }); - it "should get the length of the existing doc ops", -> - @rclient.llen - .calledWith("DocOps:#{@doc_id}") - .should.equal true + it("should get the length of the existing doc ops", function() { + return this.rclient.llen + .calledWith(`DocOps:${this.doc_id}`) + .should.equal(true); + }); - it "should get the current version of the doc", -> - @rclient.get - .calledWith("DocVersion:#{@doc_id}") - .should.equal true + it("should get the current version of the doc", function() { + return this.rclient.get + .calledWith(`DocVersion:${this.doc_id}`) + .should.equal(true); + }); - it "should get the appropriate docs ops", -> - @rclient.lrange - .calledWith("DocOps:#{@doc_id}", @start - @first_version_in_redis, @end - @first_version_in_redis) - .should.equal true + it("should get the appropriate docs ops", function() { + return this.rclient.lrange + .calledWith(`DocOps:${this.doc_id}`, this.start - this.first_version_in_redis, this.end - this.first_version_in_redis) + .should.equal(true); + }); - it "should return the docs with the doc ops deserialized", -> - @callback.calledWith(null, @ops).should.equal true + return it("should return the docs with the doc ops deserialized", function() { + return this.callback.calledWith(null, this.ops).should.equal(true); + }); + }); - describe "with an end value of -1", -> - beforeEach -> - @first_version_in_redis = 30 - @version = 70 - @length = @version - @first_version_in_redis - @start = 50 - @end = -1 - @ops = [ + describe("with an end value of -1", function() { + beforeEach(function() { 
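+ // Same fixture as above, but end = -1 selects ops through to the tail of the DocOps list (redis LRANGE semantics).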
+ this.first_version_in_redis = 30; + this.version = 70; + this.length = this.version - this.first_version_in_redis; + this.start = 50; + this.end = -1; + this.ops = [ { "mock": "op-1" }, { "mock": "op-2" } - ] - @jsonOps = @ops.map (op) -> JSON.stringify op - @rclient.llen = sinon.stub().callsArgWith(1, null, @length) - @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) - @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) - @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + ]; + this.jsonOps = this.ops.map(op => JSON.stringify(op)); + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length); + this.rclient.get = sinon.stub().callsArgWith(1, null, this.version.toString()); + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps); + return this.RedisManager.getPreviousDocOps(this.doc_id, this.start, this.end, this.callback); + }); - it "should get the appropriate docs ops to the end of list", -> - @rclient.lrange - .calledWith("DocOps:#{@doc_id}", @start - @first_version_in_redis, -1) - .should.equal true + it("should get the appropriate docs ops to the end of list", function() { + return this.rclient.lrange + .calledWith(`DocOps:${this.doc_id}`, this.start - this.first_version_in_redis, -1) + .should.equal(true); + }); - it "should return the docs with the doc ops deserialized", -> - @callback.calledWith(null, @ops).should.equal true + return it("should return the docs with the doc ops deserialized", function() { + return this.callback.calledWith(null, this.ops).should.equal(true); + }); + }); - describe "when the requested range is not in Redis", -> - beforeEach -> - @first_version_in_redis = 30 - @version = 70 - @length = @version - @first_version_in_redis - @start = 20 - @end = -1 - @ops = [ + describe("when the requested range is not in Redis", function() { + beforeEach(function() { + this.first_version_in_redis = 30; + this.version = 70; + this.length = this.version - this.first_version_in_redis; + this.start = 20; + this.end = -1; + this.ops = [ { "mock": "op-1" }, { "mock": "op-2" } - ] - @jsonOps = @ops.map (op) -> JSON.stringify op - @rclient.llen = sinon.stub().callsArgWith(1, null, @length) - @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) - @rclient.lrange = sinon.stub().callsArgWith(3, null, @jsonOps) - @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + ]; + this.jsonOps = this.ops.map(op => JSON.stringify(op)); + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length); + this.rclient.get = sinon.stub().callsArgWith(1, null, this.version.toString()); + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps); + return this.RedisManager.getPreviousDocOps(this.doc_id, this.start, this.end, this.callback); + }); - it "should return an error", -> - @callback.calledWith(new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis")).should.equal true + it("should return an error", function() { + return this.callback.calledWith(new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis")).should.equal(true); + }); - it "should log out the problem", -> - @logger.warn.called.should.equal true + return it("should log out the problem", function() { + return this.logger.warn.called.should.equal(true); + }); + }); - describe "with a slow request to redis", -> - beforeEach -> - @first_version_in_redis = 30 - @version = 70 - @length = @version - @first_version_in_redis - @start = 50 - @end = 60 - @ops = [ + return 
describe("with a slow request to redis", function() { + beforeEach(function() { + this.first_version_in_redis = 30; + this.version = 70; + this.length = this.version - this.first_version_in_redis; + this.start = 50; + this.end = 60; + this.ops = [ { "mock": "op-1" }, { "mock": "op-2" } - ] - @jsonOps = @ops.map (op) -> JSON.stringify op - @rclient.llen = sinon.stub().callsArgWith(1, null, @length) - @rclient.get = sinon.stub().callsArgWith(1, null, @version.toString()) - @clock = sinon.useFakeTimers(); - @rclient.lrange = (key, start, end, cb) => - @clock.tick(6000); - cb(null, @jsonOps) - @RedisManager.getPreviousDocOps(@doc_id, @start, @end, @callback) + ]; + this.jsonOps = this.ops.map(op => JSON.stringify(op)); + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length); + this.rclient.get = sinon.stub().callsArgWith(1, null, this.version.toString()); + this.clock = sinon.useFakeTimers(); + this.rclient.lrange = (key, start, end, cb) => { + this.clock.tick(6000); + return cb(null, this.jsonOps); + }; + return this.RedisManager.getPreviousDocOps(this.doc_id, this.start, this.end, this.callback); + }); - afterEach -> - @clock.restore() + afterEach(function() { + return this.clock.restore(); + }); - it 'should return an error', -> - @callback + return it('should return an error', function() { + return this.callback .calledWith(new Error("redis getPreviousDocOps exceeded timeout")) - .should.equal true + .should.equal(true); + }); + }); + }); - describe "updateDocument", -> - beforeEach -> - @lines = ["one", "two", "three", "これは"] - @ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }] - @version = 42 - @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex') - @ranges = { comments: "mock", entries: "mock" } - @updateMeta = { user_id: 'last-author-fake-id' } - @doc_update_list_length = sinon.stub() - @project_update_list_length = sinon.stub() + describe("updateDocument", function() { + beforeEach(function() { + this.lines = ["one", "two", "three", "これは"]; + this.ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }]; + this.version = 42; + this.hash = crypto.createHash('sha1').update(JSON.stringify(this.lines),'utf8').digest('hex'); + this.ranges = { comments: "mock", entries: "mock" }; + this.updateMeta = { user_id: 'last-author-fake-id' }; + this.doc_update_list_length = sinon.stub(); + this.project_update_list_length = sinon.stub(); - @RedisManager.getDocVersion = sinon.stub() - @multi.set = sinon.stub() - @multi.rpush = sinon.stub() - @multi.expire = sinon.stub() - @multi.ltrim = sinon.stub() - @multi.del = sinon.stub() - @multi.exec = sinon.stub().callsArgWith(0, null, - [@hash, null, null, null, null, null, null, @doc_update_list_length, null, null] - ) - @ProjectHistoryRedisManager.queueOps = sinon.stub().callsArgWith( - @ops.length + 1, null, @project_update_list_length - ) + this.RedisManager.getDocVersion = sinon.stub(); + this.multi.set = sinon.stub(); + this.multi.rpush = sinon.stub(); + this.multi.expire = sinon.stub(); + this.multi.ltrim = sinon.stub(); + this.multi.del = sinon.stub(); + this.multi.exec = sinon.stub().callsArgWith(0, null, + [this.hash, null, null, null, null, null, null, this.doc_update_list_length, null, null] + ); + return this.ProjectHistoryRedisManager.queueOps = sinon.stub().callsArgWith( + this.ops.length + 1, null, this.project_update_list_length + ); + }); - describe "with a consistent version", -> - beforeEach -> + describe("with a consistent version", function() { + 
beforeEach(function() {}); - describe "with project history enabled", -> - beforeEach -> - @settings.apis.project_history.enabled = true - @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback + describe("with project history enabled", function() { + beforeEach(function() { + this.settings.apis.project_history.enabled = true; + this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length); + return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); + }); - it "should get the current doc version to check for consistency", -> - @RedisManager.getDocVersion - .calledWith(@doc_id) - .should.equal true + it("should get the current doc version to check for consistency", function() { + return this.RedisManager.getDocVersion + .calledWith(this.doc_id) + .should.equal(true); + }); - it "should set the doclines", -> - @multi.set - .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines)) - .should.equal true + it("should set the doclines", function() { + return this.multi.set + .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) + .should.equal(true); + }); - it "should set the version", -> - @multi.set - .calledWith("DocVersion:#{@doc_id}", @version) - .should.equal true + it("should set the version", function() { + return this.multi.set + .calledWith(`DocVersion:${this.doc_id}`, this.version) + .should.equal(true); + }); - it "should set the hash", -> - @multi.set - .calledWith("DocHash:#{@doc_id}", @hash) - .should.equal true + it("should set the hash", function() { + return this.multi.set + .calledWith(`DocHash:${this.doc_id}`, this.hash) + .should.equal(true); + }); - it "should set the ranges", -> - @multi.set - .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges)) - .should.equal true + it("should set the ranges", function() { + return this.multi.set + .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) + .should.equal(true); + }); - it "should set the unflushed time", -> - @multi.set - .calledWith("UnflushedTime:#{@doc_id}", Date.now(), "NX") - .should.equal true + it("should set the unflushed time", function() { + return this.multi.set + .calledWith(`UnflushedTime:${this.doc_id}`, Date.now(), "NX") + .should.equal(true); + }); - it "should set the last updated time", -> - @multi.set - .calledWith("lastUpdatedAt:#{@doc_id}", Date.now()) - .should.equal true + it("should set the last updated time", function() { + return this.multi.set + .calledWith(`lastUpdatedAt:${this.doc_id}`, Date.now()) + .should.equal(true); + }); - it "should set the last updater", -> - @multi.set - .calledWith("lastUpdatedBy:#{@doc_id}", 'last-author-fake-id') - .should.equal true + it("should set the last updater", function() { + return this.multi.set + .calledWith(`lastUpdatedBy:${this.doc_id}`, 'last-author-fake-id') + .should.equal(true); + }); - it "should push the doc op into the doc ops list", -> - @multi.rpush - .calledWith("DocOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) - .should.equal true + it("should push the doc op into the doc ops list", function() { + return this.multi.rpush + .calledWith(`DocOps:${this.doc_id}`, JSON.stringify(this.ops[0]), JSON.stringify(this.ops[1])) + .should.equal(true); + }); - it "should renew the expiry ttl on the doc ops array", -> - @multi.expire - 
.calledWith("DocOps:#{@doc_id}", @RedisManager.DOC_OPS_TTL) - .should.equal true + it("should renew the expiry ttl on the doc ops array", function() { + return this.multi.expire + .calledWith(`DocOps:${this.doc_id}`, this.RedisManager.DOC_OPS_TTL) + .should.equal(true); + }); - it "should truncate the list to 100 members", -> - @multi.ltrim - .calledWith("DocOps:#{@doc_id}", -@RedisManager.DOC_OPS_MAX_LENGTH, -1) - .should.equal true + it("should truncate the list to 100 members", function() { + return this.multi.ltrim + .calledWith(`DocOps:${this.doc_id}`, -this.RedisManager.DOC_OPS_MAX_LENGTH, -1) + .should.equal(true); + }); - it "should push the updates into the history ops list", -> - @multi.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1])) - .should.equal true + it("should push the updates into the history ops list", function() { + return this.multi.rpush + .calledWith(`UncompressedHistoryOps:${this.doc_id}`, JSON.stringify(this.ops[0]), JSON.stringify(this.ops[1])) + .should.equal(true); + }); - it "should push the updates into the project history ops list", -> - @ProjectHistoryRedisManager.queueOps - .calledWith(@project_id, JSON.stringify(@ops[0])) - .should.equal true + it("should push the updates into the project history ops list", function() { + return this.ProjectHistoryRedisManager.queueOps + .calledWith(this.project_id, JSON.stringify(this.ops[0])) + .should.equal(true); + }); - it "should call the callback", -> - @callback - .calledWith(null, @doc_update_list_length, @project_update_list_length) - .should.equal true + it("should call the callback", function() { + return this.callback + .calledWith(null, this.doc_update_list_length, this.project_update_list_length) + .should.equal(true); + }); - it 'should not log any errors', -> - @logger.error.calledWith() - .should.equal false + return it('should not log any errors', function() { + return this.logger.error.calledWith() + .should.equal(false); + }); + }); - describe "with project history disabled", -> - beforeEach -> - @settings.apis.project_history.enabled = false - @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback + describe("with project history disabled", function() { + beforeEach(function() { + this.settings.apis.project_history.enabled = false; + this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length); + return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); + }); - it "should not push the updates into the project history ops list", -> - @ProjectHistoryRedisManager.queueOps.called.should.equal false + it("should not push the updates into the project history ops list", function() { + return this.ProjectHistoryRedisManager.queueOps.called.should.equal(false); + }); - it "should call the callback", -> - @callback - .calledWith(null, @doc_update_list_length) - .should.equal true + return it("should call the callback", function() { + return this.callback + .calledWith(null, this.doc_update_list_length) + .should.equal(true); + }); + }); - describe "with a doc using project history only", -> - beforeEach -> - @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length, 'project-history') - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, 
@ranges, @updateMeta, @callback + return describe("with a doc using project history only", function() { + beforeEach(function() { + this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length, 'project-history'); + return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); + }); - it "should not push the updates to the track-changes ops list", -> - @multi.rpush - .calledWith("UncompressedHistoryOps:#{@doc_id}") - .should.equal false + it("should not push the updates to the track-changes ops list", function() { + return this.multi.rpush + .calledWith(`UncompressedHistoryOps:${this.doc_id}`) + .should.equal(false); + }); - it "should push the updates into the project history ops list", -> - @ProjectHistoryRedisManager.queueOps - .calledWith(@project_id, JSON.stringify(@ops[0])) - .should.equal true + it("should push the updates into the project history ops list", function() { + return this.ProjectHistoryRedisManager.queueOps + .calledWith(this.project_id, JSON.stringify(this.ops[0])) + .should.equal(true); + }); - it "should call the callback with the project update count only", -> - @callback - .calledWith(null, undefined, @project_update_list_length) - .should.equal true + return it("should call the callback with the project update count only", function() { + return this.callback + .calledWith(null, undefined, this.project_update_list_length) + .should.equal(true); + }); + }); + }); - describe "with an inconsistent version", -> - beforeEach -> - @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length - 1) - @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback + describe("with an inconsistent version", function() { + beforeEach(function() { + this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length - 1); + return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); + }); - it "should not call multi.exec", -> - @multi.exec.called.should.equal false + it("should not call multi.exec", function() { + return this.multi.exec.called.should.equal(false); + }); - it "should call the callback with an error", -> - @callback - .calledWith(new Error("Version mismatch. '#{@doc_id}' is corrupted.")) - .should.equal true + return it("should call the callback with an error", function() { + return this.callback + .calledWith(new Error(`Version mismatch. 
+          .should.equal(true);
+      });
+    });
-    describe "with no updates", ->
-      beforeEach ->
-        @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version)
-        @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, [], @ranges, @updateMeta, @callback
+    describe("with no updates", function() {
+      beforeEach(function() {
+        this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version);
+        return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, [], this.ranges, this.updateMeta, this.callback);
+      });
-      it "should not try to enqueue doc updates", ->
-        @multi.rpush
+      it("should not try to enqueue doc updates", function() {
+        return this.multi.rpush
           .called
-          .should.equal false
+          .should.equal(false);
+      });
-      it "should not try to enqueue project updates", ->
-        @ProjectHistoryRedisManager.queueOps
+      it("should not try to enqueue project updates", function() {
+        return this.ProjectHistoryRedisManager.queueOps
           .called
-          .should.equal false
+          .should.equal(false);
+      });
-      it "should still set the doclines", ->
-        @multi.set
-          .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines))
-          .should.equal true
+      return it("should still set the doclines", function() {
+        return this.multi.set
+          .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines))
+          .should.equal(true);
+      });
+    });
-    describe "with empty ranges", ->
-      beforeEach ->
-        @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
-        @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, {}, @updateMeta, @callback
+    describe("with empty ranges", function() {
+      beforeEach(function() {
+        this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length);
+        return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, {}, this.updateMeta, this.callback);
+      });
-      it "should not set the ranges", ->
-        @multi.set
-          .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges))
-          .should.equal false
+      it("should not set the ranges", function() {
+        return this.multi.set
+          .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges))
+          .should.equal(false);
+      });
-      it "should delete the ranges key", ->
-        @multi.del
-          .calledWith("Ranges:#{@doc_id}")
-          .should.equal true
+      return it("should delete the ranges key", function() {
+        return this.multi.del
+          .calledWith(`Ranges:${this.doc_id}`)
+          .should.equal(true);
+      });
+    });
-    describe "with null bytes in the serialized doc lines", ->
-      beforeEach ->
-        @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
-        @_stringify = JSON.stringify
-        @JSON.stringify = () -> return '["bad bytes! \u0000 <- here"]'
-        @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback
+    describe("with null bytes in the serialized doc lines", function() {
+      beforeEach(function() {
+        this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length);
+        this._stringify = JSON.stringify;
+        this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]';
+        return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback);
+      });
-      afterEach ->
-        @JSON.stringify = @_stringify
+      afterEach(function() {
+        return this.JSON.stringify = this._stringify;
+      });
-      it "should log an error", ->
-        @logger.error.called.should.equal true
+      it("should log an error", function() {
+        return this.logger.error.called.should.equal(true);
+      });
-      it "should call the callback with an error", ->
-        @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true
+      return it("should call the callback with an error", function() {
+        return this.callback.calledWith(new Error("null bytes found in doc lines")).should.equal(true);
+      });
+    });
-    describe "with ranges that are too big", ->
-      beforeEach ->
-        @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
-        @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large"))
-        @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback
+    describe("with ranges that are too big", function() {
+      beforeEach(function() {
+        this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length);
+        this.RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large"));
+        return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback);
+      });
-      it 'should log an error', ->
-        @logger.error.called.should.equal true
+      it('should log an error', function() {
+        return this.logger.error.called.should.equal(true);
+      });
-      it "should call the callback with the error", ->
-        @callback.calledWith(new Error("ranges are too large")).should.equal true
+      return it("should call the callback with the error", function() {
+        return this.callback.calledWith(new Error("ranges are too large")).should.equal(true);
+      });
+    });
-    describe "without user id from meta", ->
-      beforeEach ->
-        @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
-        @RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, {}, @callback
+    return describe("without user id from meta", function() {
+      beforeEach(function() {
+        this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length);
+        return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, {}, this.callback);
+      });
-      it "should set the last updater to null", ->
-        @multi.del
-          .calledWith("lastUpdatedBy:#{@doc_id}")
-          .should.equal true
+      it("should set the last updater to null", function() {
+        return this.multi.del
+          .calledWith(`lastUpdatedBy:${this.doc_id}`)
+          .should.equal(true);
+      });
-      it "should still set the last updated time", ->
-        @multi.set
-          .calledWith("lastUpdatedAt:#{@doc_id}", Date.now())
-          .should.equal true
+      return it("should still set the last updated time", function() {
+        return this.multi.set
+          .calledWith(`lastUpdatedAt:${this.doc_id}`, Date.now())
+          .should.equal(true);
+      });
+    });
+  });
-  describe "putDocInMemory", ->
-    beforeEach ->
-      @multi.set = sinon.stub()
-      @rclient.sadd = sinon.stub().yields()
-      @multi.del = sinon.stub()
-      @lines = ["one", "two", "three", "これは"]
-      @version = 42
-      @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex')
-      @multi.exec = sinon.stub().callsArgWith(0, null, [@hash])
-      @ranges = { comments: "mock", entries: "mock" }
-      @pathname = '/a/b/c.tex'
+  describe("putDocInMemory", function() {
+    beforeEach(function() {
+      this.multi.set = sinon.stub();
+      this.rclient.sadd = sinon.stub().yields();
+      this.multi.del = sinon.stub();
+      this.lines = ["one", "two", "three", "これは"];
+      this.version = 42;
+      this.hash = crypto.createHash('sha1').update(JSON.stringify(this.lines),'utf8').digest('hex');
+      this.multi.exec = sinon.stub().callsArgWith(0, null, [this.hash]);
+      this.ranges = { comments: "mock", entries: "mock" };
+      return this.pathname = '/a/b/c.tex';
+    });
-    describe "with non-empty ranges", ->
-      beforeEach (done) ->
-        @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done
+    describe("with non-empty ranges", function() {
+      beforeEach(function(done) {
+        return this.RedisManager.putDocInMemory(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, done);
+      });
-      it "should set the lines", ->
-        @multi.set
-          .calledWith("doclines:#{@doc_id}", JSON.stringify(@lines))
-          .should.equal true
+      it("should set the lines", function() {
+        return this.multi.set
+          .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines))
+          .should.equal(true);
+      });
-      it "should set the version", ->
-        @multi.set
-          .calledWith("DocVersion:#{@doc_id}", @version)
-          .should.equal true
+      it("should set the version", function() {
+        return this.multi.set
+          .calledWith(`DocVersion:${this.doc_id}`, this.version)
+          .should.equal(true);
+      });
-      it "should set the hash", ->
-        @multi.set
-          .calledWith("DocHash:#{@doc_id}", @hash)
-          .should.equal true
+      it("should set the hash", function() {
+        return this.multi.set
+          .calledWith(`DocHash:${this.doc_id}`, this.hash)
+          .should.equal(true);
+      });
-      it "should set the ranges", ->
-        @multi.set
-          .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges))
-          .should.equal true
+      it("should set the ranges", function() {
+        return this.multi.set
+          .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges))
+          .should.equal(true);
+      });
-      it "should set the project_id for the doc", ->
-        @multi.set
-          .calledWith("ProjectId:#{@doc_id}", @project_id)
-          .should.equal true
+      it("should set the project_id for the doc", function() {
+        return this.multi.set
+          .calledWith(`ProjectId:${this.doc_id}`, this.project_id)
+          .should.equal(true);
+      });
-      it "should set the pathname for the doc", ->
-        @multi.set
-          .calledWith("Pathname:#{@doc_id}", @pathname)
-          .should.equal true
+      it("should set the pathname for the doc", function() {
+        return this.multi.set
+          .calledWith(`Pathname:${this.doc_id}`, this.pathname)
+          .should.equal(true);
+      });
-      it "should set the projectHistoryId for the doc", ->
-        @multi.set
-          .calledWith("ProjectHistoryId:#{@doc_id}", @projectHistoryId)
-          .should.equal true
+      it("should set the projectHistoryId for the doc", function() {
+        return this.multi.set
+          .calledWith(`ProjectHistoryId:${this.doc_id}`, this.projectHistoryId)
+          .should.equal(true);
+      });
-      it "should add the doc_id to the project set", ->
-        @rclient.sadd
-          .calledWith("DocsIn:#{@project_id}", @doc_id)
-          .should.equal true
+      it("should add the doc_id to the project set", function() {
+        return this.rclient.sadd
+          .calledWith(`DocsIn:${this.project_id}`, this.doc_id)
+          .should.equal(true);
+      });
-      it 'should not log any errors', ->
-        @logger.error.calledWith()
-          .should.equal false
+      return it('should not log any errors', function() {
+        return this.logger.error.calledWith()
+          .should.equal(false);
+      });
+    });
-    describe "with empty ranges", ->
-      beforeEach (done) ->
-        @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, {}, @pathname, @projectHistoryId, done
+    describe("with empty ranges", function() {
+      beforeEach(function(done) {
+        return this.RedisManager.putDocInMemory(this.project_id, this.doc_id, this.lines, this.version, {}, this.pathname, this.projectHistoryId, done);
+      });
-      it "should delete the ranges key", ->
-        @multi.del
-          .calledWith("Ranges:#{@doc_id}")
-          .should.equal true
+      it("should delete the ranges key", function() {
+        return this.multi.del
+          .calledWith(`Ranges:${this.doc_id}`)
+          .should.equal(true);
+      });
-      it "should not set the ranges", ->
-        @multi.set
-          .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges))
-          .should.equal false
+      return it("should not set the ranges", function() {
+        return this.multi.set
+          .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges))
+          .should.equal(false);
+      });
+    });
-    describe "with null bytes in the serialized doc lines", ->
-      beforeEach ->
-        @_stringify = JSON.stringify
-        @JSON.stringify = () -> return '["bad bytes! \u0000 <- here"]'
-        @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, @callback
+    describe("with null bytes in the serialized doc lines", function() {
+      beforeEach(function() {
+        this._stringify = JSON.stringify;
+        this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]';
+        return this.RedisManager.putDocInMemory(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.callback);
+      });
-      afterEach ->
-        @JSON.stringify = @_stringify
+      afterEach(function() {
+        return this.JSON.stringify = this._stringify;
+      });
-      it "should log an error", ->
-        @logger.error.called.should.equal true
+      it("should log an error", function() {
+        return this.logger.error.called.should.equal(true);
+      });
-      it "should call the callback with an error", ->
-        @callback.calledWith(new Error("null bytes found in doc lines")).should.equal true
+      return it("should call the callback with an error", function() {
+        return this.callback.calledWith(new Error("null bytes found in doc lines")).should.equal(true);
+      });
+    });
-    describe "with ranges that are too big", ->
-      beforeEach ->
-        @RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large"))
-        @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, @callback
+    return describe("with ranges that are too big", function() {
+      beforeEach(function() {
+        this.RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large"));
+        return this.RedisManager.putDocInMemory(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.callback);
+      });
-      it 'should log an error', ->
-        @logger.error.called.should.equal true
+      it('should log an error', function() {
+        return this.logger.error.called.should.equal(true);
+      });
-      it "should call the callback with the error", ->
-        @callback.calledWith(new Error("ranges are too large")).should.equal true
+      return it("should call the callback with the error", function() {
+        return this.callback.calledWith(new Error("ranges are too large")).should.equal(true);
+      });
+    });
+  });
-  describe "removeDocFromMemory", ->
-    beforeEach (done) ->
-      @multi.strlen = sinon.stub()
-      @multi.del = sinon.stub()
-      @multi.srem = sinon.stub()
-      @multi.exec.yields()
-      @RedisManager.removeDocFromMemory @project_id, @doc_id, done
+  describe("removeDocFromMemory", function() {
+    beforeEach(function(done) {
+      this.multi.strlen = sinon.stub();
+      this.multi.del = sinon.stub();
+      this.multi.srem = sinon.stub();
+      this.multi.exec.yields();
+      return this.RedisManager.removeDocFromMemory(this.project_id, this.doc_id, done);
+    });
-    it "should check the length of the current doclines", ->
-      @multi.strlen
-        .calledWith("doclines:#{@doc_id}")
-        .should.equal true
+    it("should check the length of the current doclines", function() {
+      return this.multi.strlen
+        .calledWith(`doclines:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should delete the lines", ->
-      @multi.del
-        .calledWith("doclines:#{@doc_id}")
-        .should.equal true
+    it("should delete the lines", function() {
+      return this.multi.del
+        .calledWith(`doclines:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should delete the version", ->
-      @multi.del
-        .calledWith("DocVersion:#{@doc_id}")
-        .should.equal true
+    it("should delete the version", function() {
+      return this.multi.del
+        .calledWith(`DocVersion:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should delete the hash", ->
-      @multi.del
-        .calledWith("DocHash:#{@doc_id}")
-        .should.equal true
+    it("should delete the hash", function() {
+      return this.multi.del
+        .calledWith(`DocHash:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should delete the unflushed time", ->
-      @multi.del
-        .calledWith("UnflushedTime:#{@doc_id}")
-        .should.equal true
+    it("should delete the unflushed time", function() {
+      return this.multi.del
+        .calledWith(`UnflushedTime:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should delete the project_id for the doc", ->
-      @multi.del
-        .calledWith("ProjectId:#{@doc_id}")
-        .should.equal true
+    it("should delete the project_id for the doc", function() {
+      return this.multi.del
+        .calledWith(`ProjectId:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should remove the doc_id from the project set", ->
-      @multi.srem
-        .calledWith("DocsIn:#{@project_id}", @doc_id)
-        .should.equal true
+    it("should remove the doc_id from the project set", function() {
+      return this.multi.srem
+        .calledWith(`DocsIn:${this.project_id}`, this.doc_id)
+        .should.equal(true);
+    });
-    it "should delete the pathname for the doc", ->
-      @multi.del
-        .calledWith("Pathname:#{@doc_id}")
-        .should.equal true
+    it("should delete the pathname for the doc", function() {
+      return this.multi.del
+        .calledWith(`Pathname:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should delete the pathname for the doc", ->
-      @multi.del
-        .calledWith("ProjectHistoryId:#{@doc_id}")
-        .should.equal true
+    it("should delete the ProjectHistoryId for the doc", function() {
+      return this.multi.del
+        .calledWith(`ProjectHistoryId:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should delete lastUpdatedAt", ->
-      @multi.del
-        .calledWith("lastUpdatedAt:#{@doc_id}")
-        .should.equal true
+    it("should delete lastUpdatedAt", function() {
+      return this.multi.del
+        .calledWith(`lastUpdatedAt:${this.doc_id}`)
+        .should.equal(true);
+    });
-    it "should delete lastUpdatedBy", ->
-      @multi.del
-        .calledWith("lastUpdatedBy:#{@doc_id}")
-        .should.equal true
+    return it("should delete lastUpdatedBy", function() {
+      return this.multi.del
+        .calledWith(`lastUpdatedBy:${this.doc_id}`)
+        .should.equal(true);
+    });
+  });
-  describe "clearProjectState", ->
-    beforeEach (done) ->
-      @rclient.del = sinon.stub().callsArg(1)
-      @RedisManager.clearProjectState @project_id, done
+  describe("clearProjectState", function() {
+    beforeEach(function(done) {
+      this.rclient.del = sinon.stub().callsArg(1);
+      return this.RedisManager.clearProjectState(this.project_id, done);
+    });
-    it "should delete the project state", ->
-      @rclient.del
-        .calledWith("ProjectState:#{@project_id}")
-        .should.equal true
+    return it("should delete the project state", function() {
+      return this.rclient.del
+        .calledWith(`ProjectState:${this.project_id}`)
+        .should.equal(true);
+    });
+  });
-  describe "renameDoc", ->
-    beforeEach () ->
-      @rclient.rpush = sinon.stub().yields()
-      @rclient.set = sinon.stub().yields()
-      @update =
-        id: @doc_id
-        pathname: @pathname = 'pathname'
-        newPathname: @newPathname = 'new-pathname'
+  return describe("renameDoc", function() {
+    beforeEach(function() {
+      this.rclient.rpush = sinon.stub().yields();
+      this.rclient.set = sinon.stub().yields();
+      return this.update = {
+        id: this.doc_id,
+        pathname: (this.pathname = 'pathname'),
+        newPathname: (this.newPathname = 'new-pathname')
+      };
+    });
-    describe "the document is cached in redis", ->
-      beforeEach ->
-        @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version')
-        @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields()
-        @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @projectHistoryId, @callback
+    describe("the document is cached in redis", function() {
+      beforeEach(function() {
+        this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version');
+        this.ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields();
+        return this.RedisManager.renameDoc(this.project_id, this.doc_id, this.userId, this.update, this.projectHistoryId, this.callback);
+      });
-      it "update the cached pathname", ->
-        @rclient.set
-          .calledWith("Pathname:#{@doc_id}", @newPathname)
-          .should.equal true
+      it("should update the cached pathname", function() {
+        return this.rclient.set
+          .calledWith(`Pathname:${this.doc_id}`, this.newPathname)
+          .should.equal(true);
+      });
-      it "should queue an update", ->
-        @ProjectHistoryRedisManager.queueRenameEntity
-          .calledWithExactly(@project_id, @projectHistoryId, 'doc', @doc_id, @userId, @update, @callback)
-          .should.equal true
+      return it("should queue an update", function() {
+        return this.ProjectHistoryRedisManager.queueRenameEntity
+          .calledWithExactly(this.project_id, this.projectHistoryId, 'doc', this.doc_id, this.userId, this.update, this.callback)
+          .should.equal(true);
+      });
+    });
-    describe "the document is not cached in redis", ->
-      beforeEach ->
-        @RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null)
-        @ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields()
-        @RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @projectHistoryId, @callback
+    describe("the document is not cached in redis", function() {
+      beforeEach(function() {
+        this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null);
+        this.ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields();
+        return this.RedisManager.renameDoc(this.project_id, this.doc_id, this.userId, this.update, this.projectHistoryId, this.callback);
+      });
-      it "does not update the cached pathname", ->
-        @rclient.set.called.should.equal false
+      it("does not update the cached pathname", function() {
+        return this.rclient.set.called.should.equal(false);
+      });
-      it "should queue an update", ->
-        @ProjectHistoryRedisManager.queueRenameEntity
-          .calledWithExactly(@project_id, @projectHistoryId, 'doc', @doc_id, @userId, @update, @callback)
-          .should.equal true
+      return it("should queue an update", function() {
+        return this.ProjectHistoryRedisManager.queueRenameEntity
+          .calledWithExactly(this.project_id, this.projectHistoryId, 'doc', this.doc_id, this.userId, this.update, this.callback)
+          .should.equal(true);
+      });
+    });
-    describe "getDocVersion", ->
-      beforeEach ->
-        @version = 12345
+    return describe("getDocVersion", function() {
+      beforeEach(function() {
+        return this.version = 12345;
+      });
-      describe "when the document does not have a project history type set", ->
-        beforeEach ->
-          @rclient.mget = sinon.stub().withArgs("DocVersion:#{@doc_id}", "ProjectHistoryType:#{@doc_id}").callsArgWith(2, null, ["#{@version}"])
-          @RedisManager.getDocVersion @doc_id, @callback
+      describe("when the document does not have a project history type set", function() {
+        beforeEach(function() {
+          this.rclient.mget = sinon.stub().withArgs(`DocVersion:${this.doc_id}`, `ProjectHistoryType:${this.doc_id}`).callsArgWith(2, null, [`${this.version}`]);
+          return this.RedisManager.getDocVersion(this.doc_id, this.callback);
+        });
-        it "should return the document version and an undefined history type", ->
-          @callback.calledWithExactly(null, @version, undefined).should.equal true
+        return it("should return the document version and an undefined history type", function() {
+          return this.callback.calledWithExactly(null, this.version, undefined).should.equal(true);
+        });
+      });
-      describe "when the document has a project history type set", ->
-        beforeEach ->
-          @rclient.mget = sinon.stub().withArgs("DocVersion:#{@doc_id}", "ProjectHistoryType:#{@doc_id}").callsArgWith(2, null, ["#{@version}", 'project-history'])
-          @RedisManager.getDocVersion @doc_id, @callback
+      return describe("when the document has a project history type set", function() {
+        beforeEach(function() {
+          this.rclient.mget = sinon.stub().withArgs(`DocVersion:${this.doc_id}`, `ProjectHistoryType:${this.doc_id}`).callsArgWith(2, null, [`${this.version}`, 'project-history']);
+          return this.RedisManager.getDocVersion(this.doc_id, this.callback);
+        });
-        it "should return the document version and history type", ->
-          @callback.calledWithExactly(null, @version, 'project-history').should.equal true
+        return it("should return the document version and history type", function() {
+          return this.callback.calledWithExactly(null, this.version, 'project-history').should.equal(true);
+        });
+      });
+    });
+  });
+});
diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js
index 81440bfe5b..06b5699808 100644
--- a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js
+++ b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js
@@ -1,283 +1,358 @@
-text = require "../../../../app/js/sharejs/types/text"
-require("chai").should()
-RangesTracker = require "../../../../app/js/RangesTracker"
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS202: Simplify dynamic range loops
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const text = require("../../../../app/js/sharejs/types/text");
+require("chai").should();
+const RangesTracker = require("../../../../app/js/RangesTracker");
-describe "ShareJS text type", ->
-  beforeEach ->
-    @t = "mock-thread-id"
+describe("ShareJS text type", function() { + beforeEach(function() { + return this.t = "mock-thread-id"; + }); - describe "transform", -> - describe "insert / insert", -> - it "with an insert before", -> - dest = [] - text._tc(dest, { i: "foo", p: 9 }, { i: "bar", p: 3 }) - dest.should.deep.equal [{ i: "foo", p: 12 }] + describe("transform", function() { + describe("insert / insert", function() { + it("with an insert before", function() { + const dest = []; + text._tc(dest, { i: "foo", p: 9 }, { i: "bar", p: 3 }); + return dest.should.deep.equal([{ i: "foo", p: 12 }]); + }); - it "with an insert after", -> - dest = [] - text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 9 }) - dest.should.deep.equal [{ i: "foo", p: 3 }] + it("with an insert after", function() { + const dest = []; + text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 9 }); + return dest.should.deep.equal([{ i: "foo", p: 3 }]); + }); - it "with an insert at the same place with side == 'right'", -> - dest = [] - text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'right') - dest.should.deep.equal [{ i: "foo", p: 6 }] + it("with an insert at the same place with side == 'right'", function() { + const dest = []; + text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'right'); + return dest.should.deep.equal([{ i: "foo", p: 6 }]); + }); - it "with an insert at the same place with side == 'left'", -> - dest = [] - text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'left') - dest.should.deep.equal [{ i: "foo", p: 3 }] + return it("with an insert at the same place with side == 'left'", function() { + const dest = []; + text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'left'); + return dest.should.deep.equal([{ i: "foo", p: 3 }]); + }); + }); - describe "insert / delete", -> - it "with a delete before", -> - dest = [] - text._tc(dest, { i: "foo", p: 9 }, { d: "bar", p: 3 }) - dest.should.deep.equal [{ i: "foo", p: 6 }] + describe("insert / delete", function() { + it("with a delete before", function() { + const dest = []; + text._tc(dest, { i: "foo", p: 9 }, { d: "bar", p: 3 }); + return dest.should.deep.equal([{ i: "foo", p: 6 }]); + }); - it "with a delete after", -> - dest = [] - text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 9 }) - dest.should.deep.equal [{ i: "foo", p: 3 }] + it("with a delete after", function() { + const dest = []; + text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 9 }); + return dest.should.deep.equal([{ i: "foo", p: 3 }]); + }); - it "with a delete at the same place with side == 'right'", -> - dest = [] - text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'right') - dest.should.deep.equal [{ i: "foo", p: 3 }] + it("with a delete at the same place with side == 'right'", function() { + const dest = []; + text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'right'); + return dest.should.deep.equal([{ i: "foo", p: 3 }]); + }); - it "with a delete at the same place with side == 'left'", -> - dest = [] + return it("with a delete at the same place with side == 'left'", function() { + const dest = []; - text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'left') - dest.should.deep.equal [{ i: "foo", p: 3 }] + text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'left'); + return dest.should.deep.equal([{ i: "foo", p: 3 }]); + }); + }); - describe "delete / insert", -> - it "with an insert before", -> - dest = [] - text._tc(dest, { d: "foo", p: 9 }, { i: "bar", p: 3 }) - dest.should.deep.equal [{ d: "foo", p: 12 }] + describe("delete / insert", function() { + it("with an insert before", 
function() { + const dest = []; + text._tc(dest, { d: "foo", p: 9 }, { i: "bar", p: 3 }); + return dest.should.deep.equal([{ d: "foo", p: 12 }]); + }); - it "with an insert after", -> - dest = [] - text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 9 }) - dest.should.deep.equal [{ d: "foo", p: 3 }] + it("with an insert after", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 9 }); + return dest.should.deep.equal([{ d: "foo", p: 3 }]); + }); - it "with an insert at the same place with side == 'right'", -> - dest = [] - text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 3 }, 'right') - dest.should.deep.equal [{ d: "foo", p: 6 }] + it("with an insert at the same place with side == 'right'", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 3 }, 'right'); + return dest.should.deep.equal([{ d: "foo", p: 6 }]); + }); - it "with an insert at the same place with side == 'left'", -> - dest = [] - text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 3 }, 'left') - dest.should.deep.equal [{ d: "foo", p: 6 }] + it("with an insert at the same place with side == 'left'", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 3 }, 'left'); + return dest.should.deep.equal([{ d: "foo", p: 6 }]); + }); - it "with a delete that overlaps the insert location", -> - dest = [] - text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 4 }) - dest.should.deep.equal [{ d: "f", p: 3 }, { d: "oo", p: 6 }] + return it("with a delete that overlaps the insert location", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 4 }); + return dest.should.deep.equal([{ d: "f", p: 3 }, { d: "oo", p: 6 }]); + }); + }); - describe "delete / delete", -> - it "with a delete before", -> - dest = [] - text._tc(dest, { d: "foo", p: 9 }, { d: "bar", p: 3 }) - dest.should.deep.equal [{ d: "foo", p: 6 }] + describe("delete / delete", function() { + it("with a delete before", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 9 }, { d: "bar", p: 3 }); + return dest.should.deep.equal([{ d: "foo", p: 6 }]); + }); - it "with a delete after", -> - dest = [] - text._tc(dest, { d: "foo", p: 3 }, { d: "bar", p: 9 }) - dest.should.deep.equal [{ d: "foo", p: 3 }] + it("with a delete after", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 3 }, { d: "bar", p: 9 }); + return dest.should.deep.equal([{ d: "foo", p: 3 }]); + }); - it "with deleting the same content", -> - dest = [] - text._tc(dest, { d: "foo", p: 3 }, { d: "foo", p: 3 }, 'right') - dest.should.deep.equal [] + it("with deleting the same content", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 3 }, { d: "foo", p: 3 }, 'right'); + return dest.should.deep.equal([]); + }); - it "with the delete overlapping before", -> - dest = [] - text._tc(dest, { d: "foobar", p: 3 }, { d: "abcfoo", p: 0 }, 'right') - dest.should.deep.equal [{ d: "bar", p: 0 }] + it("with the delete overlapping before", function() { + const dest = []; + text._tc(dest, { d: "foobar", p: 3 }, { d: "abcfoo", p: 0 }, 'right'); + return dest.should.deep.equal([{ d: "bar", p: 0 }]); + }); - it "with the delete overlapping after", -> - dest = [] - text._tc(dest, { d: "abcfoo", p: 3 }, { d: "foobar", p: 6 }) - dest.should.deep.equal [{ d: "abc", p: 3 }] + it("with the delete overlapping after", function() { + const dest = []; + text._tc(dest, { d: "abcfoo", p: 3 }, { d: "foobar", p: 6 }); + return dest.should.deep.equal([{ d: "abc", p: 3 }]); + }); - it "with the delete 
overlapping the whole delete", -> - dest = [] - text._tc(dest, { d: "abcfoo123", p: 3 }, { d: "foo", p: 6 }) - dest.should.deep.equal [{ d: "abc123", p: 3 }] + it("with the delete overlapping the whole delete", function() { + const dest = []; + text._tc(dest, { d: "abcfoo123", p: 3 }, { d: "foo", p: 6 }); + return dest.should.deep.equal([{ d: "abc123", p: 3 }]); + }); - it "with the delete inside the whole delete", -> - dest = [] - text._tc(dest, { d: "foo", p: 6 }, { d: "abcfoo123", p: 3 }) - dest.should.deep.equal [] + return it("with the delete inside the whole delete", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 6 }, { d: "abcfoo123", p: 3 }); + return dest.should.deep.equal([]); + }); + }); - describe "comment / insert", -> - it "with an insert before", -> - dest = [] - text._tc(dest, { c: "foo", p: 9, @t }, { i: "bar", p: 3 }) - dest.should.deep.equal [{ c: "foo", p: 12, @t }] + describe("comment / insert", function() { + it("with an insert before", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 9, t: this.t }, { i: "bar", p: 3 }); + return dest.should.deep.equal([{ c: "foo", p: 12, t: this.t }]); + }); - it "with an insert after", -> - dest = [] - text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 9 }) - dest.should.deep.equal [{ c: "foo", p: 3, @t }] + it("with an insert after", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 9 }); + return dest.should.deep.equal([{ c: "foo", p: 3, t: this.t }]); + }); - it "with an insert at the left edge", -> - dest = [] - text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 3 }) - # RangesTracker doesn't inject inserts into comments on edges, so neither should we - dest.should.deep.equal [{ c: "foo", p: 6, @t }] + it("with an insert at the left edge", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 3 }); + // RangesTracker doesn't inject inserts into comments on edges, so neither should we + return dest.should.deep.equal([{ c: "foo", p: 6, t: this.t }]); + }); - it "with an insert at the right edge", -> - dest = [] - text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 6 }) - # RangesTracker doesn't inject inserts into comments on edges, so neither should we - dest.should.deep.equal [{ c: "foo", p: 3, @t }] + it("with an insert at the right edge", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 6 }); + // RangesTracker doesn't inject inserts into comments on edges, so neither should we + return dest.should.deep.equal([{ c: "foo", p: 3, t: this.t }]); + }); - it "with an insert in the middle", -> - dest = [] - text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 5 }) - dest.should.deep.equal [{ c: "fobaro", p: 3, @t }] + return it("with an insert in the middle", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 5 }); + return dest.should.deep.equal([{ c: "fobaro", p: 3, t: this.t }]); + }); + }); - describe "comment / delete", -> - it "with a delete before", -> - dest = [] - text._tc(dest, { c: "foo", p: 9, @t }, { d: "bar", p: 3 }) - dest.should.deep.equal [{ c: "foo", p: 6, @t }] + describe("comment / delete", function() { + it("with a delete before", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 9, t: this.t }, { d: "bar", p: 3 }); + return dest.should.deep.equal([{ c: "foo", p: 6, t: this.t }]); + }); - it "with a delete after", -> - dest = [] - text._tc(dest, { c: "foo", p: 3, @t }, { i: "bar", p: 9 
}) - dest.should.deep.equal [{ c: "foo", p: 3, @t }] + it("with a delete after", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 9 }); + return dest.should.deep.equal([{ c: "foo", p: 3, t: this.t }]); + }); - it "with a delete overlapping the comment content before", -> - dest = [] - text._tc(dest, { c: "foobar", p: 6, @t }, { d: "123foo", p: 3 }) - dest.should.deep.equal [{ c: "bar", p: 3, @t }] + it("with a delete overlapping the comment content before", function() { + const dest = []; + text._tc(dest, { c: "foobar", p: 6, t: this.t }, { d: "123foo", p: 3 }); + return dest.should.deep.equal([{ c: "bar", p: 3, t: this.t }]); + }); - it "with a delete overlapping the comment content after", -> - dest = [] - text._tc(dest, { c: "foobar", p: 6, @t }, { d: "bar123", p: 9 }) - dest.should.deep.equal [{ c: "foo", p: 6, @t }] + it("with a delete overlapping the comment content after", function() { + const dest = []; + text._tc(dest, { c: "foobar", p: 6, t: this.t }, { d: "bar123", p: 9 }); + return dest.should.deep.equal([{ c: "foo", p: 6, t: this.t }]); + }); - it "with a delete overlapping the comment content in the middle", -> - dest = [] - text._tc(dest, { c: "foo123bar", p: 6, @t }, { d: "123", p: 9 }) - dest.should.deep.equal [{ c: "foobar", p: 6, @t }] + it("with a delete overlapping the comment content in the middle", function() { + const dest = []; + text._tc(dest, { c: "foo123bar", p: 6, t: this.t }, { d: "123", p: 9 }); + return dest.should.deep.equal([{ c: "foobar", p: 6, t: this.t }]); + }); - it "with a delete overlapping the whole comment", -> - dest = [] - text._tc(dest, { c: "foo", p: 6, @t }, { d: "123foo456", p: 3 }) - dest.should.deep.equal [{ c: "", p: 3, @t }] + return it("with a delete overlapping the whole comment", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 6, t: this.t }, { d: "123foo456", p: 3 }); + return dest.should.deep.equal([{ c: "", p: 3, t: this.t }]); + }); + }); - describe "comment / insert", -> - it "should not do anything", -> - dest = [] - text._tc(dest, { i: "foo", p: 6 }, { c: "bar", p: 3 }) - dest.should.deep.equal [{ i: "foo", p: 6 }] + describe("comment / insert", () => it("should not do anything", function() { + const dest = []; + text._tc(dest, { i: "foo", p: 6 }, { c: "bar", p: 3 }); + return dest.should.deep.equal([{ i: "foo", p: 6 }]); + })); - describe "comment / delete", -> - it "should not do anything", -> - dest = [] - text._tc(dest, { d: "foo", p: 6 }, { c: "bar", p: 3 }) - dest.should.deep.equal [{ d: "foo", p: 6 }] + describe("comment / delete", () => it("should not do anything", function() { + const dest = []; + text._tc(dest, { d: "foo", p: 6 }, { c: "bar", p: 3 }); + return dest.should.deep.equal([{ d: "foo", p: 6 }]); + })); - describe "comment / comment", -> - it "should not do anything", -> - dest = [] - text._tc(dest, { c: "foo", p: 6 }, { c: "bar", p: 3 }) - dest.should.deep.equal [{ c: "foo", p: 6 }] + return describe("comment / comment", () => it("should not do anything", function() { + const dest = []; + text._tc(dest, { c: "foo", p: 6 }, { c: "bar", p: 3 }); + return dest.should.deep.equal([{ c: "foo", p: 6 }]); + })); +}); - describe "apply", -> - it "should apply an insert", -> - text.apply("foo", [{ i: "bar", p: 2 }]).should.equal "fobaro" + describe("apply", function() { + it("should apply an insert", () => text.apply("foo", [{ i: "bar", p: 2 }]).should.equal("fobaro")); - it "should apply a delete", -> - text.apply("foo123bar", [{ d: "123", p: 3 
}]).should.equal "foobar" + it("should apply a delete", () => text.apply("foo123bar", [{ d: "123", p: 3 }]).should.equal("foobar")); - it "should do nothing with a comment", -> - text.apply("foo123bar", [{ c: "123", p: 3 }]).should.equal "foo123bar" + it("should do nothing with a comment", () => text.apply("foo123bar", [{ c: "123", p: 3 }]).should.equal("foo123bar")); - it "should throw an error when deleted content does not match", -> - (() -> - text.apply("foo123bar", [{ d: "456", p: 3 }]) - ).should.throw(Error) + it("should throw an error when deleted content does not match", () => ((() => text.apply("foo123bar", [{ d: "456", p: 3 }]))).should.throw(Error)); - it "should throw an error when comment content does not match", -> - (() -> - text.apply("foo123bar", [{ c: "456", p: 3 }]) - ).should.throw(Error) + return it("should throw an error when comment content does not match", () => ((() => text.apply("foo123bar", [{ c: "456", p: 3 }]))).should.throw(Error)); + }); - describe "applying ops and comments in different orders", -> - it "should not matter which op or comment is applied first", -> - transform = (op1, op2, side) -> - d = [] - text._tc(d, op1, op2, side) - return d - - applySnapshot = (snapshot, op) -> - return text.apply(snapshot, op) - - applyRanges = (rangesTracker, ops) -> - for op in ops - rangesTracker.applyOp(op, {}) - return rangesTracker - - commentsEqual = (comments1, comments2) -> - return false if comments1.length != comments2.length - comments1.sort (a,b) -> - if a.offset - b.offset == 0 - return a.length - b.length - else - return a.offset - b.offset - comments2.sort (a,b) -> - if a.offset - b.offset == 0 - return a.length - b.length - else - return a.offset - b.offset - for comment1, i in comments1 - comment2 = comments2[i] - if comment1.offset != comment2.offset or comment1.length != comment2.length - return false - return true - - SNAPSHOT = "123" - - OPS = [] - # Insert ops - for p in [0..SNAPSHOT.length] - OPS.push {i: "a", p: p} - OPS.push {i: "bc", p: p} - for p in [0..(SNAPSHOT.length-1)] - for length in [1..(SNAPSHOT.length - p)] - OPS.push {d: SNAPSHOT.slice(p, p+length), p} - for p in [0..(SNAPSHOT.length-1)] - for length in [1..(SNAPSHOT.length - p)] - OPS.push {c: SNAPSHOT.slice(p, p+length), p, @t} + return describe("applying ops and comments in different orders", () => it("should not matter which op or comment is applied first", function() { + let length, p; + let asc, end; + let asc1, end1; + let asc3, end3; + const transform = function(op1, op2, side) { + const d = []; + text._tc(d, op1, op2, side); + return d; + }; + + const applySnapshot = (snapshot, op) => text.apply(snapshot, op); + + const applyRanges = function(rangesTracker, ops) { + for (let op of Array.from(ops)) { + rangesTracker.applyOp(op, {}); + } + return rangesTracker; + }; + + const commentsEqual = function(comments1, comments2) { + if (comments1.length !== comments2.length) { return false; } + comments1.sort(function(a,b) { + if ((a.offset - b.offset) === 0) { + return a.length - b.length; + } else { + return a.offset - b.offset; + } + }); + comments2.sort(function(a,b) { + if ((a.offset - b.offset) === 0) { + return a.length - b.length; + } else { + return a.offset - b.offset; + } + }); + for (let i = 0; i < comments1.length; i++) { + const comment1 = comments1[i]; + const comment2 = comments2[i]; + if ((comment1.offset !== comment2.offset) || (comment1.length !== comment2.length)) { + return false; + } + } + return true; + }; + + const SNAPSHOT = "123"; + + const OPS = []; + // 
+    for (p = 0, end = SNAPSHOT.length, asc = 0 <= end; asc ? p <= end : p >= end; asc ? p++ : p--) {
+      OPS.push({i: "a", p});
+      OPS.push({i: "bc", p});
+    }
+    for (p = 0, end1 = SNAPSHOT.length-1, asc1 = 0 <= end1; asc1 ? p <= end1 : p >= end1; asc1 ? p++ : p--) {
+      var asc2, end2;
+      for (length = 1, end2 = SNAPSHOT.length - p, asc2 = 1 <= end2; asc2 ? length <= end2 : length >= end2; asc2 ? length++ : length--) {
+        OPS.push({d: SNAPSHOT.slice(p, p+length), p});
+      }
+    }
+    for (p = 0, end3 = SNAPSHOT.length-1, asc3 = 0 <= end3; asc3 ? p <= end3 : p >= end3; asc3 ? p++ : p--) {
+      var asc4, end4;
+      for (length = 1, end4 = SNAPSHOT.length - p, asc4 = 1 <= end4; asc4 ? length <= end4 : length >= end4; asc4 ? length++ : length--) {
+        OPS.push({c: SNAPSHOT.slice(p, p+length), p, t: this.t});
+      }
+    }
-      for op1 in OPS
-        for op2 in OPS
-          op1_t = transform(op1, op2, "left")
-          op2_t = transform(op2, op1, "right")
-
-          rt12 = new RangesTracker()
-          snapshot12 = applySnapshot(applySnapshot(SNAPSHOT, [op1]), op2_t)
-          applyRanges(rt12, [op1])
-          applyRanges(rt12, op2_t)
-
-          rt21 = new RangesTracker()
-          snapshot21 = applySnapshot(applySnapshot(SNAPSHOT, [op2]), op1_t)
-          applyRanges(rt21, [op2])
-          applyRanges(rt21, op1_t)
-
-          if snapshot12 != snapshot21
-            console.error {op1, op2, op1_t, op2_t, snapshot12, snapshot21}, "Ops are not consistent"
-            throw new Error("OT is inconsistent")
-
-          if !commentsEqual(rt12.comments, rt21.comments)
-            console.log rt12.comments
-            console.log rt21.comments
-            console.error {op1, op2, op1_t, op2_t, rt12_comments: rt12.comments, rt21_comments: rt21.comments}, "Comments are not consistent"
-            throw new Error("OT is inconsistent")
+    return (() => {
+      const result = [];
+      for (var op1 of Array.from(OPS)) {
+        result.push((() => {
+          const result1 = [];
+          for (let op2 of Array.from(OPS)) {
+            const op1_t = transform(op1, op2, "left");
+            const op2_t = transform(op2, op1, "right");
+
+            const rt12 = new RangesTracker();
+            const snapshot12 = applySnapshot(applySnapshot(SNAPSHOT, [op1]), op2_t);
+            applyRanges(rt12, [op1]);
+            applyRanges(rt12, op2_t);
+
+            const rt21 = new RangesTracker();
+            const snapshot21 = applySnapshot(applySnapshot(SNAPSHOT, [op2]), op1_t);
+            applyRanges(rt21, [op2]);
+            applyRanges(rt21, op1_t);
+
+            if (snapshot12 !== snapshot21) {
+              console.error({op1, op2, op1_t, op2_t, snapshot12, snapshot21}, "Ops are not consistent");
+              throw new Error("OT is inconsistent");
+            }
+
+            if (!commentsEqual(rt12.comments, rt21.comments)) {
+              console.log(rt12.comments);
+              console.log(rt21.comments);
+              console.error({op1, op2, op1_t, op2_t, rt12_comments: rt12.comments, rt21_comments: rt21.comments}, "Comments are not consistent");
+              throw new Error("OT is inconsistent");
+            } else {
+              result1.push(undefined);
+            }
+          }
+          return result1;
+        })());
+      }
+      return result;
+    })();
+  }));
+});
diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js
index aa03d9fb1e..27f6729bdb 100644
--- a/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js
+++ b/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js
@@ -1,93 +1,129 @@
-sinon = require('sinon')
-chai = require('chai')
-should = chai.should()
-expect = chai.expect
-modulePath = "../../../../app/js/ShareJsDB.js"
-SandboxedModule = require('sandboxed-module')
-Errors = require "../../../../app/js/Errors"
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon');
+const chai = require('chai');
+const should = chai.should();
+const {
+  expect
+} = chai;
+const modulePath = "../../../../app/js/ShareJsDB.js";
+const SandboxedModule = require('sandboxed-module');
+const Errors = require("../../../../app/js/Errors");
-describe "ShareJsDB", ->
-  beforeEach ->
-    @doc_id = "document-id"
-    @project_id = "project-id"
-    @doc_key = "#{@project_id}:#{@doc_id}"
-    @callback = sinon.stub()
-    @ShareJsDB = SandboxedModule.require modulePath, requires:
-      "./RedisManager": @RedisManager = {}
+describe("ShareJsDB", function() {
+  beforeEach(function() {
+    this.doc_id = "document-id";
+    this.project_id = "project-id";
+    this.doc_key = `${this.project_id}:${this.doc_id}`;
+    this.callback = sinon.stub();
+    this.ShareJsDB = SandboxedModule.require(modulePath, { requires: {
+      "./RedisManager": (this.RedisManager = {})
+    }
+    });
-    @version = 42
-    @lines = ["one", "two", "three"]
-    @db = new @ShareJsDB(@project_id, @doc_id, @lines, @version)
+    this.version = 42;
+    this.lines = ["one", "two", "three"];
+    return this.db = new this.ShareJsDB(this.project_id, this.doc_id, this.lines, this.version);
+  });
-  describe "getSnapshot", ->
-    describe "successfully", ->
-      beforeEach ->
-        @db.getSnapshot @doc_key, @callback
+  describe("getSnapshot", function() {
+    describe("successfully", function() {
+      beforeEach(function() {
+        return this.db.getSnapshot(this.doc_key, this.callback);
+      });
-      it "should return the doc lines", ->
-        @callback.args[0][1].snapshot.should.equal @lines.join("\n")
+      it("should return the doc lines", function() {
+        return this.callback.args[0][1].snapshot.should.equal(this.lines.join("\n"));
+      });
-      it "should return the doc version", ->
-        @callback.args[0][1].v.should.equal @version
+      it("should return the doc version", function() {
+        return this.callback.args[0][1].v.should.equal(this.version);
+      });
-      it "should return the type as text", ->
-        @callback.args[0][1].type.should.equal "text"
+      return it("should return the type as text", function() {
+        return this.callback.args[0][1].type.should.equal("text");
+      });
+    });
-    describe "when the key does not match", ->
-      beforeEach ->
-        @db.getSnapshot "bad:key", @callback
+    return describe("when the key does not match", function() {
+      beforeEach(function() {
+        return this.db.getSnapshot("bad:key", this.callback);
+      });
-      it "should return the callback with a NotFoundError", ->
-        @callback.calledWith(new Errors.NotFoundError("not found")).should.equal true
+      return it("should return the callback with a NotFoundError", function() {
+        return this.callback.calledWith(new Errors.NotFoundError("not found")).should.equal(true);
+      });
+    });
+  });
-  describe "getOps", ->
-    describe "with start == end", ->
-      beforeEach ->
-        @start = @end = 42
-        @db.getOps @doc_key, @start, @end, @callback
+  describe("getOps", function() {
+    describe("with start == end", function() {
+      beforeEach(function() {
+        this.start = (this.end = 42);
+        return this.db.getOps(this.doc_key, this.start, this.end, this.callback);
+      });
-      it "should return an empty array", ->
-        @callback.calledWith(null, []).should.equal true
+      return it("should return an empty array", function() {
+        return this.callback.calledWith(null, []).should.equal(true);
+      });
+    });
-    describe "with a non empty range", ->
-      beforeEach ->
-        @start = 35
-        @end = 42
-        @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops)
-        @db.getOps @doc_key, @start, @end, @callback
+    describe("with a non empty range", function() {
+      beforeEach(function() {
+        this.start = 35;
+        this.end = 42;
+        this.RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, this.ops);
+        return this.db.getOps(this.doc_key, this.start, this.end, this.callback);
+      });
-      it "should get the range from redis", ->
-        @RedisManager.getPreviousDocOps
-          .calledWith(@doc_id, @start, @end-1)
-          .should.equal true
+      it("should get the range from redis", function() {
+        return this.RedisManager.getPreviousDocOps
+          .calledWith(this.doc_id, this.start, this.end-1)
+          .should.equal(true);
+      });
-      it "should return the ops", ->
-        @callback.calledWith(null, @ops).should.equal true
+      return it("should return the ops", function() {
+        return this.callback.calledWith(null, this.ops).should.equal(true);
+      });
+    });
-    describe "with no specified end", ->
-      beforeEach ->
-        @start = 35
-        @end = null
-        @RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, @ops)
-        @db.getOps @doc_key, @start, @end, @callback
+    return describe("with no specified end", function() {
+      beforeEach(function() {
+        this.start = 35;
+        this.end = null;
+        this.RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, this.ops);
+        return this.db.getOps(this.doc_key, this.start, this.end, this.callback);
+      });
-      it "should get until the end of the list", ->
-        @RedisManager.getPreviousDocOps
-          .calledWith(@doc_id, @start, -1)
-          .should.equal true
+      return it("should get until the end of the list", function() {
+        return this.RedisManager.getPreviousDocOps
+          .calledWith(this.doc_id, this.start, -1)
+          .should.equal(true);
+      });
+    });
+  });
-  describe "writeOps", ->
-    describe "writing an op", ->
-      beforeEach ->
-        @opData =
-          op: {p: 20, t: "foo"}
-          meta: {source: "bar"}
-          v: @version
-        @db.writeOp @doc_key, @opData, @callback
+  return describe("writeOps", () => describe("writing an op", function() {
+    beforeEach(function() {
+      this.opData = {
+        op: {p: 20, t: "foo"},
+        meta: {source: "bar"},
+        v: this.version
+      };
+      return this.db.writeOp(this.doc_key, this.opData, this.callback);
+    });
-      it "should write into appliedOps", ->
-        expect(@db.appliedOps[@doc_key]).to.deep.equal [@opData]
+    it("should write into appliedOps", function() {
+      return expect(this.db.appliedOps[this.doc_key]).to.deep.equal([this.opData]);
+    });
-      it "should call the callback without an error", ->
-        @callback.called.should.equal true
-        (@callback.args[0][0]?).should.equal false
+    return it("should call the callback without an error", function() {
+      this.callback.called.should.equal(true);
+      return (this.callback.args[0][0] != null).should.equal(false);
+    });
+  }));
+});
diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js
index 363705845f..4e0fbc52dd 100644
--- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js
+++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js
@@ -1,131 +1,181 @@
-sinon = require('sinon')
-chai = require('chai')
-should = chai.should()
-modulePath = "../../../../app/js/ShareJsUpdateManager.js"
-SandboxedModule = require('sandboxed-module')
-crypto = require('crypto')
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon');
+const chai = require('chai');
+const should = chai.should();
+const modulePath = "../../../../app/js/ShareJsUpdateManager.js";
+const SandboxedModule = require('sandboxed-module');
+const crypto = require('crypto');
-describe "ShareJsUpdateManager", ->
-  beforeEach ->
-    @project_id = "project-id-123"
-    @doc_id = "document-id-123"
-    @callback = sinon.stub()
-    @ShareJsUpdateManager = SandboxedModule.require modulePath,
-      requires:
+describe("ShareJsUpdateManager", function() {
+  beforeEach(function() {
+    let Model;
+    this.project_id = "project-id-123";
+    this.doc_id = "document-id-123";
+    this.callback = sinon.stub();
+    return this.ShareJsUpdateManager = SandboxedModule.require(modulePath, {
+      requires: {
        "./sharejs/server/model":
-          class Model
-            constructor: (@db) ->
-        "./ShareJsDB" : @ShareJsDB = { mockDB: true }
-        "redis-sharelatex" : createClient: () => @rclient = auth:->
-        "logger-sharelatex": @logger = { log: sinon.stub() }
-        "./RealTimeRedisManager": @RealTimeRedisManager = {}
-        "./Metrics": @metrics = { inc: sinon.stub() }
-      globals:
-        clearTimeout: @clearTimeout = sinon.stub()
+          (Model = class Model {
+            constructor(db) {
+              this.db = db;
+            }
+          }),
+        "./ShareJsDB" : (this.ShareJsDB = { mockDB: true }),
+        "redis-sharelatex" : { createClient: () => { return this.rclient = {auth() {}}; }
+        },
+        "logger-sharelatex": (this.logger = { log: sinon.stub() }),
+        "./RealTimeRedisManager": (this.RealTimeRedisManager = {}),
+        "./Metrics": (this.metrics = { inc: sinon.stub() })
+      },
+      globals: {
+        clearTimeout: (this.clearTimeout = sinon.stub())
+      }
+    }
+    );
+  });
-  describe "applyUpdate", ->
-    beforeEach ->
-      @lines = ["one", "two"]
-      @version = 34
-      @updatedDocLines = ["onefoo", "two"]
-      content = @updatedDocLines.join("\n")
-      @hash = crypto.createHash('sha1').update("blob " + content.length + "\x00").update(content, 'utf8').digest('hex')
-      @update = {p: 4, t: "foo", v:@version, hash:@hash}
-      @model =
-        applyOp: sinon.stub().callsArg(2)
-        getSnapshot: sinon.stub()
-        db:
+  describe("applyUpdate", function() {
+    beforeEach(function() {
+      this.lines = ["one", "two"];
+      this.version = 34;
+      this.updatedDocLines = ["onefoo", "two"];
+      const content = this.updatedDocLines.join("\n");
+      this.hash = crypto.createHash('sha1').update("blob " + content.length + "\x00").update(content, 'utf8').digest('hex');
+      this.update = {p: 4, t: "foo", v:this.version, hash:this.hash};
+      this.model = {
+        applyOp: sinon.stub().callsArg(2),
+        getSnapshot: sinon.stub(),
+        db: {
          appliedOps: {}
-      @ShareJsUpdateManager.getNewShareJsModel = sinon.stub().returns(@model)
-      @ShareJsUpdateManager._listenForOps = sinon.stub()
-      @ShareJsUpdateManager.removeDocFromCache = sinon.stub().callsArg(1)
+        }
+      };
+      this.ShareJsUpdateManager.getNewShareJsModel = sinon.stub().returns(this.model);
+      this.ShareJsUpdateManager._listenForOps = sinon.stub();
+      return this.ShareJsUpdateManager.removeDocFromCache = sinon.stub().callsArg(1);
+    });
-    describe "successfully", ->
-      beforeEach (done) ->
-        @model.getSnapshot.callsArgWith(1, null, {snapshot: @updatedDocLines.join("\n"), v: @version})
-        @model.db.appliedOps["#{@project_id}:#{@doc_id}"] = @appliedOps = ["mock-ops"]
-        @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version, appliedOps) =>
-          @callback(err, docLines, version, appliedOps)
-          done()
+    describe("successfully", function() {
+      beforeEach(function(done) {
+        this.model.getSnapshot.callsArgWith(1, null, {snapshot: this.updatedDocLines.join("\n"), v: this.version});
+        this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] = (this.appliedOps = ["mock-ops"]);
+        return this.ShareJsUpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.lines, this.version, (err, docLines, version, appliedOps) => {
+          this.callback(err, docLines, version, appliedOps);
+          return done();
+        });
+      });
-      it "should create a new ShareJs model", ->
-        @ShareJsUpdateManager.getNewShareJsModel
-          .calledWith(@project_id, @doc_id, @lines, @version)
-          .should.equal true
+      it("should create a new ShareJs model", function() {
+        return this.ShareJsUpdateManager.getNewShareJsModel
+          .calledWith(this.project_id, this.doc_id, this.lines, this.version)
+          .should.equal(true);
+      });
-      it "should listen for ops on the model", ->
-        @ShareJsUpdateManager._listenForOps
-          .calledWith(@model)
-          .should.equal true
+      it("should listen for ops on the model", function() {
+        return this.ShareJsUpdateManager._listenForOps
+          .calledWith(this.model)
+          .should.equal(true);
+      });
-      it "should send the update to ShareJs", ->
-        @model.applyOp
-          .calledWith("#{@project_id}:#{@doc_id}", @update)
-          .should.equal true
+      it("should send the update to ShareJs", function() {
+        return this.model.applyOp
+          .calledWith(`${this.project_id}:${this.doc_id}`, this.update)
+          .should.equal(true);
+      });
-      it "should get the updated doc lines", ->
-        @model.getSnapshot
-          .calledWith("#{@project_id}:#{@doc_id}")
-          .should.equal true
+      it("should get the updated doc lines", function() {
+        return this.model.getSnapshot
+          .calledWith(`${this.project_id}:${this.doc_id}`)
+          .should.equal(true);
+      });
-      it "should return the updated doc lines, version and ops", ->
-        @callback.calledWith(null, @updatedDocLines, @version, @appliedOps).should.equal true
+      return it("should return the updated doc lines, version and ops", function() {
+        return this.callback.calledWith(null, this.updatedDocLines, this.version, this.appliedOps).should.equal(true);
+      });
+    });
-    describe "when applyOp fails", ->
-      beforeEach (done) ->
-        @error = new Error("Something went wrong")
-        @model.applyOp = sinon.stub().callsArgWith(2, @error)
-        @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version) =>
-          @callback(err, docLines, version)
-          done()
+    describe("when applyOp fails", function() {
+      beforeEach(function(done) {
+        this.error = new Error("Something went wrong");
+        this.model.applyOp = sinon.stub().callsArgWith(2, this.error);
+        return this.ShareJsUpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.lines, this.version, (err, docLines, version) => {
+          this.callback(err, docLines, version);
+          return done();
+        });
+      });
-      it "should call the callback with the error", ->
-        @callback.calledWith(@error).should.equal true
+      return it("should call the callback with the error", function() {
+        return this.callback.calledWith(this.error).should.equal(true);
+      });
+    });
-    describe "when getSnapshot fails", ->
-      beforeEach (done) ->
-        @error = new Error("Something went wrong")
-        @model.getSnapshot.callsArgWith(1, @error)
-        @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version) =>
-          @callback(err, docLines, version)
-          done()
+    describe("when getSnapshot fails", function() {
+      beforeEach(function(done) {
+        this.error = new Error("Something went wrong");
+        this.model.getSnapshot.callsArgWith(1, this.error);
+        return this.ShareJsUpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.lines, this.version, (err, docLines, version) => {
+          this.callback(err, docLines, version);
+          return done();
+        });
+      });
-      it "should call the callback with the error", ->
-        @callback.calledWith(@error).should.equal true
+      return it("should call the callback with the error", function() {
+        return this.callback.calledWith(this.error).should.equal(true);
+      });
+    });
-    describe "with an invalid hash", ->
-      beforeEach (done) ->
-        @error = new Error("invalid hash")
-        @model.getSnapshot.callsArgWith(1, null, {snapshot: "unexpected content", v: @version})
-        @model.db.appliedOps["#{@project_id}:#{@doc_id}"] = @appliedOps = ["mock-ops"]
-        @ShareJsUpdateManager.applyUpdate @project_id, @doc_id, @update, @lines, @version, (err, docLines, version, appliedOps) =>
-          @callback(err, docLines, version, appliedOps)
-          done()
+    return describe("with an invalid hash", function() {
+      beforeEach(function(done) {
+        this.error = new Error("invalid hash");
+        this.model.getSnapshot.callsArgWith(1, null, {snapshot: "unexpected content", v: this.version});
+        this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] = (this.appliedOps = ["mock-ops"]);
+        return this.ShareJsUpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.lines, this.version, (err, docLines, version, appliedOps) => {
+          this.callback(err, docLines, version, appliedOps);
+          return done();
+        });
+      });
-      it "should call the callback with the error", ->
-        @callback.calledWith(@error).should.equal true
+      return it("should call the callback with the error", function() {
+        return this.callback.calledWith(this.error).should.equal(true);
+      });
+    });
+  });
-  describe "_listenForOps", ->
-    beforeEach ->
-      @model = on: (event, callback) =>
-        @callback = callback
-      sinon.spy @model, "on"
-      @ShareJsUpdateManager._listenForOps(@model)
+  return describe("_listenForOps", function() {
+    beforeEach(function() {
+      this.model = { on: (event, callback) => {
+        return this.callback = callback;
+      }
+      };
+      sinon.spy(this.model, "on");
+      return this.ShareJsUpdateManager._listenForOps(this.model);
+    });
-    it "should listen to the model for updates", ->
-      @model.on.calledWith("applyOp")
-        .should.equal true
+    it("should listen to the model for updates", function() {
+      return this.model.on.calledWith("applyOp")
+        .should.equal(true);
+    });
-    describe "the callback", ->
-      beforeEach ->
-        @opData =
-          op: {t: "foo", p: 1}
-          meta: source: "bar"
-        @RealTimeRedisManager.sendData = sinon.stub()
-        @callback("#{@project_id}:#{@doc_id}", @opData)
+    return describe("the callback", function() {
+      beforeEach(function() {
+        this.opData = {
+          op: {t: "foo", p: 1},
+          meta: { source: "bar"
+          }
+        };
+        this.RealTimeRedisManager.sendData = sinon.stub();
+        return this.callback(`${this.project_id}:${this.doc_id}`, this.opData);
+      });
-      it "should publish the op to redis", ->
-        @RealTimeRedisManager.sendData
-          .calledWith({project_id: @project_id, doc_id: @doc_id, op: @opData})
-          .should.equal true
+      return it("should publish the op to redis", function() {
+        return this.RealTimeRedisManager.sendData
+          .calledWith({project_id: this.project_id, doc_id: this.doc_id, op: this.opData})
+          .should.equal(true);
+      });
+    });
+  });
+});
diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js
index ac8d4c742c..ad195d5081 100644
--- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js
+++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js
@@ -1,369 +1,481 @@
-sinon = require('sinon')
-chai = require('chai')
-should = chai.should()
-modulePath = "../../../../app/js/UpdateManager.js"
-SandboxedModule = require('sandboxed-module')
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon');
+const chai = require('chai');
+const should = chai.should();
+const modulePath = "../../../../app/js/UpdateManager.js";
+const SandboxedModule = require('sandboxed-module');
-describe "UpdateManager", ->
-  beforeEach ->
-    @project_id = "project-id-123"
-    @projectHistoryId = "history-id-123"
-    @doc_id = "document-id-123"
-    @callback = sinon.stub()
-    @UpdateManager = SandboxedModule.require modulePath, requires:
-      "./LockManager" : @LockManager = {}
-      "./RedisManager" : @RedisManager = {}
-      "./RealTimeRedisManager" : @RealTimeRedisManager = {}
-      "./ShareJsUpdateManager" : @ShareJsUpdateManager = {}
-      "./HistoryManager" : @HistoryManager = {}
-      "logger-sharelatex": @logger = { log: sinon.stub() }
-      "./Metrics": @Metrics =
-        Timer: class Timer
-          done: sinon.stub()
-      "settings-sharelatex": @Settings = {}
-      "./DocumentManager": @DocumentManager = {}
-      "./RangesManager": @RangesManager = {}
-      "./SnapshotManager": @SnapshotManager = {}
-      "./Profiler": class Profiler
-        log: sinon.stub().returns { end: sinon.stub() }
-        end: sinon.stub()
+describe("UpdateManager", function() {
+  beforeEach(function() {
+    let Profiler, Timer;
+    this.project_id = "project-id-123";
+    this.projectHistoryId = "history-id-123";
+    this.doc_id = "document-id-123";
+    this.callback = sinon.stub();
+    return this.UpdateManager = SandboxedModule.require(modulePath, { requires: {
+      "./LockManager" : (this.LockManager = {}),
+      "./RedisManager" : (this.RedisManager = {}),
+      "./RealTimeRedisManager" : (this.RealTimeRedisManager = {}),
+      "./ShareJsUpdateManager" : (this.ShareJsUpdateManager = {}),
+      "./HistoryManager" : (this.HistoryManager = {}),
+      "logger-sharelatex": (this.logger = { log: sinon.stub() }),
+      "./Metrics": (this.Metrics = {
+        Timer: (Timer = (function() {
+          Timer = class Timer {
+            static initClass() {
+              this.prototype.done = sinon.stub();
+            }
+          };
+          Timer.initClass();
+          return Timer;
+        })())
+      }),
+      "settings-sharelatex": (this.Settings = {}),
+      "./DocumentManager": (this.DocumentManager = {}),
+      "./RangesManager": (this.RangesManager = {}),
+      "./SnapshotManager": (this.SnapshotManager = {}),
+      "./Profiler": (Profiler = (function() {
+        Profiler = class Profiler {
+          static initClass() {
+            this.prototype.log = sinon.stub().returns({ end: sinon.stub() });
+            this.prototype.end = sinon.stub();
+          }
+        };
+        Profiler.initClass();
+        return Profiler;
+      })())
+    }
+    }
+    );
+  });
-  describe "processOutstandingUpdates", ->
-    beforeEach ->
-      @UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2)
-      @UpdateManager.processOutstandingUpdates @project_id, @doc_id, @callback
+  describe("processOutstandingUpdates", function() {
+    beforeEach(function() {
+      this.UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2);
+      return this.UpdateManager.processOutstandingUpdates(this.project_id, this.doc_id, this.callback);
+    });
-    it "should apply
the updates", -> - @UpdateManager.fetchAndApplyUpdates.calledWith(@project_id, @doc_id).should.equal true + it("should apply the updates", function() { + return this.UpdateManager.fetchAndApplyUpdates.calledWith(this.project_id, this.doc_id).should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); - it "should time the execution", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the execution", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "processOutstandingUpdatesWithLock", -> - describe "when the lock is free", -> - beforeEach -> - @LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, @lockValue = "mock-lock-value") - @LockManager.releaseLock = sinon.stub().callsArg(2) - @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub().callsArg(2) - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + describe("processOutstandingUpdatesWithLock", function() { + describe("when the lock is free", function() { + beforeEach(function() { + this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, (this.lockValue = "mock-lock-value")); + this.LockManager.releaseLock = sinon.stub().callsArg(2); + this.UpdateManager.continueProcessingUpdatesWithLock = sinon.stub().callsArg(2); + return this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2); + }); - describe "successfully", -> - beforeEach -> - @UpdateManager.processOutstandingUpdatesWithLock @project_id, @doc_id, @callback + describe("successfully", function() { + beforeEach(function() { + return this.UpdateManager.processOutstandingUpdatesWithLock(this.project_id, this.doc_id, this.callback); + }); - it "should acquire the lock", -> - @LockManager.tryLock.calledWith(@doc_id).should.equal true + it("should acquire the lock", function() { + return this.LockManager.tryLock.calledWith(this.doc_id).should.equal(true); + }); - it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true + it("should free the lock", function() { + return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); + }); - it "should process the outstanding updates", -> - @UpdateManager.processOutstandingUpdates.calledWith(@project_id, @doc_id).should.equal true + it("should process the outstanding updates", function() { + return this.UpdateManager.processOutstandingUpdates.calledWith(this.project_id, this.doc_id).should.equal(true); + }); - it "should do everything with the lock acquired", -> - @UpdateManager.processOutstandingUpdates.calledAfter(@LockManager.tryLock).should.equal true - @UpdateManager.processOutstandingUpdates.calledBefore(@LockManager.releaseLock).should.equal true + it("should do everything with the lock acquired", function() { + this.UpdateManager.processOutstandingUpdates.calledAfter(this.LockManager.tryLock).should.equal(true); + return this.UpdateManager.processOutstandingUpdates.calledBefore(this.LockManager.releaseLock).should.equal(true); + }); - it "should continue processing new updates that may have come in", -> - @UpdateManager.continueProcessingUpdatesWithLock.calledWith(@project_id, @doc_id).should.equal true + it("should continue processing new updates that may have come in", function() { + return this.UpdateManager.continueProcessingUpdatesWithLock.calledWith(this.project_id, 
this.doc_id).should.equal(true); + }); - it "should return the callback", -> - @callback.called.should.equal true + return it("should return the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "when processOutstandingUpdates returns an error", -> - beforeEach -> - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, @error = new Error("Something went wrong")) - @UpdateManager.processOutstandingUpdatesWithLock @project_id, @doc_id, @callback + return describe("when processOutstandingUpdates returns an error", function() { + beforeEach(function() { + this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, (this.error = new Error("Something went wrong"))); + return this.UpdateManager.processOutstandingUpdatesWithLock(this.project_id, this.doc_id, this.callback); + }); - it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true + it("should free the lock", function() { + return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); + }); - it "should return the error in the callback", -> - @callback.calledWith(@error).should.equal true + return it("should return the error in the callback", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); + }); - describe "when the lock is taken", -> - beforeEach -> - @LockManager.tryLock = sinon.stub().callsArgWith(1, null, false) - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) - @UpdateManager.processOutstandingUpdatesWithLock @project_id, @doc_id, @callback + return describe("when the lock is taken", function() { + beforeEach(function() { + this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false); + this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2); + return this.UpdateManager.processOutstandingUpdatesWithLock(this.project_id, this.doc_id, this.callback); + }); - it "should return the callback", -> - @callback.called.should.equal true + it("should return the callback", function() { + return this.callback.called.should.equal(true); + }); - it "should not process the updates", -> - @UpdateManager.processOutstandingUpdates.called.should.equal false + return it("should not process the updates", function() { + return this.UpdateManager.processOutstandingUpdates.called.should.equal(false); + }); + }); + }); - describe "continueProcessingUpdatesWithLock", -> - describe "when there are outstanding updates", -> - beforeEach -> - @RealTimeRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 3) - @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) - @UpdateManager.continueProcessingUpdatesWithLock @project_id, @doc_id, @callback + describe("continueProcessingUpdatesWithLock", function() { + describe("when there are outstanding updates", function() { + beforeEach(function() { + this.RealTimeRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 3); + this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2); + return this.UpdateManager.continueProcessingUpdatesWithLock(this.project_id, this.doc_id, this.callback); + }); - it "should process the outstanding updates", -> - @UpdateManager.processOutstandingUpdatesWithLock.calledWith(@project_id, @doc_id).should.equal true + it("should process the outstanding updates", function() { + return this.UpdateManager.processOutstandingUpdatesWithLock.calledWith(this.project_id, 
this.doc_id).should.equal(true); + }); - it "should return the callback", -> - @callback.called.should.equal true + return it("should return the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "when there are no outstanding updates", -> - beforeEach -> - @RealTimeRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 0) - @UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2) - @UpdateManager.continueProcessingUpdatesWithLock @project_id, @doc_id, @callback + return describe("when there are no outstanding updates", function() { + beforeEach(function() { + this.RealTimeRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 0); + this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2); + return this.UpdateManager.continueProcessingUpdatesWithLock(this.project_id, this.doc_id, this.callback); + }); - it "should not try to process the outstanding updates", -> - @UpdateManager.processOutstandingUpdatesWithLock.called.should.equal false + it("should not try to process the outstanding updates", function() { + return this.UpdateManager.processOutstandingUpdatesWithLock.called.should.equal(false); + }); - it "should return the callback", -> - @callback.called.should.equal true + return it("should return the callback", function() { + return this.callback.called.should.equal(true); + }); + }); + }); - describe "fetchAndApplyUpdates", -> - describe "with updates", -> - beforeEach -> - @updates = [{p: 1, t: "foo"}] - @updatedDocLines = ["updated", "lines"] - @version = 34 - @RealTimeRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) - @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null, @updatedDocLines, @version) - @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, @callback + describe("fetchAndApplyUpdates", function() { + describe("with updates", function() { + beforeEach(function() { + this.updates = [{p: 1, t: "foo"}]; + this.updatedDocLines = ["updated", "lines"]; + this.version = 34; + this.RealTimeRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, this.updates); + this.UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null, this.updatedDocLines, this.version); + return this.UpdateManager.fetchAndApplyUpdates(this.project_id, this.doc_id, this.callback); + }); - it "should get the pending updates", -> - @RealTimeRedisManager.getPendingUpdatesForDoc.calledWith(@doc_id).should.equal true + it("should get the pending updates", function() { + return this.RealTimeRedisManager.getPendingUpdatesForDoc.calledWith(this.doc_id).should.equal(true); + }); - it "should apply the updates", -> - for update in @updates - @UpdateManager.applyUpdate - .calledWith(@project_id, @doc_id, update) - .should.equal true + it("should apply the updates", function() { + return Array.from(this.updates).map((update) => + this.UpdateManager.applyUpdate + .calledWith(this.project_id, this.doc_id, update) + .should.equal(true)); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "when there are no updates", -> - beforeEach -> - @updates = [] - @RealTimeRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, @updates) - @UpdateManager.applyUpdate = sinon.stub() - @RedisManager.setDocument = sinon.stub() - @UpdateManager.fetchAndApplyUpdates @project_id, @doc_id, 
@callback + return describe("when there are no updates", function() { + beforeEach(function() { + this.updates = []; + this.RealTimeRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, this.updates); + this.UpdateManager.applyUpdate = sinon.stub(); + this.RedisManager.setDocument = sinon.stub(); + return this.UpdateManager.fetchAndApplyUpdates(this.project_id, this.doc_id, this.callback); + }); - it "should not call applyUpdate", -> - @UpdateManager.applyUpdate.called.should.equal false + it("should not call applyUpdate", function() { + return this.UpdateManager.applyUpdate.called.should.equal(false); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); + }); - describe "applyUpdate", -> - beforeEach -> - @updateMeta = { user_id: 'last-author-fake-id' } - @update = {op: [{p: 42, i: "foo"}], meta: @updateMeta} - @updatedDocLines = ["updated", "lines"] - @version = 34 - @lines = ["original", "lines"] - @ranges = { entries: "mock", comments: "mock" } - @updated_ranges = { entries: "updated", comments: "updated" } - @appliedOps = [ {v: 42, op: "mock-op-42"}, { v: 45, op: "mock-op-45" }] - @doc_ops_length = sinon.stub() - @project_ops_length = sinon.stub() - @pathname = '/a/b/c.tex' - @DocumentManager.getDoc = sinon.stub().yields(null, @lines, @version, @ranges, @pathname, @projectHistoryId) - @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges, false) - @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, @updatedDocLines, @version, @appliedOps) - @RedisManager.updateDocument = sinon.stub().yields(null, @doc_ops_length, @project_ops_length) - @RealTimeRedisManager.sendData = sinon.stub() - @UpdateManager._addProjectHistoryMetadataToOps = sinon.stub() - @HistoryManager.recordAndFlushHistoryOps = sinon.stub().callsArg(5) + describe("applyUpdate", function() { + beforeEach(function() { + this.updateMeta = { user_id: 'last-author-fake-id' }; + this.update = {op: [{p: 42, i: "foo"}], meta: this.updateMeta}; + this.updatedDocLines = ["updated", "lines"]; + this.version = 34; + this.lines = ["original", "lines"]; + this.ranges = { entries: "mock", comments: "mock" }; + this.updated_ranges = { entries: "updated", comments: "updated" }; + this.appliedOps = [ {v: 42, op: "mock-op-42"}, { v: 45, op: "mock-op-45" }]; + this.doc_ops_length = sinon.stub(); + this.project_ops_length = sinon.stub(); + this.pathname = '/a/b/c.tex'; + this.DocumentManager.getDoc = sinon.stub().yields(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); + this.RangesManager.applyUpdate = sinon.stub().yields(null, this.updated_ranges, false); + this.ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, this.updatedDocLines, this.version, this.appliedOps); + this.RedisManager.updateDocument = sinon.stub().yields(null, this.doc_ops_length, this.project_ops_length); + this.RealTimeRedisManager.sendData = sinon.stub(); + this.UpdateManager._addProjectHistoryMetadataToOps = sinon.stub(); + return this.HistoryManager.recordAndFlushHistoryOps = sinon.stub().callsArg(5); + }); - describe "normally", -> - beforeEach -> - @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback + describe("normally", function() { + beforeEach(function() { + return this.UpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.callback); + }); - it "should apply the updates via ShareJS", -> - 
@ShareJsUpdateManager.applyUpdate - .calledWith(@project_id, @doc_id, @update, @lines, @version) - .should.equal true + it("should apply the updates via ShareJS", function() { + return this.ShareJsUpdateManager.applyUpdate + .calledWith(this.project_id, this.doc_id, this.update, this.lines, this.version) + .should.equal(true); + }); - it "should update the ranges", -> - @RangesManager.applyUpdate - .calledWith(@project_id, @doc_id, @ranges, @appliedOps, @updatedDocLines) - .should.equal true + it("should update the ranges", function() { + return this.RangesManager.applyUpdate + .calledWith(this.project_id, this.doc_id, this.ranges, this.appliedOps, this.updatedDocLines) + .should.equal(true); + }); - it "should save the document", -> - @RedisManager.updateDocument - .calledWith(@project_id, @doc_id, @updatedDocLines, @version, @appliedOps, @updated_ranges, @updateMeta) - .should.equal true + it("should save the document", function() { + return this.RedisManager.updateDocument + .calledWith(this.project_id, this.doc_id, this.updatedDocLines, this.version, this.appliedOps, this.updated_ranges, this.updateMeta) + .should.equal(true); + }); - it "should add metadata to the ops" , -> - @UpdateManager._addProjectHistoryMetadataToOps - .calledWith(@appliedOps, @pathname, @projectHistoryId, @lines) - .should.equal true + it("should add metadata to the ops" , function() { + return this.UpdateManager._addProjectHistoryMetadataToOps + .calledWith(this.appliedOps, this.pathname, this.projectHistoryId, this.lines) + .should.equal(true); + }); - it "should push the applied ops into the history queue", -> - @HistoryManager.recordAndFlushHistoryOps - .calledWith(@project_id, @doc_id, @appliedOps, @doc_ops_length, @project_ops_length) - .should.equal true + it("should push the applied ops into the history queue", function() { + return this.HistoryManager.recordAndFlushHistoryOps + .calledWith(this.project_id, this.doc_id, this.appliedOps, this.doc_ops_length, this.project_ops_length) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "with UTF-16 surrogate pairs in the update", -> - beforeEach -> - @update = {op: [{p: 42, i: "\uD835\uDC00"}]} - @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback + describe("with UTF-16 surrogate pairs in the update", function() { + beforeEach(function() { + this.update = {op: [{p: 42, i: "\uD835\uDC00"}]}; + return this.UpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.callback); + }); - it "should apply the update but with surrogate pairs removed", -> - @ShareJsUpdateManager.applyUpdate - .calledWith(@project_id, @doc_id, @update) - .should.equal true + return it("should apply the update but with surrogate pairs removed", function() { + this.ShareJsUpdateManager.applyUpdate + .calledWith(this.project_id, this.doc_id, this.update) + .should.equal(true); - # \uFFFD is 'replacement character' - @update.op[0].i.should.equal "\uFFFD\uFFFD" + // \uFFFD is 'replacement character' + return this.update.op[0].i.should.equal("\uFFFD\uFFFD"); + }); + }); - describe "with an error", -> - beforeEach -> - @error = new Error("something went wrong") - @ShareJsUpdateManager.applyUpdate = sinon.stub().yields(@error) - @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback + describe("with an error", function() { + beforeEach(function() { + this.error = new 
Error("something went wrong"); + this.ShareJsUpdateManager.applyUpdate = sinon.stub().yields(this.error); + return this.UpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.callback); + }); - it "should call RealTimeRedisManager.sendData with the error", -> - @RealTimeRedisManager.sendData + it("should call RealTimeRedisManager.sendData with the error", function() { + return this.RealTimeRedisManager.sendData .calledWith({ - project_id: @project_id, - doc_id: @doc_id, - error: @error.message + project_id: this.project_id, + doc_id: this.doc_id, + error: this.error.message }) - .should.equal true + .should.equal(true); + }); - it "should call the callback with the error", -> - @callback.calledWith(@error).should.equal true + return it("should call the callback with the error", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); - describe "when ranges get collapsed", -> - beforeEach -> - @RangesManager.applyUpdate = sinon.stub().yields(null, @updated_ranges, true) - @SnapshotManager.recordSnapshot = sinon.stub().yields() - @UpdateManager.applyUpdate @project_id, @doc_id, @update, @callback + return describe("when ranges get collapsed", function() { + beforeEach(function() { + this.RangesManager.applyUpdate = sinon.stub().yields(null, this.updated_ranges, true); + this.SnapshotManager.recordSnapshot = sinon.stub().yields(); + return this.UpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.callback); + }); - it "should call SnapshotManager.recordSnapshot", -> - @SnapshotManager.recordSnapshot + return it("should call SnapshotManager.recordSnapshot", function() { + return this.SnapshotManager.recordSnapshot .calledWith( - @project_id, - @doc_id, - @version, - @pathname, - @lines, - @ranges + this.project_id, + this.doc_id, + this.version, + this.pathname, + this.lines, + this.ranges ) - .should.equal true + .should.equal(true); + }); + }); + }); - describe "_addProjectHistoryMetadataToOps", -> - it "should add projectHistoryId, pathname and doc_length metadata to the ops", -> - lines = [ - 'some' - 'test' - 'data' - ] - appliedOps = [ - { v: 42, op: [{i: "foo", p: 4}, { i: "bar", p: 6 }] }, - { v: 45, op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] }, - { v: 49, op: [{i: "penguin", p: 18}] } - ] - @UpdateManager._addProjectHistoryMetadataToOps(appliedOps, @pathname, @projectHistoryId, lines) - appliedOps.should.deep.equal [{ - projectHistoryId: @projectHistoryId - v: 42 - op: [{i: "foo", p: 4}, { i: "bar", p: 6 }] - meta: - pathname: @pathname - doc_length: 14 - }, { - projectHistoryId: @projectHistoryId - v: 45 - op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] - meta: - pathname: @pathname - doc_length: 20 # 14 + 'foo' + 'bar' - }, { - projectHistoryId: @projectHistoryId - v: 49 - op: [{i: "penguin", p: 18}] - meta: - pathname: @pathname - doc_length: 23 # 14 - 'qux' + 'bazbaz' - }] + describe("_addProjectHistoryMetadataToOps", () => it("should add projectHistoryId, pathname and doc_length metadata to the ops", function() { + const lines = [ + 'some', + 'test', + 'data' + ]; + const appliedOps = [ + { v: 42, op: [{i: "foo", p: 4}, { i: "bar", p: 6 }] }, + { v: 45, op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] }, + { v: 49, op: [{i: "penguin", p: 18}] } + ]; + this.UpdateManager._addProjectHistoryMetadataToOps(appliedOps, this.pathname, this.projectHistoryId, lines); + return appliedOps.should.deep.equal([{ + projectHistoryId: this.projectHistoryId, + v: 42, + op: [{i: "foo", p: 4}, { i: "bar", p: 6 }], + meta: { 
+ pathname: this.pathname, + doc_length: 14 + } + }, { + projectHistoryId: this.projectHistoryId, + v: 45, + op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }], + meta: { + pathname: this.pathname, + doc_length: 20 + } // 14 + 'foo' + 'bar' + }, { + projectHistoryId: this.projectHistoryId, + v: 49, + op: [{i: "penguin", p: 18}], + meta: { + pathname: this.pathname, + doc_length: 23 + } // 14 - 'qux' + 'bazbaz' + }]); +})); - describe "lockUpdatesAndDo", -> - beforeEach -> - @method = sinon.stub().callsArgWith(3, null, @response_arg1) - @callback = sinon.stub() - @arg1 = "argument 1" - @response_arg1 = "response argument 1" - @lockValue = "mock-lock-value" - @LockManager.getLock = sinon.stub().callsArgWith(1, null, @lockValue) - @LockManager.releaseLock = sinon.stub().callsArg(2) + return describe("lockUpdatesAndDo", function() { + beforeEach(function() { + this.method = sinon.stub().callsArgWith(3, null, this.response_arg1); + this.callback = sinon.stub(); + this.arg1 = "argument 1"; + this.response_arg1 = "response argument 1"; + this.lockValue = "mock-lock-value"; + this.LockManager.getLock = sinon.stub().callsArgWith(1, null, this.lockValue); + return this.LockManager.releaseLock = sinon.stub().callsArg(2); + }); - describe "successfully", -> - beforeEach -> - @UpdateManager.continueProcessingUpdatesWithLock = sinon.stub() - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) - @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + describe("successfully", function() { + beforeEach(function() { + this.UpdateManager.continueProcessingUpdatesWithLock = sinon.stub(); + this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2); + return this.UpdateManager.lockUpdatesAndDo(this.method, this.project_id, this.doc_id, this.arg1, this.callback); + }); - it "should lock the doc", -> - @LockManager.getLock - .calledWith(@doc_id) - .should.equal true + it("should lock the doc", function() { + return this.LockManager.getLock + .calledWith(this.doc_id) + .should.equal(true); + }); - it "should process any outstanding updates", -> - @UpdateManager.processOutstandingUpdates - .calledWith(@project_id, @doc_id) - .should.equal true + it("should process any outstanding updates", function() { + return this.UpdateManager.processOutstandingUpdates + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); - it "should call the method", -> - @method - .calledWith(@project_id, @doc_id, @arg1) - .should.equal true + it("should call the method", function() { + return this.method + .calledWith(this.project_id, this.doc_id, this.arg1) + .should.equal(true); + }); - it "should return the method response to the callback", -> - @callback - .calledWith(null, @response_arg1) - .should.equal true + it("should return the method response to the callback", function() { + return this.callback + .calledWith(null, this.response_arg1) + .should.equal(true); + }); - it "should release the lock", -> - @LockManager.releaseLock - .calledWith(@doc_id, @lockValue) - .should.equal true + it("should release the lock", function() { + return this.LockManager.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true); + }); - it "should continue processing updates", -> - @UpdateManager.continueProcessingUpdatesWithLock - .calledWith(@project_id, @doc_id) - .should.equal true + return it("should continue processing updates", function() { + return this.UpdateManager.continueProcessingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + 
.should.equal(true); + }); + }); - describe "when processOutstandingUpdates returns an error", -> - beforeEach -> - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, @error = new Error("Something went wrong")) - @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + describe("when processOutstandingUpdates returns an error", function() { + beforeEach(function() { + this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, (this.error = new Error("Something went wrong"))); + return this.UpdateManager.lockUpdatesAndDo(this.method, this.project_id, this.doc_id, this.arg1, this.callback); + }); - it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true + it("should free the lock", function() { + return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); + }); - it "should return the error in the callback", -> - @callback.calledWith(@error).should.equal true + return it("should return the error in the callback", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); - describe "when the method returns an error", -> - beforeEach -> - @UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) - @method = sinon.stub().callsArgWith(3, @error = new Error("something went wrong"), @response_arg1) - @UpdateManager.lockUpdatesAndDo @method, @project_id, @doc_id, @arg1, @callback + return describe("when the method returns an error", function() { + beforeEach(function() { + this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2); + this.method = sinon.stub().callsArgWith(3, (this.error = new Error("something went wrong")), this.response_arg1); + return this.UpdateManager.lockUpdatesAndDo(this.method, this.project_id, this.doc_id, this.arg1, this.callback); + }); - it "should free the lock", -> - @LockManager.releaseLock.calledWith(@doc_id, @lockValue).should.equal true + it("should free the lock", function() { + return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); + }); - it "should return the error in the callback", -> - @callback.calledWith(@error).should.equal true + return it("should return the error in the callback", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); + }); +}); From 21850f1e6f7d13276211193dc4394ba629c9a6cc Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:11:22 +0200 Subject: [PATCH 616/769] decaffeinate: Run post-processing cleanups on DiffCodecTests.coffee and 23 other files --- .../unit/coffee/DiffCodec/DiffCodecTests.js | 15 ++++-- .../DispatchManager/DispatchManagerTests.js | 13 +++-- .../DocumentManager/DocumentManagerTests.js | 16 ++++-- .../HistoryManager/HistoryManagerTests.js | 6 +++ .../HistoryRedisManagerTests.js | 7 +++ .../HttpController/HttpControllerTests.js | 6 +++ .../coffee/LockManager/CheckingTheLock.js | 11 +++- .../coffee/LockManager/ReleasingTheLock.js | 7 +++ .../unit/coffee/LockManager/getLockTests.js | 8 +++ .../unit/coffee/LockManager/tryLockTests.js | 7 +++ .../PersistenceManagerTests.js | 6 +++ .../ProjectHistoryRedisManagerTests.js | 13 +++-- .../flushAndDeleteProjectTests.js | 7 +++ .../ProjectManager/flushProjectTests.js | 8 +++ .../ProjectManager/getProjectDocsTests.js | 6 +++ .../ProjectManager/updateProjectTests.js | 6 +++ .../RangesManager/RangesManagerTests.js | 8 +++ .../RateLimitManager/RateLimitManager.js | 6 +++ 
.../RealTimeRedisManagerTests.js | 7 +++ .../coffee/RedisManager/RedisManagerTests.js | 11 +++- .../unit/coffee/ShareJS/TextTransformTests.js | 51 +++++++++++-------- .../unit/coffee/ShareJsDB/ShareJsDBTests.js | 10 +++- .../ShareJsUpdateManagerTests.js | 6 +++ .../UpdateManager/UpdateManagerTests.js | 10 +++- 24 files changed, 207 insertions(+), 44 deletions(-) diff --git a/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js index f3e6f5bbee..85bb8c6a1b 100644 --- a/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js +++ b/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -22,7 +29,7 @@ describe("DiffCodec", function() { it("should insert new text correctly", function(done) { this.before = ["hello world"]; this.after = ["hello beautiful world"]; - return this.DiffCodec.diffAsShareJsOp(this.before, this.after, function(error, ops) { + return this.DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => { expect(ops).to.deep.equal([{ i: "beautiful ", p: 6 @@ -35,7 +42,7 @@ describe("DiffCodec", function() { it("should shift later inserts by previous inserts", function(done) { this.before = ["the boy played with the ball"]; this.after = ["the tall boy played with the red ball"]; - return this.DiffCodec.diffAsShareJsOp(this.before, this.after, function(error, ops) { + return this.DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => { expect(ops).to.deep.equal([ { i: "tall ", p: 4 }, { i: "red ", p: 29 } @@ -47,7 +54,7 @@ describe("DiffCodec", function() { it("should delete text correctly", function(done) { this.before = ["hello beautiful world"]; this.after = ["hello world"]; - return this.DiffCodec.diffAsShareJsOp(this.before, this.after, function(error, ops) { + return this.DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => { expect(ops).to.deep.equal([{ d: "beautiful ", p: 6 @@ -61,7 +68,7 @@ describe("DiffCodec", function() { return it("should shift later deletes by the first deletes", function(done) { this.before = ["the tall boy played with the red ball"]; this.after = ["the boy played with the ball"]; - return this.DiffCodec.diffAsShareJsOp(this.before, this.after, function(error, ops) { + return this.DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => { expect(ops).to.deep.equal([ { d: "tall ", p: 4 }, { d: "red ", p: 24 } diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js index 4ba0e2d16c..8b0c9afcf9 100644 --- a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
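A note for readers skimming this cleanup commit: the recurring change from `describe("x", () => it(...))` back to `describe("x", function() { return it(...); })` is not cosmetic. Mocha invokes suite and hook callbacks with its shared test context bound to `this`, and arrow functions cannot receive that binding, which is also why eslint-plugin-mocha (the source of the `mocha/...` rules disabled in these headers) discourages arrows in those positions. A minimal sketch of the convention, assuming mocha and chai are installed; the stubbed `DiffCodec` object here is a hypothetical stand-in, not the real module:

const { expect } = require('chai')

// Hypothetical stand-in for the module under test.
const DiffCodec = {
  diffAsShareJsOp(before, after, callback) {
    callback(null, [{ i: 'beautiful ', p: 6 }])
  }
}

describe('DiffCodec', function () {
  // Hooks use `function` so Mocha can bind the shared test context to `this`.
  beforeEach(function () {
    this.before = ['hello world']
    this.after = ['hello beautiful world']
  })

  it('should insert new text correctly', function (done) {
    // Inner callbacks may be arrows: they inherit the test's `this` from `it`.
    DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => {
      expect(ops).to.deep.equal([{ i: 'beautiful ', p: 6 }])
      done()
    })
  })
})

This split, `function` for Mocha-owned callbacks and arrows for inner assertions, is exactly the shape the DiffCodec hunk above converges on.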
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -115,7 +122,7 @@ describe("DispatchManager", function() { }); }); - return describe("run", () => it("should call _waitForUpdateThenDispatchWorker until shutting down", function(done) { + return describe("run", function() { return it("should call _waitForUpdateThenDispatchWorker until shutting down", function(done) { let callCount = 0; this.worker._waitForUpdateThenDispatchWorker = callback => { if (callback == null) { callback = function(error) {}; } @@ -134,7 +141,7 @@ describe("DispatchManager", function() { var checkStatus = () => { if (!this.settings.shuttingDown) { // retry until shutdown setTimeout(checkStatus, 100); - return; + } else { this.worker._waitForUpdateThenDispatchWorker.callCount.should.equal(3); return done(); @@ -142,6 +149,6 @@ describe("DispatchManager", function() { }; return checkStatus(); - })); + }); }); }); }); diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js index a338685948..cc13bbeb7a 100644 --- a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js +++ b/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -60,7 +66,7 @@ describe("DocumentManager", function() { return this.lastUpdatedBy = 'last-author-id'; }); - afterEach(() => tk.reset()); + afterEach(function() { return tk.reset(); }); describe("flushAndDeleteDoc", function() { describe("successfully", function() { @@ -111,7 +117,7 @@ describe("DocumentManager", function() { }); }); - return describe("when ignoring flush errors", () => it("should remove the doc from redis", function(done) { + return describe("when ignoring flush errors", function() { return it("should remove the doc from redis", function(done) { return this.DocumentManager.flushAndDeleteDoc(this.project_id, this.doc_id, { ignoreFlushErrors: true }, error => { if (error != null) { return done(error); @@ -119,7 +125,7 @@ describe("DocumentManager", function() { this.RedisManager.removeDocFromMemory.called.should.equal(true); return done(); }); - })); + }); }); }); }); @@ -303,7 +309,7 @@ describe("DocumentManager", function() { }); }); - describe("setDoc", () => describe("with plain tex lines", function() { + describe("setDoc", function() { return describe("with plain tex lines", function() { beforeEach(function() { this.beforeLines = ["before", "lines"]; this.afterLines = ["after", "lines"]; @@ -417,7 +423,7 @@ describe("DocumentManager", function() { op.u.should.equal(true)); }); }); - })); + }); }); describe("acceptChanges", function() { beforeEach(function() { diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js index d02b8fd295..64a07b06ae 100644 --- a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js +++ b/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + mocha/no-nested-tests, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. 
+// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js index 70e95769a7..b5184a11e6 100644 --- a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js index 9acbfb4325..5189f3515d 100644 --- a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js index 4c9c3f5f70..58392480d0 100644 --- a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js +++ b/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -46,7 +53,7 @@ describe('LockManager - checking the lock', function(){ it('should return true if the key does not exists', function(done){ existsStub.yields(null, "0"); - return LockManager.checkLock(doc_id, function(err, free){ + return LockManager.checkLock(doc_id, (err, free) => { free.should.equal(true); return done(); }); @@ -54,7 +61,7 @@ describe('LockManager - checking the lock', function(){ return it('should return false if the key does exists', function(done){ existsStub.yields(null, "1"); - return LockManager.checkLock(doc_id, function(err, free){ + return LockManager.checkLock(doc_id, (err, free) => { free.should.equal(false); return done(); }); diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js index 67616c062c..0cf4ef88b1 100644 --- a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
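Nearly every stub in these LockManager suites fakes a node-style async API with sinon's `yields` / `callsArgWith`, as in `existsStub.yields(null, "0")` in the hunk above. Both invoke a callback argument synchronously when the stub is called: `yields(...args)` targets the first function argument, while `callsArgWith(n, ...args)` targets the argument at index n. A small self-contained sketch; the `getLock` / `checkLock` names mirror the tests, and the values are illustrative:

const sinon = require('sinon')

const LockManager = {
  // Fakes getLock(doc_id, callback) succeeding: the stub synchronously
  // calls argument 1 with (null, 'mock-lock-value').
  getLock: sinon.stub().callsArgWith(1, null, 'mock-lock-value'),
  // yields picks the first function argument, whatever its position.
  checkLock: sinon.stub().yields(null, true)
}

LockManager.getLock('doc-id-123', (err, lockValue) => {
  console.log(err, lockValue) // null 'mock-lock-value'
})

LockManager.checkLock('doc-id-123', (err, free) => {
  console.log(err, free) // null true
})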
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.js b/services/document-updater/test/unit/coffee/LockManager/getLockTests.js index bdb301b8d5..0d99618e59 100644 --- a/services/document-updater/test/unit/coffee/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/coffee/LockManager/getLockTests.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js index cdc3e34f1e..9f6acf0002 100644 --- a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js index cbef821472..68bedb4b2c 100644 --- a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 6187749c18..d3bba8f42f 100644 --- a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
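Several of these suites (ProjectHistoryRedisManagerTests just below, RedisManagerTests later in this patch) freeze the clock with timekeeper, and this commit rewrites their `afterEach(() => tk.reset())` hooks into `function` form. The freeze/reset pairing is what keeps timestamp assertions deterministic; a minimal sketch, with an arbitrary example date:

const tk = require('timekeeper')

tk.freeze(new Date('2020-05-06T12:00:00Z'))
console.log(new Date().toISOString()) // always 2020-05-06T12:00:00.000Z

// Without the reset, the frozen clock would leak into later test files.
tk.reset()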
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -46,7 +53,7 @@ describe("ProjectHistoryRedisManager", function() { ); }); - afterEach(() => tk.reset()); + afterEach(function() { return tk.reset(); }); describe("queueOps", function() { beforeEach(function() { @@ -145,8 +152,8 @@ describe("ProjectHistoryRedisManager", function() { .should.equal(true); }); - describe("queueResyncProjectStructure", () => it("should queue an update", function() {})); + describe("queueResyncProjectStructure", function() { return it("should queue an update", function() {}); }); - return describe("queueResyncDocContent", () => it("should queue an update", function() {})); + return describe("queueResyncDocContent", function() { return it("should queue an update", function() {}); }); }); }); diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js index ec572d7715..ebebf64740 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js index 7160bbca10..7d74c61507 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js index 98fd1e825b..bb700a2a21 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js +++ b/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js index 2c20c7322f..ffe8c49e97 100644 --- a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js index df6a146d9b..5634840221 100644 --- a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js +++ b/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js index 1e5cd6a71c..ed41a4834a 100644 --- a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js +++ b/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js index 135452f5a3..ae6cd4bba2 100644 --- a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js index cf22958b05..7e8cf40d0e 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + mocha/no-identical-title, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
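The oddest-looking cleanup in this commit appears in the TextTransformTests hunk below, where guards like `asc = 0 <= end` become `asc = end >= 0`. The pattern itself is decaffeinate's faithful translation of a CoffeeScript range loop (`for p in [0..end]`), which counts up or down depending on the sign of `end`; the `asc` flag preserves that behaviour, and the cleanup only flips the comparison into non-Yoda form. A sketch of the two forms, assuming `end` is known to be non-negative in the simplified version:

const end = 3

// decaffeinate's direction-preserving translation of `for p in [0..end]`:
for (let p = 0, asc = end >= 0; asc ? p <= end : p >= end; asc ? p++ : p--) {
  console.log(p) // 0 1 2 3
}

// Equivalent when end >= 0 is guaranteed:
for (let p = 0; p <= end; p++) {
  console.log(p)
}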
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -66,6 +74,7 @@ describe("RedisManager", function() { constructor() { this.start = new Date(); } + done() { const timeSpan = new Date - this.start; return timeSpan; @@ -86,7 +95,7 @@ describe("RedisManager", function() { return this.callback = sinon.stub(); }); - afterEach(() => tk.reset()); + afterEach(function() { return tk.reset(); }); describe("getDoc", function() { beforeEach(function() { diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js index 06b5699808..4989b77034 100644 --- a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js +++ b/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + mocha/no-identical-title, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -218,38 +225,38 @@ describe("ShareJS text type", function() { }); }); - describe("comment / insert", () => it("should not do anything", function() { + describe("comment / insert", function() { return it("should not do anything", function() { const dest = []; text._tc(dest, { i: "foo", p: 6 }, { c: "bar", p: 3 }); return dest.should.deep.equal([{ i: "foo", p: 6 }]); - })); + }); }); - describe("comment / delete", () => it("should not do anything", function() { + describe("comment / delete", function() { return it("should not do anything", function() { const dest = []; text._tc(dest, { d: "foo", p: 6 }, { c: "bar", p: 3 }); return dest.should.deep.equal([{ d: "foo", p: 6 }]); - })); + }); }); - return describe("comment / comment", () => it("should not do anything", function() { + return describe("comment / comment", function() { return it("should not do anything", function() { const dest = []; text._tc(dest, { c: "foo", p: 6 }, { c: "bar", p: 3 }); return dest.should.deep.equal([{ c: "foo", p: 6 }]); - })); + }); }); }); describe("apply", function() { - it("should apply an insert", () => text.apply("foo", [{ i: "bar", p: 2 }]).should.equal("fobaro")); + it("should apply an insert", function() { return text.apply("foo", [{ i: "bar", p: 2 }]).should.equal("fobaro"); }); - it("should apply a delete", () => text.apply("foo123bar", [{ d: "123", p: 3 }]).should.equal("foobar")); + it("should apply a delete", function() { return text.apply("foo123bar", [{ d: "123", p: 3 }]).should.equal("foobar"); }); - it("should do nothing with a comment", () => text.apply("foo123bar", [{ c: "123", p: 3 }]).should.equal("foo123bar")); + it("should do nothing with a comment", function() { return text.apply("foo123bar", [{ c: "123", p: 3 }]).should.equal("foo123bar"); }); - it("should throw an error when deleted content does not match", () => ((() => text.apply("foo123bar", [{ d: "456", p: 3 }]))).should.throw(Error)); + it("should throw an error when deleted content does not match", function() { return ((() => text.apply("foo123bar", [{ d: "456", p: 3 }]))).should.throw(Error); }); - return it("should throw an error when comment content does not match", () => ((() => text.apply("foo123bar", [{ c: "456", p: 3 }]))).should.throw(Error)); + return it("should throw an error when comment content does not match", function() { return ((() => text.apply("foo123bar", [{ c: "456", p: 3 }]))).should.throw(Error); 
}); }); - return describe("applying ops and comments in different orders", () => it("should not matter which op or comment is applied first", function() { + return describe("applying ops and comments in different orders", function() { return it("should not matter which op or comment is applied first", function() { let length, p; let asc, end; let asc1, end1; @@ -263,7 +270,7 @@ describe("ShareJS text type", function() { const applySnapshot = (snapshot, op) => text.apply(snapshot, op); const applyRanges = function(rangesTracker, ops) { - for (let op of Array.from(ops)) { + for (const op of Array.from(ops)) { rangesTracker.applyOp(op, {}); } return rangesTracker; @@ -271,14 +278,14 @@ describe("ShareJS text type", function() { const commentsEqual = function(comments1, comments2) { if (comments1.length !== comments2.length) { return false; } - comments1.sort(function(a,b) { + comments1.sort((a,b) => { if ((a.offset - b.offset) === 0) { return a.length - b.length; } else { return a.offset - b.offset; } }); - comments2.sort(function(a,b) { + comments2.sort((a,b) => { if ((a.offset - b.offset) === 0) { return a.length - b.length; } else { @@ -299,19 +306,19 @@ describe("ShareJS text type", function() { const OPS = []; // Insert ops - for (p = 0, end = SNAPSHOT.length, asc = 0 <= end; asc ? p <= end : p >= end; asc ? p++ : p--) { + for (p = 0, end = SNAPSHOT.length, asc = end >= 0; asc ? p <= end : p >= end; asc ? p++ : p--) { OPS.push({i: "a", p}); OPS.push({i: "bc", p}); } - for (p = 0, end1 = SNAPSHOT.length-1, asc1 = 0 <= end1; asc1 ? p <= end1 : p >= end1; asc1 ? p++ : p--) { + for (p = 0, end1 = SNAPSHOT.length-1, asc1 = end1 >= 0; asc1 ? p <= end1 : p >= end1; asc1 ? p++ : p--) { var asc2, end2; - for (length = 1, end2 = SNAPSHOT.length - p, asc2 = 1 <= end2; asc2 ? length <= end2 : length >= end2; asc2 ? length++ : length--) { + for (length = 1, end2 = SNAPSHOT.length - p, asc2 = end2 >= 1; asc2 ? length <= end2 : length >= end2; asc2 ? length++ : length--) { OPS.push({d: SNAPSHOT.slice(p, p+length), p}); } } - for (p = 0, end3 = SNAPSHOT.length-1, asc3 = 0 <= end3; asc3 ? p <= end3 : p >= end3; asc3 ? p++ : p--) { + for (p = 0, end3 = SNAPSHOT.length-1, asc3 = end3 >= 0; asc3 ? p <= end3 : p >= end3; asc3 ? p++ : p--) { var asc4, end4; - for (length = 1, end4 = SNAPSHOT.length - p, asc4 = 1 <= end4; asc4 ? length <= end4 : length >= end4; asc4 ? length++ : length--) { + for (length = 1, end4 = SNAPSHOT.length - p, asc4 = end4 >= 1; asc4 ? length <= end4 : length >= end4; asc4 ? length++ : length--) { OPS.push({c: SNAPSHOT.slice(p, p+length), p, t: this.t}); } } @@ -321,7 +328,7 @@ describe("ShareJS text type", function() { for (var op1 of Array.from(OPS)) { result.push((() => { const result1 = []; - for (let op2 of Array.from(OPS)) { + for (const op2 of Array.from(OPS)) { const op1_t = transform(op1, op2, "left"); const op2_t = transform(op2, op1, "right"); @@ -354,5 +361,5 @@ describe("ShareJS text type", function() { } return result; })(); - })); + }); }); }); diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js index 27f6729bdb..165e8fcdf0 100644 --- a/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js +++ b/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
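// Editorial note: the exhaustive ordering test above is checking OT's
// convergence property (often called TP1): transforming each op against the
// other and applying the pair in either order must produce the same snapshot.
// A sketch of that invariant, assuming `apply(snapshot, ops)` takes a list of
// ops and `transform(op, otherOp, side)` returns a single transformed op, as
// the test's helpers do; `checkConvergence` is our name, not the codebase's.
function checkConvergence(apply, transform, snapshot, op1, op2) {
  const op1_t = transform(op1, op2, "left");
  const op2_t = transform(op2, op1, "right");
  // Path A: op1 first, then op2 transformed against op1
  const resultA = apply(apply(snapshot, [op1]), [op2_t]);
  // Path B: op2 first, then op1 transformed against op2
  const resultB = apply(apply(snapshot, [op2]), [op1_t]);
  return resultA === resultB;
}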
 /*
  * decaffeinate suggestions:
  * DS102: Remove unnecessary code created because of implicit returns
@@ -107,7 +113,7 @@ describe("ShareJsDB", function() {
     });
   });
 
-  return describe("writeOps", () => describe("writing an op", function() {
+  return describe("writeOps", function() { return describe("writing an op", function() {
     beforeEach(function() {
       this.opData = {
         op: {p: 20, t: "foo"},
@@ -125,5 +131,5 @@
       this.callback.called.should.equal(true);
       return (this.callback.args[0][0] != null).should.equal(false);
     });
-  }));
+  }); });
 });
diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js
index 4e0fbc52dd..c6a3fbac33 100644
--- a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js
+++ b/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js
@@ -1,3 +1,9 @@
+/* eslint-disable
+    no-return-assign,
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
 /*
  * decaffeinate suggestions:
  * DS102: Remove unnecessary code created because of implicit returns
diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js
index ad195d5081..c5f89ab81c 100644
--- a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js
+++ b/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js
@@ -1,3 +1,9 @@
+/* eslint-disable
+    no-return-assign,
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
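// Editorial note: the no-return-assign disables added in these headers exist
// because CoffeeScript implicitly returns its last expression, so
// bulk-decaffeinate wraps trailing assignments in returns. A hypothetical
// before/after of the cleanup the TODO comments defer (both helpers are ours,
// for illustration only):
function asDecaffeinated(ctx) {
  return (ctx.count = 0); // what decaffeinate emits; trips no-return-assign
}
function afterCleanup(ctx) {
  ctx.count = 0; // idiomatic: mocha ignores hook return values anyway
}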
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -350,7 +356,7 @@ describe("UpdateManager", function() { }); - describe("_addProjectHistoryMetadataToOps", () => it("should add projectHistoryId, pathname and doc_length metadata to the ops", function() { + describe("_addProjectHistoryMetadataToOps", function() { return it("should add projectHistoryId, pathname and doc_length metadata to the ops", function() { const lines = [ 'some', 'test', @@ -387,7 +393,7 @@ describe("UpdateManager", function() { doc_length: 23 } // 14 - 'qux' + 'bazbaz' }]); -})); +}); }); return describe("lockUpdatesAndDo", function() { beforeEach(function() { From bb20394243ddbfa49e15561c4c3ac41d93a038ea Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:11:30 +0200 Subject: [PATCH 617/769] decaffeinate: rename test/unit/coffee to test/unit/js --- .../test/unit/{coffee => js}/DiffCodec/DiffCodecTests.js | 0 .../unit/{coffee => js}/DispatchManager/DispatchManagerTests.js | 0 .../unit/{coffee => js}/DocumentManager/DocumentManagerTests.js | 0 .../unit/{coffee => js}/HistoryManager/HistoryManagerTests.js | 0 .../HistoryRedisManager/HistoryRedisManagerTests.js | 0 .../unit/{coffee => js}/HttpController/HttpControllerTests.js | 0 .../test/unit/{coffee => js}/LockManager/CheckingTheLock.js | 0 .../test/unit/{coffee => js}/LockManager/ReleasingTheLock.js | 0 .../test/unit/{coffee => js}/LockManager/getLockTests.js | 0 .../test/unit/{coffee => js}/LockManager/tryLockTests.js | 0 .../{coffee => js}/PersistenceManager/PersistenceManagerTests.js | 0 .../ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js | 0 .../{coffee => js}/ProjectManager/flushAndDeleteProjectTests.js | 0 .../test/unit/{coffee => js}/ProjectManager/flushProjectTests.js | 0 .../unit/{coffee => js}/ProjectManager/getProjectDocsTests.js | 0 .../test/unit/{coffee => js}/ProjectManager/updateProjectTests.js | 0 .../test/unit/{coffee => js}/RangesManager/RangesManagerTests.js | 0 .../test/unit/{coffee => js}/RateLimitManager/RateLimitManager.js | 0 .../RealTimeRedisManager/RealTimeRedisManagerTests.js | 0 .../test/unit/{coffee => js}/RedisManager/RedisManagerTests.js | 0 .../test/unit/{coffee => js}/ShareJS/TextTransformTests.js | 0 .../test/unit/{coffee => js}/ShareJsDB/ShareJsDBTests.js | 0 .../ShareJsUpdateManager/ShareJsUpdateManagerTests.js | 0 .../test/unit/{coffee => js}/UpdateManager/UpdateManagerTests.js | 0 24 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/test/unit/{coffee => js}/DiffCodec/DiffCodecTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/DispatchManager/DispatchManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/DocumentManager/DocumentManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/HistoryManager/HistoryManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/HistoryRedisManager/HistoryRedisManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/HttpController/HttpControllerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/LockManager/CheckingTheLock.js (100%) rename services/document-updater/test/unit/{coffee => js}/LockManager/ReleasingTheLock.js (100%) rename services/document-updater/test/unit/{coffee => js}/LockManager/getLockTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/LockManager/tryLockTests.js (100%) rename services/document-updater/test/unit/{coffee => 
js}/PersistenceManager/PersistenceManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/ProjectManager/flushAndDeleteProjectTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/ProjectManager/flushProjectTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/ProjectManager/getProjectDocsTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/ProjectManager/updateProjectTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/RangesManager/RangesManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/RateLimitManager/RateLimitManager.js (100%) rename services/document-updater/test/unit/{coffee => js}/RealTimeRedisManager/RealTimeRedisManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/RedisManager/RedisManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/ShareJS/TextTransformTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/ShareJsDB/ShareJsDBTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/ShareJsUpdateManager/ShareJsUpdateManagerTests.js (100%) rename services/document-updater/test/unit/{coffee => js}/UpdateManager/UpdateManagerTests.js (100%) diff --git a/services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/DiffCodec/DiffCodecTests.js rename to services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js diff --git a/services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/DispatchManager/DispatchManagerTests.js rename to services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js diff --git a/services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/DocumentManager/DocumentManagerTests.js rename to services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js diff --git a/services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/HistoryManager/HistoryManagerTests.js rename to services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js diff --git a/services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/HistoryRedisManager/HistoryRedisManagerTests.js rename to services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js diff --git a/services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js similarity index 100% rename from 
services/document-updater/test/unit/coffee/HttpController/HttpControllerTests.js rename to services/document-updater/test/unit/js/HttpController/HttpControllerTests.js diff --git a/services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js similarity index 100% rename from services/document-updater/test/unit/coffee/LockManager/CheckingTheLock.js rename to services/document-updater/test/unit/js/LockManager/CheckingTheLock.js diff --git a/services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js similarity index 100% rename from services/document-updater/test/unit/coffee/LockManager/ReleasingTheLock.js rename to services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js diff --git a/services/document-updater/test/unit/coffee/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/LockManager/getLockTests.js rename to services/document-updater/test/unit/js/LockManager/getLockTests.js diff --git a/services/document-updater/test/unit/coffee/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/LockManager/tryLockTests.js rename to services/document-updater/test/unit/js/LockManager/tryLockTests.js diff --git a/services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/PersistenceManager/PersistenceManagerTests.js rename to services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js rename to services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectManager/flushAndDeleteProjectTests.js rename to services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectManager/flushProjectTests.js rename to services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectManager/getProjectDocsTests.js rename to 
services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js diff --git a/services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ProjectManager/updateProjectTests.js rename to services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js diff --git a/services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/RangesManager/RangesManagerTests.js rename to services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js diff --git a/services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js similarity index 100% rename from services/document-updater/test/unit/coffee/RateLimitManager/RateLimitManager.js rename to services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js diff --git a/services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/RealTimeRedisManager/RealTimeRedisManagerTests.js rename to services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.js rename to services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js diff --git a/services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ShareJS/TextTransformTests.js rename to services/document-updater/test/unit/js/ShareJS/TextTransformTests.js diff --git a/services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ShareJsDB/ShareJsDBTests.js rename to services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js diff --git a/services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/ShareJsUpdateManager/ShareJsUpdateManagerTests.js rename to services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js diff --git a/services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js similarity index 100% rename from services/document-updater/test/unit/coffee/UpdateManager/UpdateManagerTests.js rename to services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js From 9a2f8ecbd48aa3cfa51638d1b26d5f865aabba4d Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:11:36 
+0200 Subject: [PATCH 618/769] prettier: convert test/unit decaffeinated files to Prettier format --- .../test/unit/js/DiffCodec/DiffCodecTests.js | 144 +- .../DispatchManager/DispatchManagerTests.js | 274 +- .../DocumentManager/DocumentManagerTests.js | 1786 +++++++----- .../js/HistoryManager/HistoryManagerTests.js | 624 +++-- .../HistoryRedisManagerTests.js | 158 +- .../js/HttpController/HttpControllerTests.js | 1743 ++++++------ .../unit/js/LockManager/CheckingTheLock.js | 102 +- .../unit/js/LockManager/ReleasingTheLock.js | 151 +- .../test/unit/js/LockManager/getLockTests.js | 202 +- .../test/unit/js/LockManager/tryLockTests.js | 235 +- .../PersistenceManagerTests.js | 638 +++-- .../ProjectHistoryRedisManagerTests.js | 300 ++- .../flushAndDeleteProjectTests.js | 235 +- .../js/ProjectManager/flushProjectTests.js | 218 +- .../js/ProjectManager/getProjectDocsTests.js | 333 ++- .../js/ProjectManager/updateProjectTests.js | 574 ++-- .../js/RangesManager/RangesManagerTests.js | 874 +++--- .../js/RateLimitManager/RateLimitManager.js | 220 +- .../RealTimeRedisManagerTests.js | 253 +- .../unit/js/RedisManager/RedisManagerTests.js | 2399 ++++++++++------- .../unit/js/ShareJS/TextTransformTests.js | 706 ++--- .../test/unit/js/ShareJsDB/ShareJsDBTests.js | 230 +- .../ShareJsUpdateManagerTests.js | 367 +-- .../js/UpdateManager/UpdateManagerTests.js | 1115 ++++---- 24 files changed, 7979 insertions(+), 5902 deletions(-) diff --git a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js index 85bb8c6a1b..f208c17bd6 100644 --- a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js +++ b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js @@ -10,74 +10,86 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../../app/js/DiffCodec.js"; -const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../../app/js/DiffCodec.js' +const SandboxedModule = require('sandboxed-module') -describe("DiffCodec", function() { - beforeEach(function() { - this.callback = sinon.stub(); - return this.DiffCodec = SandboxedModule.require(modulePath); - }); +describe('DiffCodec', function () { + beforeEach(function () { + this.callback = sinon.stub() + return (this.DiffCodec = SandboxedModule.require(modulePath)) + }) - return describe("diffAsShareJsOps", function() { - it("should insert new text correctly", function(done) { - this.before = ["hello world"]; - this.after = ["hello beautiful world"]; - return this.DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => { - expect(ops).to.deep.equal([{ - i: "beautiful ", - p: 6 - } - ]); - return done(); - }); - }); + return describe('diffAsShareJsOps', function () { + it('should insert new text correctly', function (done) { + this.before = ['hello world'] + this.after = ['hello beautiful world'] + return this.DiffCodec.diffAsShareJsOp( + this.before, + this.after, + (error, ops) => { + expect(ops).to.deep.equal([ + { + i: 'beautiful ', + p: 6 + } + ]) + return done() + } + ) + }) - it("should shift later inserts by previous inserts", function(done) { - this.before = 
["the boy played with the ball"]; - this.after = ["the tall boy played with the red ball"]; - return this.DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => { - expect(ops).to.deep.equal([ - { i: "tall ", p: 4 }, - { i: "red ", p: 29 } - ]); - return done(); - }); - }); + it('should shift later inserts by previous inserts', function (done) { + this.before = ['the boy played with the ball'] + this.after = ['the tall boy played with the red ball'] + return this.DiffCodec.diffAsShareJsOp( + this.before, + this.after, + (error, ops) => { + expect(ops).to.deep.equal([ + { i: 'tall ', p: 4 }, + { i: 'red ', p: 29 } + ]) + return done() + } + ) + }) - it("should delete text correctly", function(done) { - this.before = ["hello beautiful world"]; - this.after = ["hello world"]; - return this.DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => { - expect(ops).to.deep.equal([{ - d: "beautiful ", - p: 6 - } - ]); - return done(); - }); - }); - - - return it("should shift later deletes by the first deletes", function(done) { - this.before = ["the tall boy played with the red ball"]; - this.after = ["the boy played with the ball"]; - return this.DiffCodec.diffAsShareJsOp(this.before, this.after, (error, ops) => { - expect(ops).to.deep.equal([ - { d: "tall ", p: 4 }, - { d: "red ", p: 24 } - ]); - return done(); - }); - }); - }); -}); - - + it('should delete text correctly', function (done) { + this.before = ['hello beautiful world'] + this.after = ['hello world'] + return this.DiffCodec.diffAsShareJsOp( + this.before, + this.after, + (error, ops) => { + expect(ops).to.deep.equal([ + { + d: 'beautiful ', + p: 6 + } + ]) + return done() + } + ) + }) + return it('should shift later deletes by the first deletes', function (done) { + this.before = ['the tall boy played with the red ball'] + this.after = ['the boy played with the ball'] + return this.DiffCodec.diffAsShareJsOp( + this.before, + this.after, + (error, ops) => { + expect(ops).to.deep.equal([ + { d: 'tall ', p: 4 }, + { d: 'red ', p: 24 } + ]) + return done() + } + ) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js index 8b0c9afcf9..48eb2fbb92 100644 --- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -11,144 +11,166 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/DispatchManager.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors.js"); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/DispatchManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors.js') -describe("DispatchManager", function() { - beforeEach(function() { - this.timeout(3000); - this.DispatchManager = SandboxedModule.require(modulePath, { requires: { - "./UpdateManager" : (this.UpdateManager = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() }), - "settings-sharelatex": (this.settings = { - redis: { - documentupdater: 
{} - } - }), - "redis-sharelatex": (this.redis = {}), - "./RateLimitManager": {}, - "./Errors": Errors, - "./Metrics": { - Timer() { - return {done() {}}; - } - } - } - } - ); - this.callback = sinon.stub(); - return this.RateLimiter = { run(task,cb) { return task(cb); } };}); // run task without rate limit +describe('DispatchManager', function () { + beforeEach(function () { + this.timeout(3000) + this.DispatchManager = SandboxedModule.require(modulePath, { + requires: { + './UpdateManager': (this.UpdateManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub(), + warn: sinon.stub() + }), + 'settings-sharelatex': (this.settings = { + redis: { + documentupdater: {} + } + }), + 'redis-sharelatex': (this.redis = {}), + './RateLimitManager': {}, + './Errors': Errors, + './Metrics': { + Timer() { + return { done() {} } + } + } + } + }) + this.callback = sinon.stub() + return (this.RateLimiter = { + run(task, cb) { + return task(cb) + } + }) + }) // run task without rate limit - return describe("each worker", function() { - beforeEach(function() { - this.client = - {auth: sinon.stub()}; - this.redis.createClient = sinon.stub().returns(this.client); - return this.worker = this.DispatchManager.createDispatcher(this.RateLimiter); - }); - - it("should create a new redis client", function() { - return this.redis.createClient.called.should.equal(true); - }); - - describe("_waitForUpdateThenDispatchWorker", function() { - beforeEach(function() { - this.project_id = "project-id-123"; - this.doc_id = "doc-id-123"; - this.doc_key = `${this.project_id}:${this.doc_id}`; - return this.client.blpop = sinon.stub().callsArgWith(2, null, ["pending-updates-list", this.doc_key]); - }); + return describe('each worker', function () { + beforeEach(function () { + this.client = { auth: sinon.stub() } + this.redis.createClient = sinon.stub().returns(this.client) + return (this.worker = this.DispatchManager.createDispatcher( + this.RateLimiter + )) + }) - describe("in the normal case", function() { - beforeEach(function() { - this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2); - return this.worker._waitForUpdateThenDispatchWorker(this.callback); - }); + it('should create a new redis client', function () { + return this.redis.createClient.called.should.equal(true) + }) - it("should call redis with BLPOP", function() { - return this.client.blpop - .calledWith("pending-updates-list", 0) - .should.equal(true); - }); - - it("should call processOutstandingUpdatesWithLock", function() { - return this.UpdateManager.processOutstandingUpdatesWithLock - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); + describe('_waitForUpdateThenDispatchWorker', function () { + beforeEach(function () { + this.project_id = 'project-id-123' + this.doc_id = 'doc-id-123' + this.doc_key = `${this.project_id}:${this.doc_id}` + return (this.client.blpop = sinon + .stub() + .callsArgWith(2, null, ['pending-updates-list', this.doc_key])) + }) - it("should not log any errors", function() { - this.logger.error.called.should.equal(false); - return this.logger.warn.called.should.equal(false); - }); + describe('in the normal case', function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon + .stub() + .callsArg(2) + return this.worker._waitForUpdateThenDispatchWorker(this.callback) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + it('should call redis 
with BLPOP', function () { + return this.client.blpop + .calledWith('pending-updates-list', 0) + .should.equal(true) + }) - describe("with an error", function() { - beforeEach(function() { - this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArgWith(2, new Error("a generic error")); - return this.worker._waitForUpdateThenDispatchWorker(this.callback); - }); + it('should call processOutstandingUpdatesWithLock', function () { + return this.UpdateManager.processOutstandingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) - it("should log an error", function() { - return this.logger.error.called.should.equal(true); - }); + it('should not log any errors', function () { + this.logger.error.called.should.equal(false) + return this.logger.warn.called.should.equal(false) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) - return describe("with a 'Delete component' error", function() { - beforeEach(function() { - this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArgWith(2, new Errors.DeleteMismatchError()); - return this.worker._waitForUpdateThenDispatchWorker(this.callback); - }); + describe('with an error', function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon + .stub() + .callsArgWith(2, new Error('a generic error')) + return this.worker._waitForUpdateThenDispatchWorker(this.callback) + }) - it("should log a warning", function() { - return this.logger.warn.called.should.equal(true); - }); + it('should log an error', function () { + return this.logger.error.called.should.equal(true) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - }); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) - return describe("run", function() { return it("should call _waitForUpdateThenDispatchWorker until shutting down", function(done) { - let callCount = 0; - this.worker._waitForUpdateThenDispatchWorker = callback => { - if (callback == null) { callback = function(error) {}; } - callCount++; - if (callCount === 3) { - this.settings.shuttingDown = true; - } - return setTimeout(() => callback() - , 10); - }; - sinon.spy(this.worker, "_waitForUpdateThenDispatchWorker"); - - - this.worker.run(); + return describe("with a 'Delete component' error", function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon + .stub() + .callsArgWith(2, new Errors.DeleteMismatchError()) + return this.worker._waitForUpdateThenDispatchWorker(this.callback) + }) - var checkStatus = () => { - if (!this.settings.shuttingDown) { // retry until shutdown - setTimeout(checkStatus, 100); - - } else { - this.worker._waitForUpdateThenDispatchWorker.callCount.should.equal(3); - return done(); - } - }; + it('should log a warning', function () { + return this.logger.warn.called.should.equal(true) + }) - return checkStatus(); - }); }); - }); -}); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + }) + + return describe('run', function () { + return it('should call _waitForUpdateThenDispatchWorker until shutting down', function (done) { + let callCount = 0 + this.worker._waitForUpdateThenDispatchWorker = 
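// Editorial sketch of the worker these DispatchManager tests pin down: block
// on Redis BLPOP until a 'project_id:doc_id' member is pushed onto
// pending-updates-list, then process that doc's queued updates. Collaborators
// are passed in and the function name echoes the one under test; this is a
// condensed illustration of the asserted behaviour, not the module source.
function waitForUpdateThenDispatchWorker(client, UpdateManager, logger, Errors, callback) {
  // a timeout of 0 means block until an item arrives
  client.blpop('pending-updates-list', 0, (error, result) => {
    if (error) return callback(error)
    const [, docKey] = result // result is [listName, 'project_id:doc_id']
    const [projectId, docId] = docKey.split(':')
    UpdateManager.processOutstandingUpdatesWithLock(projectId, docId, (error) => {
      if (error instanceof Errors.DeleteMismatchError) {
        logger.warn({ projectId, docId }, 'delete mismatch') // a warning, per the tests
      } else if (error) {
        logger.error({ projectId, docId, error }, 'error processing updates') // per the tests
      }
      return callback() // always continue, so the run() loop keeps polling
    })
  })
}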
(callback) => { + if (callback == null) { + callback = function (error) {} + } + callCount++ + if (callCount === 3) { + this.settings.shuttingDown = true + } + return setTimeout(() => callback(), 10) + } + sinon.spy(this.worker, '_waitForUpdateThenDispatchWorker') + + this.worker.run() + + var checkStatus = () => { + if (!this.settings.shuttingDown) { + // retry until shutdown + setTimeout(checkStatus, 100) + } else { + this.worker._waitForUpdateThenDispatchWorker.callCount.should.equal( + 3 + ) + return done() + } + } + + return checkStatus() + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js index cc13bbeb7a..4baa5cee8d 100644 --- a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js +++ b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js @@ -12,690 +12,1102 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/DocumentManager.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors"); -const tk = require("timekeeper"); - -describe("DocumentManager", function() { - beforeEach(function() { - let Timer; - tk.freeze(new Date()); - this.DocumentManager = SandboxedModule.require(modulePath, { requires: { - "./RedisManager": (this.RedisManager = {}), - "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), - "./PersistenceManager": (this.PersistenceManager = {}), - "./HistoryManager": (this.HistoryManager = { - flushDocChangesAsync: sinon.stub(), - flushProjectChangesAsync: sinon.stub() - }), - "logger-sharelatex": (this.logger = {log: sinon.stub(), warn: sinon.stub()}), - "./DocOpsManager": (this.DocOpsManager = {}), - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()) - }), - "./RealTimeRedisManager": (this.RealTimeRedisManager = {}), - "./DiffCodec": (this.DiffCodec = {}), - "./UpdateManager": (this.UpdateManager = {}), - "./RangesManager": (this.RangesManager = {}) - } - }); - this.project_id = "project-id-123"; - this.projectHistoryId = "history-id-123"; - this.projectHistoryType = "project-history"; - this.doc_id = "doc-id-123"; - this.user_id = 1234; - this.callback = sinon.stub(); - this.lines = ["one", "two", "three"]; - this.version = 42; - this.ranges = { comments: "mock", entries: "mock" }; - this.pathname = '/a/b/c.tex'; - this.unflushedTime = Date.now(); - this.lastUpdatedAt = Date.now(); - return this.lastUpdatedBy = 'last-author-id'; - }); - - afterEach(function() { return tk.reset(); }); - - describe("flushAndDeleteDoc", function() { - describe("successfully", function() { - beforeEach(function() { - this.RedisManager.removeDocFromMemory = sinon.stub().callsArg(2); - this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2); - return this.DocumentManager.flushAndDeleteDoc(this.project_id, this.doc_id, {}, this.callback); - }); - - it("should flush the doc", function() { - return this.DocumentManager.flushDocIfLoaded - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should remove the doc from redis", 
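// Editorial sketch of the flushAndDeleteDoc flow asserted in this hunk (in
// both the removed and the reformatted version of the tests): flush the doc
// if it is loaded, drop it from Redis, then trigger an async history flush.
// Collaborators are passed in; `ignoreFlushErrors` mirrors the option the
// tests exercise. Not the DocumentManager source.
function flushAndDeleteDoc(deps, projectId, docId, options, callback) {
  const { DocumentManager, RedisManager, HistoryManager } = deps;
  DocumentManager.flushDocIfLoaded(projectId, docId, (error) => {
    if (error && !options.ignoreFlushErrors) {
      return callback(error); // flush failed: leave the doc in Redis
    }
    RedisManager.removeDocFromMemory(projectId, docId, (error) => {
      if (error) return callback(error);
      HistoryManager.flushDocChangesAsync(projectId, docId); // fire and forget
      return callback(null);
    });
  });
}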
function() { - return this.RedisManager.removeDocFromMemory - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should call the callback without error", function() { - return this.callback.calledWith(null).should.equal(true); - }); - - it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - - return it("should flush to the history api", function() { - return this.HistoryManager.flushDocChangesAsync - .calledWithExactly(this.project_id, this.doc_id) - .should.equal(true); - }); - }); - - return describe("when a flush error occurs", function() { - beforeEach(function() { - this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2, new Error("boom!")); - return this.RedisManager.removeDocFromMemory = sinon.stub().callsArg(2); - }); - - it("should not remove the doc from redis", function(done) { - return this.DocumentManager.flushAndDeleteDoc(this.project_id, this.doc_id, {}, error => { - error.should.exist; - this.RedisManager.removeDocFromMemory.called.should.equal(false); - return done(); - }); - }); - - return describe("when ignoring flush errors", function() { return it("should remove the doc from redis", function(done) { - return this.DocumentManager.flushAndDeleteDoc(this.project_id, this.doc_id, { ignoreFlushErrors: true }, error => { - if (error != null) { - return done(error); - } - this.RedisManager.removeDocFromMemory.called.should.equal(true); - return done(); - }); - }); }); - }); - }); - - describe("flushDocIfLoaded", function() { - describe("when the doc is in Redis", function() { - beforeEach(function() { - this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushedTime, this.lastUpdatedAt, this.lastUpdatedBy); - this.RedisManager.clearUnflushedTime = sinon.stub().callsArgWith(1, null); - this.PersistenceManager.setDoc = sinon.stub().yields(); - return this.DocumentManager.flushDocIfLoaded(this.project_id, this.doc_id, this.callback); - }); - - it("should get the doc from redis", function() { - return this.RedisManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should write the doc lines to the persistence layer", function() { - return this.PersistenceManager.setDoc - .calledWith(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy) - .should.equal(true); - }); - - it("should call the callback without error", function() { - return this.callback.calledWith(null).should.equal(true); - }); - - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - return describe("when the document is not in Redis", function() { - beforeEach(function() { - this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null); - this.PersistenceManager.setDoc = sinon.stub().yields(); - this.DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2); - return this.DocumentManager.flushDocIfLoaded(this.project_id, this.doc_id, this.callback); - }); - - it("should get the doc from redis", function() { - return this.RedisManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should not write anything to the persistence layer", function() { - this.PersistenceManager.setDoc.called.should.equal(false); - return 
this.DocOpsManager.flushDocOpsToMongo.called.should.equal(false); - }); - - it("should call the callback without error", function() { - return this.callback.calledWith(null).should.equal(true); - }); - - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - }); - - describe("getDocAndRecentOps", function() { - describe("with a previous version specified", function() { - beforeEach(function() { - this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); - this.RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, this.ops); - return this.DocumentManager.getDocAndRecentOps(this.project_id, this.doc_id, this.fromVersion, this.callback); - }); - - it("should get the doc", function() { - return this.DocumentManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should get the doc ops", function() { - return this.RedisManager.getPreviousDocOps - .calledWith(this.doc_id, this.fromVersion, this.version) - .should.equal(true); - }); - - it("should call the callback with the doc info", function() { - return this.callback.calledWith(null, this.lines, this.version, this.ops, this.ranges, this.pathname, this.projectHistoryId).should.equal(true); - }); - - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - return describe("with no previous version specified", function() { - beforeEach(function() { - this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); - this.RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, this.ops); - return this.DocumentManager.getDocAndRecentOps(this.project_id, this.doc_id, -1, this.callback); - }); - - it("should get the doc", function() { - return this.DocumentManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should not need to get the doc ops", function() { - return this.RedisManager.getPreviousDocOps.called.should.equal(false); - }); - - it("should call the callback with the doc info", function() { - return this.callback.calledWith(null, this.lines, this.version, [], this.ranges, this.pathname, this.projectHistoryId).should.equal(true); - }); - - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - }); - - describe("getDoc", function() { - describe("when the doc exists in Redis", function() { - beforeEach(function() { - this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushedTime); - return this.DocumentManager.getDoc(this.project_id, this.doc_id, this.callback); - }); - - it("should get the doc from Redis", function() { - return this.RedisManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should call the callback with the doc info", function() { - return this.callback.calledWith(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushedTime, true).should.equal(true); - }); - - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - return describe("when the doc does not exist in 
Redis", function() { - beforeEach(function() { - this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null, null, null, null); - this.PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.projectHistoryType); - this.RedisManager.putDocInMemory = sinon.stub().yields(); - this.RedisManager.setHistoryType = sinon.stub().yields(); - return this.DocumentManager.getDoc(this.project_id, this.doc_id, this.callback); - }); - - it("should try to get the doc from Redis", function() { - return this.RedisManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should get the doc from the PersistenceManager", function() { - return this.PersistenceManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should set the doc in Redis", function() { - return this.RedisManager.putDocInMemory - .calledWith(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId) - .should.equal(true); - }); - - it("should set the history type in Redis", function() { - return this.RedisManager.setHistoryType - .calledWith(this.doc_id, this.projectHistoryType) - .should.equal(true); - }); - - it("should call the callback with the doc info", function() { - return this.callback.calledWith(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, null, false).should.equal(true); - }); - - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - }); - - describe("setDoc", function() { return describe("with plain tex lines", function() { - beforeEach(function() { - this.beforeLines = ["before", "lines"]; - this.afterLines = ["after", "lines"]; - this.ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }]; - this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.beforeLines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushedTime, true); - this.DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, this.ops); - this.UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null); - this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2); - return this.DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(3); - }); - - describe("when already loaded", function() { - beforeEach(function() { - return this.DocumentManager.setDoc(this.project_id, this.doc_id, this.afterLines, this.source, this.user_id, false, this.callback); - }); - - it("should get the current doc lines", function() { - return this.DocumentManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should return a diff of the old and new lines", function() { - return this.DiffCodec.diffAsShareJsOp - .calledWith(this.beforeLines, this.afterLines) - .should.equal(true); - }); - - it("should apply the diff as a ShareJS op", function() { - return this.UpdateManager.applyUpdate - .calledWith( - this.project_id, - this.doc_id, - { - doc: this.doc_id, - v: this.version, - op: this.ops, - meta: { - type: "external", - source: this.source, - user_id: this.user_id - } - } - ) - .should.equal(true); - }); - - it("should flush the doc to Mongo", function() { - return this.DocumentManager.flushDocIfLoaded - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should not flush the project history", function() { - return 
this.HistoryManager.flushProjectChangesAsync - .called.should.equal(false); - }); - - it("should call the callback", function() { - return this.callback.calledWith(null).should.equal(true); - }); - - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("when not already loaded", function() { - beforeEach(function() { - this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.beforeLines, this.version, this.pathname, null, false); - return this.DocumentManager.setDoc(this.project_id, this.doc_id, this.afterLines, this.source, this.user_id, false, this.callback); - }); - - it("should flush and delete the doc from the doc updater", function() { - return this.DocumentManager.flushAndDeleteDoc - .calledWith(this.project_id, this.doc_id, {}) - .should.equal(true); - }); - - return it("should not flush the project history", function() { - return this.HistoryManager.flushProjectChangesAsync - .calledWithExactly(this.project_id) - .should.equal(true); - }); - }); - - describe("without new lines", function() { - beforeEach(function() { - return this.DocumentManager.setDoc(this.project_id, this.doc_id, null, this.source, this.user_id, false, this.callback); - }); - - it("should return the callback with an error", function() { - return this.callback.calledWith(new Error("No lines were passed to setDoc")); - }); - - return it("should not try to get the doc lines", function() { - return this.DocumentManager.getDoc.called.should.equal(false); - }); - }); - - return describe("with the undoing flag", function() { - beforeEach(function() { - // Copy ops so we don't interfere with other tests - this.ops = [{ i: "foo", p: 4 }, { d: "bar", p: 42 }]; - this.DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, this.ops); - return this.DocumentManager.setDoc(this.project_id, this.doc_id, this.afterLines, this.source, this.user_id, true, this.callback); - }); - - return it("should set the undo flag on each op", function() { - return Array.from(this.ops).map((op) => - op.u.should.equal(true)); - }); - }); - }); }); - - describe("acceptChanges", function() { - beforeEach(function() { - this.change_id = "mock-change-id"; - this.change_ids = [ "mock-change-id-1", "mock-change-id-2", "mock-change-id-3", "mock-change-id-4" ]; - this.version = 34; - this.lines = ["original", "lines"]; - this.ranges = { entries: "mock", comments: "mock" }; - this.updated_ranges = { entries: "updated", comments: "updated" }; - this.DocumentManager.getDoc = sinon.stub().yields(null, this.lines, this.version, this.ranges); - this.RangesManager.acceptChanges = sinon.stub().yields(null, this.updated_ranges); - return this.RedisManager.updateDocument = sinon.stub().yields(); - }); - - describe("successfully with a single change", function() { - beforeEach(function() { - return this.DocumentManager.acceptChanges(this.project_id, this.doc_id, [ this.change_id ], this.callback); - }); - - it("should get the document's current ranges", function() { - return this.DocumentManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should apply the accept change to the ranges", function() { - return this.RangesManager.acceptChanges - .calledWith([ this.change_id ], this.ranges) - .should.equal(true); - }); - - it("should save the updated ranges", function() { - return this.RedisManager.updateDocument - .calledWith(this.project_id, this.doc_id, this.lines, this.version, [], this.updated_ranges, {}) - 
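// Editorial sketch of the setDoc flow asserted in the hunks above: diff the
// current lines against the incoming lines, wrap the resulting ops in an
// "external" update, apply it, then flush. Condensed from the stubbed
// collaborators; not the module source.
function setDoc(deps, projectId, docId, afterLines, source, userId, callback) {
  const { DocumentManager, DiffCodec, UpdateManager } = deps;
  DocumentManager.getDoc(projectId, docId, (error, beforeLines, version) => {
    if (error) return callback(error);
    DiffCodec.diffAsShareJsOp(beforeLines, afterLines, (error, op) => {
      if (error) return callback(error);
      const update = {
        doc: docId,
        v: version,
        op,
        meta: { type: "external", source, user_id: userId }
      };
      UpdateManager.applyUpdate(projectId, docId, update, (error) => {
        if (error) return callback(error);
        DocumentManager.flushDocIfLoaded(projectId, docId, callback);
      });
    });
  });
}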
.should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - - describe("successfully with multiple changes", function() { - beforeEach(function() { - return this.DocumentManager.acceptChanges(this.project_id, this.doc_id, this.change_ids, this.callback); - }); - - return it("should apply the accept change to the ranges", function() { - return this.RangesManager.acceptChanges - .calledWith(this.change_ids, this.ranges) - .should.equal(true); - }); - }); - - return describe("when the doc is not found", function() { - beforeEach(function() { - this.DocumentManager.getDoc = sinon.stub().yields(null, null, null, null); - return this.DocumentManager.acceptChanges(this.project_id, this.doc_id, [ this.change_id ], this.callback); - }); - - it("should not save anything", function() { - return this.RedisManager.updateDocument.called.should.equal(false); - }); - - return it("should call the callback with a not found error", function() { - const error = new Errors.NotFoundError(`document not found: ${this.doc_id}`); - return this.callback.calledWith(error).should.equal(true); - }); - }); - }); - - describe("deleteComment", function() { - beforeEach(function() { - this.comment_id = "mock-comment-id"; - this.version = 34; - this.lines = ["original", "lines"]; - this.ranges = { comments: ["one", "two", "three"] }; - this.updated_ranges = { comments: ["one", "three"] }; - this.DocumentManager.getDoc = sinon.stub().yields(null, this.lines, this.version, this.ranges); - this.RangesManager.deleteComment = sinon.stub().yields(null, this.updated_ranges); - return this.RedisManager.updateDocument = sinon.stub().yields(); - }); - - describe("successfully", function() { - beforeEach(function() { - return this.DocumentManager.deleteComment(this.project_id, this.doc_id, this.comment_id, this.callback); - }); - - it("should get the document's current ranges", function() { - return this.DocumentManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should delete the comment from the ranges", function() { - return this.RangesManager.deleteComment - .calledWith(this.comment_id, this.ranges) - .should.equal(true); - }); - - it("should save the updated ranges", function() { - return this.RedisManager.updateDocument - .calledWith(this.project_id, this.doc_id, this.lines, this.version, [], this.updated_ranges, {}) - .should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - - return describe("when the doc is not found", function() { - beforeEach(function() { - this.DocumentManager.getDoc = sinon.stub().yields(null, null, null, null); - return this.DocumentManager.acceptChanges(this.project_id, this.doc_id, [ this.comment_id ], this.callback); - }); - - it("should not save anything", function() { - return this.RedisManager.updateDocument.called.should.equal(false); - }); - - return it("should call the callback with a not found error", function() { - const error = new Errors.NotFoundError(`document not found: ${this.doc_id}`); - return this.callback.calledWith(error).should.equal(true); - }); - }); - }); - - describe("getDocAndFlushIfOld", function() { - beforeEach(function() { - return this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2); - }); - - describe("when the doc is in Redis", function() { - describe("and has changes to be flushed", function() { - beforeEach(function() { - 
this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.projectHistoryId, this.pathname, Date.now() - 1e9, true); - return this.DocumentManager.getDocAndFlushIfOld(this.project_id, this.doc_id, this.callback); - }); - - it("should get the doc", function() { - return this.DocumentManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should flush the doc", function() { - return this.DocumentManager.flushDocIfLoaded - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - return it("should call the callback with the lines and versions", function() { - return this.callback.calledWith(null, this.lines, this.version).should.equal(true); - }); - }); - - return describe("and has only changes that don't need to be flushed", function() { - beforeEach(function() { - this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, Date.now() - 100, true); - return this.DocumentManager.getDocAndFlushIfOld(this.project_id, this.doc_id, this.callback); - }); - - it("should get the doc", function() { - return this.DocumentManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should not flush the doc", function() { - return this.DocumentManager.flushDocIfLoaded - .called.should.equal(false); - }); - - return it("should call the callback with the lines and versions", function() { - return this.callback.calledWith(null, this.lines, this.version).should.equal(true); - }); - }); - }); - - return describe("when the doc is not in Redis", function() { - beforeEach(function() { - this.DocumentManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, null, false); - return this.DocumentManager.getDocAndFlushIfOld(this.project_id, this.doc_id, this.callback); - }); - - it("should get the doc", function() { - return this.DocumentManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should not flush the doc", function() { - return this.DocumentManager.flushDocIfLoaded - .called.should.equal(false); - }); - - return it("should call the callback with the lines and versions", function() { - return this.callback.calledWith(null, this.lines, this.version).should.equal(true); - }); - }); - }); - - describe("renameDoc", function() { - beforeEach(function() { - this.update = 'some-update'; - return this.RedisManager.renameDoc = sinon.stub().yields(); - }); - - return describe("successfully", function() { - beforeEach(function() { - return this.DocumentManager.renameDoc(this.project_id, this.doc_id, this.user_id, this.update, this.projectHistoryId, this.callback); - }); - - it("should rename the document", function() { - return this.RedisManager.renameDoc - .calledWith(this.project_id, this.doc_id, this.user_id, this.update, this.projectHistoryId) - .should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - }); - - return describe("resyncDocContents", function() { - describe("when doc is loaded in redis", function() { - beforeEach(function() { - this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); - this.ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub(); - return this.DocumentManager.resyncDocContents(this.project_id, this.doc_id, this.callback); - }); - - 
it("gets the doc contents from redis", function() { - return this.RedisManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - return it("queues a resync doc content update", function() { - return this.ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(this.project_id, this.projectHistoryId, this.doc_id, this.lines, this.version, this.pathname, this.callback) - .should.equal(true); - }); - }); - - return describe("when doc is not loaded in redis", function() { - beforeEach(function() { - this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null); - this.PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); - this.ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub(); - return this.DocumentManager.resyncDocContents(this.project_id, this.doc_id, this.callback); - }); - - it("tries to get the doc contents from redis", function() { - return this.RedisManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("gets the doc contents from web", function() { - return this.PersistenceManager.getDoc - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - return it("queues a resync doc content update", function() { - return this.ProjectHistoryRedisManager.queueResyncDocContent - .calledWith(this.project_id, this.projectHistoryId, this.doc_id, this.lines, this.version, this.pathname, this.callback) - .should.equal(true); - }); - }); - }); -}); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/DocumentManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') +const tk = require('timekeeper') + +describe('DocumentManager', function () { + beforeEach(function () { + let Timer + tk.freeze(new Date()) + this.DocumentManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './PersistenceManager': (this.PersistenceManager = {}), + './HistoryManager': (this.HistoryManager = { + flushDocChangesAsync: sinon.stub(), + flushProjectChangesAsync: sinon.stub() + }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + warn: sinon.stub() + }), + './DocOpsManager': (this.DocOpsManager = {}), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()) + }), + './RealTimeRedisManager': (this.RealTimeRedisManager = {}), + './DiffCodec': (this.DiffCodec = {}), + './UpdateManager': (this.UpdateManager = {}), + './RangesManager': (this.RangesManager = {}) + } + }) + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.projectHistoryType = 'project-history' + this.doc_id = 'doc-id-123' + this.user_id = 1234 + this.callback = sinon.stub() + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.ranges = { comments: 'mock', entries: 'mock' } + this.pathname = '/a/b/c.tex' + this.unflushedTime = Date.now() + this.lastUpdatedAt = Date.now() + return (this.lastUpdatedBy = 'last-author-id') + }) + + afterEach(function () { + return tk.reset() + }) + + describe('flushAndDeleteDoc', function () { + describe('successfully', function () { + beforeEach(function () { + 
this.RedisManager.removeDocFromMemory = sinon.stub().callsArg(2) + this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2) + return this.DocumentManager.flushAndDeleteDoc( + this.project_id, + this.doc_id, + {}, + this.callback + ) + }) + + it('should flush the doc', function () { + return this.DocumentManager.flushDocIfLoaded + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should remove the doc from redis', function () { + return this.RedisManager.removeDocFromMemory + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should call the callback without error', function () { + return this.callback.calledWith(null).should.equal(true) + }) + + it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + return it('should flush to the history api', function () { + return this.HistoryManager.flushDocChangesAsync + .calledWithExactly(this.project_id, this.doc_id) + .should.equal(true) + }) + }) + + return describe('when a flush error occurs', function () { + beforeEach(function () { + this.DocumentManager.flushDocIfLoaded = sinon + .stub() + .callsArgWith(2, new Error('boom!')) + return (this.RedisManager.removeDocFromMemory = sinon + .stub() + .callsArg(2)) + }) + + it('should not remove the doc from redis', function (done) { + return this.DocumentManager.flushAndDeleteDoc( + this.project_id, + this.doc_id, + {}, + (error) => { + error.should.exist + this.RedisManager.removeDocFromMemory.called.should.equal(false) + return done() + } + ) + }) + + return describe('when ignoring flush errors', function () { + return it('should remove the doc from redis', function (done) { + return this.DocumentManager.flushAndDeleteDoc( + this.project_id, + this.doc_id, + { ignoreFlushErrors: true }, + (error) => { + if (error != null) { + return done(error) + } + this.RedisManager.removeDocFromMemory.called.should.equal(true) + return done() + } + ) + }) + }) + }) + }) + + describe('flushDocIfLoaded', function () { + describe('when the doc is in Redis', function () { + beforeEach(function () { + this.RedisManager.getDoc = sinon + .stub() + .callsArgWith( + 2, + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.unflushedTime, + this.lastUpdatedAt, + this.lastUpdatedBy + ) + this.RedisManager.clearUnflushedTime = sinon + .stub() + .callsArgWith(1, null) + this.PersistenceManager.setDoc = sinon.stub().yields() + return this.DocumentManager.flushDocIfLoaded( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should get the doc from redis', function () { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should write the doc lines to the persistence layer', function () { + return this.PersistenceManager.setDoc + .calledWith( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy + ) + .should.equal(true) + }) + + it('should call the callback without error', function () { + return this.callback.calledWith(null).should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + return describe('when the document is not in Redis', function () { + beforeEach(function () { + this.RedisManager.getDoc = sinon + .stub() + .callsArgWith(2, null, null, null, null) + this.PersistenceManager.setDoc = 
sinon.stub().yields() + this.DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) + return this.DocumentManager.flushDocIfLoaded( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should get the doc from redis', function () { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should not write anything to the persistence layer', function () { + this.PersistenceManager.setDoc.called.should.equal(false) + return this.DocOpsManager.flushDocOpsToMongo.called.should.equal(false) + }) + + it('should call the callback without error', function () { + return this.callback.calledWith(null).should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + }) + + describe('getDocAndRecentOps', function () { + describe('with a previous version specified', function () { + beforeEach(function () { + this.DocumentManager.getDoc = sinon + .stub() + .callsArgWith( + 2, + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId + ) + this.RedisManager.getPreviousDocOps = sinon + .stub() + .callsArgWith(3, null, this.ops) + return this.DocumentManager.getDocAndRecentOps( + this.project_id, + this.doc_id, + this.fromVersion, + this.callback + ) + }) + + it('should get the doc', function () { + return this.DocumentManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should get the doc ops', function () { + return this.RedisManager.getPreviousDocOps + .calledWith(this.doc_id, this.fromVersion, this.version) + .should.equal(true) + }) + + it('should call the callback with the doc info', function () { + return this.callback + .calledWith( + null, + this.lines, + this.version, + this.ops, + this.ranges, + this.pathname, + this.projectHistoryId + ) + .should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + return describe('with no previous version specified', function () { + beforeEach(function () { + this.DocumentManager.getDoc = sinon + .stub() + .callsArgWith( + 2, + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId + ) + this.RedisManager.getPreviousDocOps = sinon + .stub() + .callsArgWith(3, null, this.ops) + return this.DocumentManager.getDocAndRecentOps( + this.project_id, + this.doc_id, + -1, + this.callback + ) + }) + + it('should get the doc', function () { + return this.DocumentManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should not need to get the doc ops', function () { + return this.RedisManager.getPreviousDocOps.called.should.equal(false) + }) + + it('should call the callback with the doc info', function () { + return this.callback + .calledWith( + null, + this.lines, + this.version, + [], + this.ranges, + this.pathname, + this.projectHistoryId + ) + .should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + }) + + describe('getDoc', function () { + describe('when the doc exists in Redis', function () { + beforeEach(function () { + this.RedisManager.getDoc = sinon + .stub() + .callsArgWith( + 2, + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.unflushedTime + ) + return 
this.DocumentManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should get the doc from Redis', function () { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should call the callback with the doc info', function () { + return this.callback + .calledWith( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.unflushedTime, + true + ) + .should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + return describe('when the doc does not exist in Redis', function () { + beforeEach(function () { + this.RedisManager.getDoc = sinon + .stub() + .callsArgWith(2, null, null, null, null, null, null) + this.PersistenceManager.getDoc = sinon + .stub() + .callsArgWith( + 2, + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.projectHistoryType + ) + this.RedisManager.putDocInMemory = sinon.stub().yields() + this.RedisManager.setHistoryType = sinon.stub().yields() + return this.DocumentManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should try to get the doc from Redis', function () { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should get the doc from the PersistenceManager', function () { + return this.PersistenceManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should set the doc in Redis', function () { + return this.RedisManager.putDocInMemory + .calledWith( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId + ) + .should.equal(true) + }) + + it('should set the history type in Redis', function () { + return this.RedisManager.setHistoryType + .calledWith(this.doc_id, this.projectHistoryType) + .should.equal(true) + }) + + it('should call the callback with the doc info', function () { + return this.callback + .calledWith( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + null, + false + ) + .should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + }) + + describe('setDoc', function () { + return describe('with plain tex lines', function () { + beforeEach(function () { + this.beforeLines = ['before', 'lines'] + this.afterLines = ['after', 'lines'] + this.ops = [ + { i: 'foo', p: 4 }, + { d: 'bar', p: 42 } + ] + this.DocumentManager.getDoc = sinon + .stub() + .callsArgWith( + 2, + null, + this.beforeLines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.unflushedTime, + true + ) + this.DiffCodec.diffAsShareJsOp = sinon + .stub() + .callsArgWith(2, null, this.ops) + this.UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null) + this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2) + return (this.DocumentManager.flushAndDeleteDoc = sinon + .stub() + .callsArg(3)) + }) + + describe('when already loaded', function () { + beforeEach(function () { + return this.DocumentManager.setDoc( + this.project_id, + this.doc_id, + this.afterLines, + this.source, + this.user_id, + false, + this.callback + ) + }) + + it('should get the current doc lines', function () { + return this.DocumentManager.getDoc + 
.calledWith(this.project_id, this.doc_id)
+            .should.equal(true)
+        })
+
+        it('should return a diff of the old and new lines', function () {
+          return this.DiffCodec.diffAsShareJsOp
+            .calledWith(this.beforeLines, this.afterLines)
+            .should.equal(true)
+        })
+
+        it('should apply the diff as a ShareJS op', function () {
+          return this.UpdateManager.applyUpdate
+            .calledWith(this.project_id, this.doc_id, {
+              doc: this.doc_id,
+              v: this.version,
+              op: this.ops,
+              meta: {
+                type: 'external',
+                source: this.source,
+                user_id: this.user_id
+              }
+            })
+            .should.equal(true)
+        })
+
+        it('should flush the doc to Mongo', function () {
+          return this.DocumentManager.flushDocIfLoaded
+            .calledWith(this.project_id, this.doc_id)
+            .should.equal(true)
+        })
+
+        it('should not flush the project history', function () {
+          return this.HistoryManager.flushProjectChangesAsync.called.should.equal(
+            false
+          )
+        })
+
+        it('should call the callback', function () {
+          return this.callback.calledWith(null).should.equal(true)
+        })
+
+        return it('should time the execution', function () {
+          return this.Metrics.Timer.prototype.done.called.should.equal(true)
+        })
+      })
+
+      describe('when not already loaded', function () {
+        beforeEach(function () {
+          this.DocumentManager.getDoc = sinon
+            .stub()
+            .callsArgWith(
+              2,
+              null,
+              this.beforeLines,
+              this.version,
+              this.ranges,
+              this.pathname,
+              this.projectHistoryId,
+              null,
+              false
+            )
+          return this.DocumentManager.setDoc(
+            this.project_id,
+            this.doc_id,
+            this.afterLines,
+            this.source,
+            this.user_id,
+            false,
+            this.callback
+          )
+        })
+
+        it('should flush and delete the doc from the doc updater', function () {
+          return this.DocumentManager.flushAndDeleteDoc
+            .calledWith(this.project_id, this.doc_id, {})
+            .should.equal(true)
+        })
+
+        return it('should flush the project history', function () {
+          return this.HistoryManager.flushProjectChangesAsync
+            .calledWithExactly(this.project_id)
+            .should.equal(true)
+        })
+      })
+
+      describe('without new lines', function () {
+        beforeEach(function () {
+          return this.DocumentManager.setDoc(
+            this.project_id,
+            this.doc_id,
+            null,
+            this.source,
+            this.user_id,
+            false,
+            this.callback
+          )
+        })
+
+        it('should call the callback with an error', function () {
+          return this.callback
+            .calledWith(new Error('No lines were passed to setDoc'))
+            .should.equal(true)
+        })
+
+        return it('should not try to get the doc lines', function () {
+          return this.DocumentManager.getDoc.called.should.equal(false)
+        })
+      })
+
+      return describe('with the undoing flag', function () {
+        beforeEach(function () {
+          // Copy ops so we don't interfere with other tests
+          this.ops = [
+            { i: 'foo', p: 4 },
+            { d: 'bar', p: 42 }
+          ]
+          this.DiffCodec.diffAsShareJsOp = sinon
+            .stub()
+            .callsArgWith(2, null, this.ops)
+          return this.DocumentManager.setDoc(
+            this.project_id,
+            this.doc_id,
+            this.afterLines,
+            this.source,
+            this.user_id,
+            true,
+            this.callback
+          )
+        })
+
+        return it('should set the undo flag on each op', function () {
+          return Array.from(this.ops).map((op) => op.u.should.equal(true))
+        })
+      })
+    })
+  })
+
+  describe('acceptChanges', function () {
+    beforeEach(function () {
+      this.change_id = 'mock-change-id'
+      this.change_ids = [
+        'mock-change-id-1',
+        'mock-change-id-2',
+        'mock-change-id-3',
+        'mock-change-id-4'
+      ]
+      this.version = 34
+      this.lines = ['original', 'lines']
+      this.ranges = { entries: 'mock', comments: 'mock' }
+      this.updated_ranges = { entries: 'updated', comments: 'updated' }
+      this.DocumentManager.getDoc = sinon
+        .stub()
+        .yields(null, this.lines, this.version, this.ranges)
+
this.RangesManager.acceptChanges = sinon + .stub() + .yields(null, this.updated_ranges) + return (this.RedisManager.updateDocument = sinon.stub().yields()) + }) + + describe('successfully with a single change', function () { + beforeEach(function () { + return this.DocumentManager.acceptChanges( + this.project_id, + this.doc_id, + [this.change_id], + this.callback + ) + }) + + it("should get the document's current ranges", function () { + return this.DocumentManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should apply the accept change to the ranges', function () { + return this.RangesManager.acceptChanges + .calledWith([this.change_id], this.ranges) + .should.equal(true) + }) + + it('should save the updated ranges', function () { + return this.RedisManager.updateDocument + .calledWith( + this.project_id, + this.doc_id, + this.lines, + this.version, + [], + this.updated_ranges, + {} + ) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + describe('successfully with multiple changes', function () { + beforeEach(function () { + return this.DocumentManager.acceptChanges( + this.project_id, + this.doc_id, + this.change_ids, + this.callback + ) + }) + + return it('should apply the accept change to the ranges', function () { + return this.RangesManager.acceptChanges + .calledWith(this.change_ids, this.ranges) + .should.equal(true) + }) + }) + + return describe('when the doc is not found', function () { + beforeEach(function () { + this.DocumentManager.getDoc = sinon + .stub() + .yields(null, null, null, null) + return this.DocumentManager.acceptChanges( + this.project_id, + this.doc_id, + [this.change_id], + this.callback + ) + }) + + it('should not save anything', function () { + return this.RedisManager.updateDocument.called.should.equal(false) + }) + + return it('should call the callback with a not found error', function () { + const error = new Errors.NotFoundError( + `document not found: ${this.doc_id}` + ) + return this.callback.calledWith(error).should.equal(true) + }) + }) + }) + + describe('deleteComment', function () { + beforeEach(function () { + this.comment_id = 'mock-comment-id' + this.version = 34 + this.lines = ['original', 'lines'] + this.ranges = { comments: ['one', 'two', 'three'] } + this.updated_ranges = { comments: ['one', 'three'] } + this.DocumentManager.getDoc = sinon + .stub() + .yields(null, this.lines, this.version, this.ranges) + this.RangesManager.deleteComment = sinon + .stub() + .yields(null, this.updated_ranges) + return (this.RedisManager.updateDocument = sinon.stub().yields()) + }) + + describe('successfully', function () { + beforeEach(function () { + return this.DocumentManager.deleteComment( + this.project_id, + this.doc_id, + this.comment_id, + this.callback + ) + }) + + it("should get the document's current ranges", function () { + return this.DocumentManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should delete the comment from the ranges', function () { + return this.RangesManager.deleteComment + .calledWith(this.comment_id, this.ranges) + .should.equal(true) + }) + + it('should save the updated ranges', function () { + return this.RedisManager.updateDocument + .calledWith( + this.project_id, + this.doc_id, + this.lines, + this.version, + [], + this.updated_ranges, + {} + ) + .should.equal(true) + }) + + return it('should call the callback', function () { + return 
this.callback.called.should.equal(true)
+      })
+    })
+
+    return describe('when the doc is not found', function () {
+      beforeEach(function () {
+        this.DocumentManager.getDoc = sinon
+          .stub()
+          .yields(null, null, null, null)
+        return this.DocumentManager.deleteComment(
+          this.project_id,
+          this.doc_id,
+          this.comment_id,
+          this.callback
+        )
+      })
+
+      it('should not save anything', function () {
+        return this.RedisManager.updateDocument.called.should.equal(false)
+      })
+
+      return it('should call the callback with a not found error', function () {
+        const error = new Errors.NotFoundError(
+          `document not found: ${this.doc_id}`
+        )
+        return this.callback.calledWith(error).should.equal(true)
+      })
+    })
+  })
+
+  describe('getDocAndFlushIfOld', function () {
+    beforeEach(function () {
+      return (this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2))
+    })
+
+    describe('when the doc is in Redis', function () {
+      describe('and has changes to be flushed', function () {
+        beforeEach(function () {
+          this.DocumentManager.getDoc = sinon
+            .stub()
+            .callsArgWith(
+              2,
+              null,
+              this.lines,
+              this.version,
+              this.ranges,
+              this.pathname,
+              this.projectHistoryId,
+              Date.now() - 1e9,
+              true
+            )
+          return this.DocumentManager.getDocAndFlushIfOld(
+            this.project_id,
+            this.doc_id,
+            this.callback
+          )
+        })
+
+        it('should get the doc', function () {
+          return this.DocumentManager.getDoc
+            .calledWith(this.project_id, this.doc_id)
+            .should.equal(true)
+        })
+
+        it('should flush the doc', function () {
+          return this.DocumentManager.flushDocIfLoaded
+            .calledWith(this.project_id, this.doc_id)
+            .should.equal(true)
+        })
+
+        return it('should call the callback with the lines and versions', function () {
+          return this.callback
+            .calledWith(null, this.lines, this.version)
+            .should.equal(true)
+        })
+      })
+
+      return describe("and has only changes that don't need to be flushed", function () {
+        beforeEach(function () {
+          this.DocumentManager.getDoc = sinon
+            .stub()
+            .callsArgWith(
+              2,
+              null,
+              this.lines,
+              this.version,
+              this.ranges,
+              this.pathname,
+              Date.now() - 100,
+              true
+            )
+          return this.DocumentManager.getDocAndFlushIfOld(
+            this.project_id,
+            this.doc_id,
+            this.callback
+          )
+        })
+
+        it('should get the doc', function () {
+          return this.DocumentManager.getDoc
+            .calledWith(this.project_id, this.doc_id)
+            .should.equal(true)
+        })
+
+        it('should not flush the doc', function () {
+          return this.DocumentManager.flushDocIfLoaded.called.should.equal(
+            false
+          )
+        })
+
+        return it('should call the callback with the lines and versions', function () {
+          return this.callback
+            .calledWith(null, this.lines, this.version)
+            .should.equal(true)
+        })
+      })
+    })
+
+    return describe('when the doc is not in Redis', function () {
+      beforeEach(function () {
+        this.DocumentManager.getDoc = sinon
+          .stub()
+          .callsArgWith(
+            2,
+            null,
+            this.lines,
+            this.version,
+            this.ranges,
+            null,
+            false
+          )
+        return this.DocumentManager.getDocAndFlushIfOld(
+          this.project_id,
+          this.doc_id,
+          this.callback
+        )
+      })
+
+      it('should get the doc', function () {
+        return this.DocumentManager.getDoc
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true)
+      })
+
+      it('should not flush the doc', function () {
+        return this.DocumentManager.flushDocIfLoaded.called.should.equal(false)
+      })
+
+      return it('should call the callback with the lines and versions', function () {
+        return this.callback
+          .calledWith(null, this.lines, this.version)
+          .should.equal(true)
+      })
+    })
+  })
+
+  describe('renameDoc', function () {
+
beforeEach(function () { + this.update = 'some-update' + return (this.RedisManager.renameDoc = sinon.stub().yields()) + }) + + return describe('successfully', function () { + beforeEach(function () { + return this.DocumentManager.renameDoc( + this.project_id, + this.doc_id, + this.user_id, + this.update, + this.projectHistoryId, + this.callback + ) + }) + + it('should rename the document', function () { + return this.RedisManager.renameDoc + .calledWith( + this.project_id, + this.doc_id, + this.user_id, + this.update, + this.projectHistoryId + ) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + }) + + return describe('resyncDocContents', function () { + describe('when doc is loaded in redis', function () { + beforeEach(function () { + this.RedisManager.getDoc = sinon + .stub() + .callsArgWith( + 2, + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId + ) + this.ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() + return this.DocumentManager.resyncDocContents( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('gets the doc contents from redis', function () { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + return it('queues a resync doc content update', function () { + return this.ProjectHistoryRedisManager.queueResyncDocContent + .calledWith( + this.project_id, + this.projectHistoryId, + this.doc_id, + this.lines, + this.version, + this.pathname, + this.callback + ) + .should.equal(true) + }) + }) + + return describe('when doc is not loaded in redis', function () { + beforeEach(function () { + this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null) + this.PersistenceManager.getDoc = sinon + .stub() + .callsArgWith( + 2, + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId + ) + this.ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub() + return this.DocumentManager.resyncDocContents( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('tries to get the doc contents from redis', function () { + return this.RedisManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('gets the doc contents from web', function () { + return this.PersistenceManager.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + return it('queues a resync doc content update', function () { + return this.ProjectHistoryRedisManager.queueResyncDocContent + .calledWith( + this.project_id, + this.projectHistoryId, + this.doc_id, + this.lines, + this.version, + this.pathname, + this.callback + ) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js index 64a07b06ae..263f1cd094 100644 --- a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js @@ -9,305 +9,417 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, 
'../../../../app/js/HistoryManager'); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../../app/js/HistoryManager' +) -describe("HistoryManager", function() { - beforeEach(function() { - this.HistoryManager = SandboxedModule.require(modulePath, { requires: { - "request": (this.request = {}), - "settings-sharelatex": (this.Settings = { - apis: { - project_history: { - enabled: true, - url: "http://project_history.example.com" - }, - trackchanges: { - url: "http://trackchanges.example.com" - } - } - }), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), debug: sinon.stub() }), - "./DocumentManager": (this.DocumentManager = {}), - "./HistoryRedisManager": (this.HistoryRedisManager = {}), - "./RedisManager": (this.RedisManager = {}), - "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), - "./Metrics": (this.metrics = {inc: sinon.stub()}) - } - }); - this.project_id = "mock-project-id"; - this.doc_id = "mock-doc-id"; - return this.callback = sinon.stub(); - }); +describe('HistoryManager', function () { + beforeEach(function () { + this.HistoryManager = SandboxedModule.require(modulePath, { + requires: { + request: (this.request = {}), + 'settings-sharelatex': (this.Settings = { + apis: { + project_history: { + enabled: true, + url: 'http://project_history.example.com' + }, + trackchanges: { + url: 'http://trackchanges.example.com' + } + } + }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub(), + debug: sinon.stub() + }), + './DocumentManager': (this.DocumentManager = {}), + './HistoryRedisManager': (this.HistoryRedisManager = {}), + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './Metrics': (this.metrics = { inc: sinon.stub() }) + } + }) + this.project_id = 'mock-project-id' + this.doc_id = 'mock-doc-id' + return (this.callback = sinon.stub()) + }) - describe("flushDocChangesAsync", function() { - beforeEach(function() { - return this.request.post = sinon.stub().callsArgWith(1, null, {statusCode: 204}); - }); + describe('flushDocChangesAsync', function () { + beforeEach(function () { + return (this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 204 })) + }) - describe("when the project uses track changes", function() { - beforeEach(function() { - this.RedisManager.getHistoryType = sinon.stub().yields(null, 'track-changes'); - return this.HistoryManager.flushDocChangesAsync(this.project_id, this.doc_id); - }); + describe('when the project uses track changes', function () { + beforeEach(function () { + this.RedisManager.getHistoryType = sinon + .stub() + .yields(null, 'track-changes') + return this.HistoryManager.flushDocChangesAsync( + this.project_id, + this.doc_id + ) + }) - return it("should send a request to the track changes api", function() { - return this.request.post - .calledWith(`${this.Settings.apis.trackchanges.url}/project/${this.project_id}/doc/${this.doc_id}/flush`) - .should.equal(true); - }); - }); + return it('should send a request to the track changes api', function () { + return this.request.post + .calledWith( + `${this.Settings.apis.trackchanges.url}/project/${this.project_id}/doc/${this.doc_id}/flush` + ) + .should.equal(true) + }) + }) - describe("when the project uses project history and double flush is not disabled", function() { - beforeEach(function() { - 
this.RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history'); - return this.HistoryManager.flushDocChangesAsync(this.project_id, this.doc_id); - }); + describe('when the project uses project history and double flush is not disabled', function () { + beforeEach(function () { + this.RedisManager.getHistoryType = sinon + .stub() + .yields(null, 'project-history') + return this.HistoryManager.flushDocChangesAsync( + this.project_id, + this.doc_id + ) + }) - return it("should send a request to the track changes api", function() { - return this.request.post - .called - .should.equal(true); - }); - }); + return it('should send a request to the track changes api', function () { + return this.request.post.called.should.equal(true) + }) + }) - return describe("when the project uses project history and double flush is disabled", function() { - beforeEach(function() { - this.Settings.disableDoubleFlush = true; - this.RedisManager.getHistoryType = sinon.stub().yields(null, 'project-history'); - return this.HistoryManager.flushDocChangesAsync(this.project_id, this.doc_id); - }); + return describe('when the project uses project history and double flush is disabled', function () { + beforeEach(function () { + this.Settings.disableDoubleFlush = true + this.RedisManager.getHistoryType = sinon + .stub() + .yields(null, 'project-history') + return this.HistoryManager.flushDocChangesAsync( + this.project_id, + this.doc_id + ) + }) - return it("should not send a request to the track changes api", function() { - return this.request.post - .called - .should.equal(false); - }); - }); - }); + return it('should not send a request to the track changes api', function () { + return this.request.post.called.should.equal(false) + }) + }) + }) + describe('flushProjectChangesAsync', function () { + beforeEach(function () { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 204 }) - describe("flushProjectChangesAsync", function() { - beforeEach(function() { - this.request.post = sinon.stub().callsArgWith(1, null, {statusCode: 204}); + return this.HistoryManager.flushProjectChangesAsync(this.project_id) + }) - return this.HistoryManager.flushProjectChangesAsync(this.project_id); - }); + return it('should send a request to the project history api', function () { + return this.request.post + .calledWith({ + url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, + qs: { background: true } + }) + .should.equal(true) + }) + }) - return it("should send a request to the project history api", function() { - return this.request.post - .calledWith({url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, qs:{background:true}}) - .should.equal(true); - }); - }); + describe('flushProjectChanges', function () { + describe('in the normal case', function () { + beforeEach(function () { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 204 }) + return this.HistoryManager.flushProjectChanges(this.project_id, { + background: true + }) + }) - describe("flushProjectChanges", function() { + return it('should send a request to the project history api', function () { + return this.request.post + .calledWith({ + url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, + qs: { background: true } + }) + .should.equal(true) + }) + }) - describe("in the normal case", function() { - beforeEach(function() { - this.request.post = sinon.stub().callsArgWith(1, null, {statusCode: 204}); - return 
this.HistoryManager.flushProjectChanges(this.project_id, {background:true});}); + return describe('with the skip_history_flush option', function () { + beforeEach(function () { + this.request.post = sinon.stub() + return this.HistoryManager.flushProjectChanges(this.project_id, { + skip_history_flush: true + }) + }) - return it("should send a request to the project history api", function() { - return this.request.post - .calledWith({url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, qs:{background:true}}) - .should.equal(true); - }); - }); + return it('should not send a request to the project history api', function () { + return this.request.post.called.should.equal(false) + }) + }) + }) - return describe("with the skip_history_flush option", function() { - beforeEach(function() { - this.request.post = sinon.stub(); - return this.HistoryManager.flushProjectChanges(this.project_id, {skip_history_flush:true});}); + describe('recordAndFlushHistoryOps', function () { + beforeEach(function () { + this.ops = ['mock-ops'] + this.project_ops_length = 10 + this.doc_ops_length = 5 - return it("should not send a request to the project history api", function() { - return this.request.post - .called - .should.equal(false); - }); - }); - }); + this.HistoryManager.flushProjectChangesAsync = sinon.stub() + this.HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3) + return (this.HistoryManager.flushDocChangesAsync = sinon.stub()) + }) - describe("recordAndFlushHistoryOps", function() { - beforeEach(function() { - this.ops = [ 'mock-ops' ]; - this.project_ops_length = 10; - this.doc_ops_length = 5; + describe('with no ops', function () { + beforeEach(function () { + return this.HistoryManager.recordAndFlushHistoryOps( + this.project_id, + this.doc_id, + [], + this.doc_ops_length, + this.project_ops_length, + this.callback + ) + }) - this.HistoryManager.flushProjectChangesAsync = sinon.stub(); - this.HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3); - return this.HistoryManager.flushDocChangesAsync = sinon.stub(); - }); + it('should not flush project changes', function () { + return this.HistoryManager.flushProjectChangesAsync.called.should.equal( + false + ) + }) - describe("with no ops", function() { - beforeEach(function() { - return this.HistoryManager.recordAndFlushHistoryOps( - this.project_id, this.doc_id, [], this.doc_ops_length, this.project_ops_length, this.callback - ); - }); + it('should not record doc has history ops', function () { + return this.HistoryRedisManager.recordDocHasHistoryOps.called.should.equal( + false + ) + }) - it("should not flush project changes", function() { - return this.HistoryManager.flushProjectChangesAsync.called.should.equal(false); - }); + it('should not flush doc changes', function () { + return this.HistoryManager.flushDocChangesAsync.called.should.equal( + false + ) + }) - it("should not record doc has history ops", function() { - return this.HistoryRedisManager.recordDocHasHistoryOps.called.should.equal(false); - }); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) - it("should not flush doc changes", function() { - return this.HistoryManager.flushDocChangesAsync.called.should.equal(false); - }); + describe('with enough ops to flush project changes', function () { + beforeEach(function () { + this.HistoryManager.shouldFlushHistoryOps = sinon.stub() + this.HistoryManager.shouldFlushHistoryOps + .withArgs(this.project_ops_length) + 
.returns(true)
+        this.HistoryManager.shouldFlushHistoryOps
+          .withArgs(this.doc_ops_length)
+          .returns(false)

-      return it("should call the callback", function() {
-        return this.callback.called.should.equal(true);
-      });
-    });

+        return this.HistoryManager.recordAndFlushHistoryOps(
+          this.project_id,
+          this.doc_id,
+          this.ops,
+          this.doc_ops_length,
+          this.project_ops_length,
+          this.callback
+        )
+      })

-    describe("with enough ops to flush project changes", function() {
-      beforeEach(function() {
-        this.HistoryManager.shouldFlushHistoryOps = sinon.stub();
-        this.HistoryManager.shouldFlushHistoryOps.withArgs(this.project_ops_length).returns(true);
-        this.HistoryManager.shouldFlushHistoryOps.withArgs(this.doc_ops_length).returns(false);

+      it('should flush project changes', function () {
+        return this.HistoryManager.flushProjectChangesAsync
+          .calledWith(this.project_id)
+          .should.equal(true)
+      })

-      return this.HistoryManager.recordAndFlushHistoryOps(
-        this.project_id, this.doc_id, this.ops, this.doc_ops_length, this.project_ops_length, this.callback
-      );
-    });

+      it('should record doc has history ops', function () {
+        return this.HistoryRedisManager.recordDocHasHistoryOps
+          .calledWith(this.project_id, this.doc_id, this.ops)
+          .should.equal(true)
+      })

-      it("should flush project changes", function() {
-        return this.HistoryManager.flushProjectChangesAsync
-          .calledWith(this.project_id)
-          .should.equal(true);
-      });

+      it('should not flush doc changes', function () {
+        return this.HistoryManager.flushDocChangesAsync.called.should.equal(
+          false
+        )
+      })

-      it("should record doc has history ops", function() {
-        return this.HistoryRedisManager.recordDocHasHistoryOps
-          .calledWith(this.project_id, this.doc_id, this.ops);
-      });

+      return it('should call the callback', function () {
+        return this.callback.called.should.equal(true)
+      })
+    })

-      it("should not flush doc changes", function() {
-        return this.HistoryManager.flushDocChangesAsync.called.should.equal(false);
-      });

+    describe('with enough ops to flush doc changes', function () {
+      beforeEach(function () {
+        this.HistoryManager.shouldFlushHistoryOps = sinon.stub()
+        this.HistoryManager.shouldFlushHistoryOps
+          .withArgs(this.project_ops_length)
+          .returns(false)
+        this.HistoryManager.shouldFlushHistoryOps
+          .withArgs(this.doc_ops_length)
+          .returns(true)

-      return it("should call the callback", function() {
-        return this.callback.called.should.equal(true);
-      });
-    });

+        return this.HistoryManager.recordAndFlushHistoryOps(
+          this.project_id,
+          this.doc_id,
+          this.ops,
+          this.doc_ops_length,
+          this.project_ops_length,
+          this.callback
+        )
+      })

-    describe("with enough ops to flush doc changes", function() {
-      beforeEach(function() {
-        this.HistoryManager.shouldFlushHistoryOps = sinon.stub();
-        this.HistoryManager.shouldFlushHistoryOps.withArgs(this.project_ops_length).returns(false);
-        this.HistoryManager.shouldFlushHistoryOps.withArgs(this.doc_ops_length).returns(true);

+      it('should not flush project changes', function () {
+        return this.HistoryManager.flushProjectChangesAsync.called.should.equal(
+          false
+        )
+      })

-      return this.HistoryManager.recordAndFlushHistoryOps(
-        this.project_id, this.doc_id, this.ops, this.doc_ops_length, this.project_ops_length, this.callback
-      );
-    });

+      it('should record doc has history ops', function () {
+        return this.HistoryRedisManager.recordDocHasHistoryOps
+          .calledWith(this.project_id, this.doc_id, this.ops)
+          .should.equal(true)
+      })

-      it("should not flush project changes", function() {
-        return
this.HistoryManager.flushProjectChangesAsync.called.should.equal(false); - }); + it('should flush doc changes', function () { + return this.HistoryManager.flushDocChangesAsync + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) - it("should record doc has history ops", function() { - return this.HistoryRedisManager.recordDocHasHistoryOps - .calledWith(this.project_id, this.doc_id, this.ops); - }); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) - it("should flush doc changes", function() { - return this.HistoryManager.flushDocChangesAsync - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); + describe('when recording doc has history ops errors', function () { + beforeEach(function () { + this.error = new Error('error') + this.HistoryRedisManager.recordDocHasHistoryOps = sinon + .stub() + .callsArgWith(3, this.error) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return this.HistoryManager.recordAndFlushHistoryOps( + this.project_id, + this.doc_id, + this.ops, + this.doc_ops_length, + this.project_ops_length, + this.callback + ) + }) - describe("when recording doc has history ops errors", function() { - beforeEach(function() { - this.error = new Error("error"); - this.HistoryRedisManager.recordDocHasHistoryOps = - sinon.stub().callsArgWith(3, this.error); + it('should not flush doc changes', function () { + return this.HistoryManager.flushDocChangesAsync.called.should.equal( + false + ) + }) - return this.HistoryManager.recordAndFlushHistoryOps( - this.project_id, this.doc_id, this.ops, this.doc_ops_length, this.project_ops_length, this.callback - ); - }); + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) - it("should not flush doc changes", function() { - return this.HistoryManager.flushDocChangesAsync.called.should.equal(false); - }); + return describe('shouldFlushHistoryOps', function () { + it('should return false if the number of ops is not known', function () { + return this.HistoryManager.shouldFlushHistoryOps( + null, + ['a', 'b', 'c'].length, + 1 + ).should.equal(false) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + it("should return false if the updates didn't take us past the threshold", function () { + // Currently there are 14 ops + // Previously we were on 11 ops + // We didn't pass over a multiple of 5 + this.HistoryManager.shouldFlushHistoryOps( + 14, + ['a', 'b', 'c'].length, + 5 + ).should.equal(false) - return describe("shouldFlushHistoryOps", function() { - it("should return false if the number of ops is not known", function() { - return this.HistoryManager.shouldFlushHistoryOps(null, ['a', 'b', 'c'].length, 1).should.equal(false); - }); + it('should return true if the updates took to the threshold', function () {}) + // Currently there are 15 ops + // Previously we were on 12 ops + // We've reached a new multiple of 5 + return this.HistoryManager.shouldFlushHistoryOps( + 15, + ['a', 'b', 'c'].length, + 5 + ).should.equal(true) + }) - it("should return false if the updates didn't take us past the threshold", function() { - // Currently there are 14 ops - // Previously we were on 11 ops - // We didn't pass over a multiple of 5 - this.HistoryManager.shouldFlushHistoryOps(14, ['a', 'b', 'c'].length, 
5).should.equal(false);

+      it("should return false if the updates didn't take us past the threshold", function () {
+        // Currently there are 14 ops
+        // Previously we were on 11 ops
+        // We didn't pass over a multiple of 5
+        this.HistoryManager.shouldFlushHistoryOps(
+          14,
+          ['a', 'b', 'c'].length,
+          5
+        ).should.equal(false)
+      })

-      it("should return true if the updates took to the threshold", function() {});
-        // Currently there are 15 ops
-        // Previously we were on 12 ops
-        // We've reached a new multiple of 5
-        return this.HistoryManager.shouldFlushHistoryOps(15, ['a', 'b', 'c'].length, 5).should.equal(true);
-      });

+      it('should return true if the updates took us to the threshold', function () {
+        // Currently there are 15 ops
+        // Previously we were on 12 ops
+        // We've reached a new multiple of 5
+        return this.HistoryManager.shouldFlushHistoryOps(
+          15,
+          ['a', 'b', 'c'].length,
+          5
+        ).should.equal(true)
+      })

-      return it("should return true if the updates took past the threshold", function() {
-        // Currently there are 19 ops
-        // Previously we were on 16 ops
-        // We didn't pass over a multiple of 5
-        return this.HistoryManager.shouldFlushHistoryOps(17, ['a', 'b', 'c'].length, 5).should.equal(true);
-      });
-    });
-  });

+      return it('should return true if the updates took us past the threshold', function () {
+        // Currently there are 17 ops
+        // Previously we were on 14 ops
+        // We passed over a multiple of 5 (15)
+        return this.HistoryManager.shouldFlushHistoryOps(
+          17,
+          ['a', 'b', 'c'].length,
+          5
+        ).should.equal(true)
+      })
+    })
+  })

-  return describe("resyncProjectHistory", function() {
-    beforeEach(function() {
-      this.projectHistoryId = 'history-id-1234';
-      this.docs = [{
-        doc: this.doc_id,
-        path: 'main.tex'
-      }
-      ];
-      this.files = [{
-        file: 'mock-file-id',
-        path: 'universe.png',
-        url: `www.filestore.test/${this.project_id}/mock-file-id`
-      }
-      ];
-      this.ProjectHistoryRedisManager.queueResyncProjectStructure = sinon.stub().yields();
-      this.DocumentManager.resyncDocContentsWithLock = sinon.stub().yields();
-      return this.HistoryManager.resyncProjectHistory(this.project_id, this.projectHistoryId, this.docs, this.files, this.callback);
-    });

+  return describe('resyncProjectHistory', function () {
+    beforeEach(function () {
+      this.projectHistoryId = 'history-id-1234'
+      this.docs = [
+        {
+          doc: this.doc_id,
+          path: 'main.tex'
+        }
+      ]
+      this.files = [
+        {
+          file: 'mock-file-id',
+          path: 'universe.png',
+          url: `www.filestore.test/${this.project_id}/mock-file-id`
+        }
+      ]
+      this.ProjectHistoryRedisManager.queueResyncProjectStructure = sinon
+        .stub()
+        .yields()
+      this.DocumentManager.resyncDocContentsWithLock = sinon.stub().yields()
+      return this.HistoryManager.resyncProjectHistory(
+        this.project_id,
+        this.projectHistoryId,
+        this.docs,
+        this.files,
+        this.callback
+      )
+    })

-    it("should queue a project structure reync", function() {
-      return this.ProjectHistoryRedisManager.queueResyncProjectStructure
-        .calledWith(this.project_id, this.projectHistoryId, this.docs, this.files)
-        .should.equal(true);
-    });

+    it('should queue a project structure resync', function () {
+      return this.ProjectHistoryRedisManager.queueResyncProjectStructure
+        .calledWith(
+          this.project_id,
+          this.projectHistoryId,
+          this.docs,
+          this.files
+        )
+        .should.equal(true)
+    })

-    it("should queue doc content reyncs", function() {
-      return this.DocumentManager
-        .resyncDocContentsWithLock
-        .calledWith(this.project_id, this.doc_id)
-        .should.equal(true);
-    });

+    it('should queue doc content resyncs', function () {
+      return this.DocumentManager.resyncDocContentsWithLock
+        .calledWith(this.project_id, this.doc_id)
+        .should.equal(true)
+    })

-    return it("should call the callback", function() {
-      return this.callback.called.should.equal(true);
-    });
-  });
-});

+    return it('should call the callback', function () {
+      return this.callback.called.should.equal(true)
+    })
+  })
+})
diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js
index b5184a11e6..68321533b3 100644
---
a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js @@ -11,79 +11,93 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/HistoryRedisManager.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors"); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/HistoryRedisManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') -describe("HistoryRedisManager", function() { - beforeEach(function() { - this.rclient = { - auth() {}, - exec: sinon.stub() - }; - this.rclient.multi = () => this.rclient; - this.HistoryRedisManager = SandboxedModule.require(modulePath, { requires: { - "redis-sharelatex": { createClient: () => this.rclient - }, - "settings-sharelatex": { - redis: { - history: (this.settings = { - key_schema: { - uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:${doc_id}`; }, - docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:${project_id}`; } - } - }) - } - }, - "logger-sharelatex": { log() {} } - } - }); - this.doc_id = "doc-id-123"; - this.project_id = "project-id-123"; - return this.callback = sinon.stub(); - }); +describe('HistoryRedisManager', function () { + beforeEach(function () { + this.rclient = { + auth() {}, + exec: sinon.stub() + } + this.rclient.multi = () => this.rclient + this.HistoryRedisManager = SandboxedModule.require(modulePath, { + requires: { + 'redis-sharelatex': { createClient: () => this.rclient }, + 'settings-sharelatex': { + redis: { + history: (this.settings = { + key_schema: { + uncompressedHistoryOps({ doc_id }) { + return `UncompressedHistoryOps:${doc_id}` + }, + docsWithHistoryOps({ project_id }) { + return `DocsWithHistoryOps:${project_id}` + } + } + }) + } + }, + 'logger-sharelatex': { log() {} } + } + }) + this.doc_id = 'doc-id-123' + this.project_id = 'project-id-123' + return (this.callback = sinon.stub()) + }) - return describe("recordDocHasHistoryOps", function() { - beforeEach(function() { - this.ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 56 }] }]; - return this.rclient.sadd = sinon.stub().yields(); - }); - - describe("with ops", function() { - beforeEach(function(done) { - return this.HistoryRedisManager.recordDocHasHistoryOps(this.project_id, this.doc_id, this.ops, (...args) => { - this.callback(...Array.from(args || [])); - return done(); - }); - }); - - return it("should add the doc_id to the set of which records the project docs", function() { - return this.rclient.sadd - .calledWith(`DocsWithHistoryOps:${this.project_id}`, this.doc_id) - .should.equal(true); - }); - }); + return describe('recordDocHasHistoryOps', function () { + beforeEach(function () { + this.ops = [{ op: [{ i: 'foo', p: 4 }] }, { op: [{ i: 'bar', p: 56 }] }] + return (this.rclient.sadd = sinon.stub().yields()) + }) - return describe("with no ops", function() { - beforeEach(function(done) { - return this.HistoryRedisManager.recordDocHasHistoryOps(this.project_id, this.doc_id, [], (...args) => { - this.callback(...Array.from(args || [])); - return done(); - }); - }); - - 
it("should not add the doc_id to the set of which records the project docs", function() { - return this.rclient.sadd - .called - .should.equal(false); - }); + describe('with ops', function () { + beforeEach(function (done) { + return this.HistoryRedisManager.recordDocHasHistoryOps( + this.project_id, + this.doc_id, + this.ops, + (...args) => { + this.callback(...Array.from(args || [])) + return done() + } + ) + }) - return it("should call the callback with an error", function() { - return this.callback.calledWith(new Error("cannot push no ops")).should.equal(true); - }); - }); - }); -}); + return it('should add the doc_id to the set of which records the project docs', function () { + return this.rclient.sadd + .calledWith(`DocsWithHistoryOps:${this.project_id}`, this.doc_id) + .should.equal(true) + }) + }) + + return describe('with no ops', function () { + beforeEach(function (done) { + return this.HistoryRedisManager.recordDocHasHistoryOps( + this.project_id, + this.doc_id, + [], + (...args) => { + this.callback(...Array.from(args || [])) + return done() + } + ) + }) + + it('should not add the doc_id to the set of which records the project docs', function () { + return this.rclient.sadd.called.should.equal(false) + }) + + return it('should call the callback with an error', function () { + return this.callback + .calledWith(new Error('cannot push no ops')) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 5189f3515d..8f4125fcfa 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -10,806 +10,943 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/HttpController.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors.js"); - -describe("HttpController", function() { - beforeEach(function() { - let Timer; - this.HttpController = SandboxedModule.require(modulePath, { requires: { - "./DocumentManager": (this.DocumentManager = {}), - "./HistoryManager": (this.HistoryManager = - {flushProjectChangesAsync: sinon.stub()}), - "./ProjectManager": (this.ProjectManager = {}), - "logger-sharelatex" : (this.logger = { log: sinon.stub() }), - "./ProjectFlusher": {flushAllProjects() {}}, - "./DeleteQueueManager": (this.DeleteQueueManager = {}), - "./Metrics": (this.Metrics = {}), - "./Errors" : Errors - } - } - ); - this.Metrics.Timer = (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()); - this.project_id = "project-id-123"; - this.doc_id = "doc-id-123"; - this.next = sinon.stub(); - return this.res = { - send: sinon.stub(), - sendStatus: sinon.stub(), - json: sinon.stub() - }; - }); - - describe("getDoc", function() { - beforeEach(function() { - this.lines = ["one", "two", "three"]; - this.ops = ["mock-op-1", "mock-op-2"]; - this.version = 42; - this.fromVersion = 42; - this.ranges = { changes: "mock", comments: "mock" }; - this.pathname = '/a/b/c'; - return this.req = { - params: { - project_id: this.project_id, - doc_id: 
this.doc_id - } - }; - }); - - describe("when the document exists and no recent ops are requested", function() { - beforeEach(function() { - this.DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, this.lines, this.version, [], this.ranges, this.pathname); - return this.HttpController.getDoc(this.req, this.res, this.next); - }); - - it("should get the doc", function() { - return this.DocumentManager.getDocAndRecentOpsWithLock - .calledWith(this.project_id, this.doc_id, -1) - .should.equal(true); - }); - - it("should return the doc as JSON", function() { - return this.res.json - .calledWith({ - id: this.doc_id, - lines: this.lines, - version: this.version, - ops: [], - ranges: this.ranges, - pathname: this.pathname - }) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({doc_id: this.doc_id, project_id: this.project_id}, "getting doc via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("when recent ops are requested", function() { - beforeEach(function() { - this.DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, this.lines, this.version, this.ops, this.ranges, this.pathname); - this.req.query = {fromVersion: `${this.fromVersion}`}; - return this.HttpController.getDoc(this.req, this.res, this.next); - }); - - it("should get the doc", function() { - return this.DocumentManager.getDocAndRecentOpsWithLock - .calledWith(this.project_id, this.doc_id, this.fromVersion) - .should.equal(true); - }); - - it("should return the doc as JSON", function() { - return this.res.json - .calledWith({ - id: this.doc_id, - lines: this.lines, - version: this.version, - ops: this.ops, - ranges: this.ranges, - pathname: this.pathname - }) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({doc_id: this.doc_id, project_id: this.project_id}, "getting doc via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("when the document does not exist", function() { - beforeEach(function() { - this.DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, null, null, null); - return this.HttpController.getDoc(this.req, this.res, this.next); - }); - - return it("should call next with NotFoundError", function() { - return this.next - .calledWith(new Errors.NotFoundError("not found")) - .should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.DocumentManager.getDocAndRecentOpsWithLock = sinon.stub().callsArgWith(3, new Error("oops"), null, null); - return this.HttpController.getDoc(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - describe("setDoc", function() { - beforeEach(function() { - this.lines = ["one", "two", "three"]; - this.source = "dropbox"; - this.user_id = "user-id-123"; - return this.req = { - headers: {}, - params: { - project_id: this.project_id, - doc_id: this.doc_id - }, - body: { - lines: this.lines, - source: this.source, - user_id: this.user_id, - undoing: (this.undoing = true) - } - }; - }); - - describe("successfully", function() { - 
beforeEach(function() { - this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6); - return this.HttpController.setDoc(this.req, this.res, this.next); - }); - - it("should set the doc", function() { - return this.DocumentManager.setDocWithLock - .calledWith(this.project_id, this.doc_id, this.lines, this.source, this.user_id, this.undoing) - .should.equal(true); - }); - - it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({doc_id: this.doc_id, project_id: this.project_id, lines: this.lines, source: this.source, user_id: this.user_id, undoing: this.undoing}, "setting doc via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("when an errors occurs", function() { - beforeEach(function() { - this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6, new Error("oops")); - return this.HttpController.setDoc(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - - return describe("when the payload is too large", function() { - beforeEach(function() { - const lines = []; - for (let _ = 0; _ <= 200000; _++) { - lines.push("test test test"); - } - this.req.body.lines = lines; - this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6); - return this.HttpController.setDoc(this.req, this.res, this.next); - }); - - it('should send back a 406 response', function() { - return this.res.sendStatus.calledWith(406).should.equal(true); - }); - - return it('should not call setDocWithLock', function() { - return this.DocumentManager.setDocWithLock.callCount.should.equal(0); - }); - }); - }); - - describe("flushProject", function() { - beforeEach(function() { - return this.req = { - params: { - project_id: this.project_id - } - }; - }); - - describe("successfully", function() { - beforeEach(function() { - this.ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1); - return this.HttpController.flushProject(this.req, this.res, this.next); - }); - - it("should flush the project", function() { - return this.ProjectManager.flushProjectWithLocks - .calledWith(this.project_id) - .should.equal(true); - }); - - it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({project_id: this.project_id}, "flushing project via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1, new Error("oops")); - return this.HttpController.flushProject(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - describe("flushDocIfLoaded", function() { - beforeEach(function() { - this.lines = ["one", "two", "three"]; - this.version = 42; - return this.req = { - params: { - project_id: 
this.project_id, - doc_id: this.doc_id - } - }; - }); - - describe("successfully", function() { - beforeEach(function() { - this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2); - return this.HttpController.flushDocIfLoaded(this.req, this.res, this.next); - }); - - it("should flush the doc", function() { - return this.DocumentManager.flushDocIfLoadedWithLock - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - - it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({doc_id: this.doc_id, project_id: this.project_id}, "flushing doc via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArgWith(2, new Error("oops")); - return this.HttpController.flushDocIfLoaded(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - describe("deleteDoc", function() { - beforeEach(function() { - return this.req = { - params: { - project_id: this.project_id, - doc_id: this.doc_id - }, - query: {} - };}); - - describe("successfully", function() { - beforeEach(function() { - this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3); - return this.HttpController.deleteDoc(this.req, this.res, this.next); - }); - - it("should flush and delete the doc", function() { - return this.DocumentManager.flushAndDeleteDocWithLock - .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: false }) - .should.equal(true); - }); - - it("should flush project history", function() { - return this.HistoryManager.flushProjectChangesAsync - .calledWithExactly(this.project_id) - .should.equal(true); - }); - - it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({doc_id: this.doc_id, project_id: this.project_id}, "deleting doc via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("ignoring errors", function() { - beforeEach(function() { - this.req.query.ignore_flush_errors = 'true'; - this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields(); - return this.HttpController.deleteDoc(this.req, this.res, this.next); - }); - - it("should delete the doc", function() { - return this.DocumentManager.flushAndDeleteDocWithLock - .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: true }) - .should.equal(true); - }); - - return it("should return a successful No Content response", function() { - return this.res.sendStatus.calledWith(204).should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3, new Error("oops")); - return this.HttpController.deleteDoc(this.req, this.res, this.next); - }); - - it("should flush project history", function() 
{ - return this.HistoryManager.flushProjectChangesAsync - .calledWithExactly(this.project_id) - .should.equal(true); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - describe("deleteProject", function() { - beforeEach(function() { - return this.req = { - params: { - project_id: this.project_id - } - }; - }); - - describe("successfully", function() { - beforeEach(function() { - this.ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2); - return this.HttpController.deleteProject(this.req, this.res, this.next); - }); - - it("should delete the project", function() { - return this.ProjectManager.flushAndDeleteProjectWithLocks - .calledWith(this.project_id) - .should.equal(true); - }); - - it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({project_id: this.project_id}, "deleting project via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("with the background=true option from realtime", function() { - beforeEach(function() { - this.ProjectManager.queueFlushAndDeleteProject = sinon.stub().callsArgWith(1); - this.req.query = {background:true, shutdown:true}; - return this.HttpController.deleteProject(this.req, this.res, this.next); - }); - - return it("should queue the flush and delete", function() { - return this.ProjectManager.queueFlushAndDeleteProject - .calledWith(this.project_id) - .should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.ProjectManager.flushAndDeleteProjectWithLocks = sinon.stub().callsArgWith(2, new Error("oops")); - return this.HttpController.deleteProject(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - describe("acceptChanges", function() { - beforeEach(function() { - return this.req = { - params: { - project_id: this.project_id, - doc_id: this.doc_id, - change_id: (this.change_id = "mock-change-od-1") - } - }; - }); - - describe("successfully with a single change", function() { - beforeEach(function() { - this.DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3); - return this.HttpController.acceptChanges(this.req, this.res, this.next); - }); - - it("should accept the change", function() { - return this.DocumentManager.acceptChangesWithLock - .calledWith(this.project_id, this.doc_id, [ this.change_id ]) - .should.equal(true); - }); - - it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({project_id: this.project_id, doc_id: this.doc_id}, "accepting 1 changes via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("succesfully with with multiple changes", function() { - beforeEach(function() { - this.change_ids = [ "mock-change-od-1", "mock-change-od-2", "mock-change-od-3", "mock-change-od-4" ]; 
- this.req.body = - {change_ids: this.change_ids}; - this.DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3); - return this.HttpController.acceptChanges(this.req, this.res, this.next); - }); - - it("should accept the changes in the body payload", function() { - return this.DocumentManager.acceptChangesWithLock - .calledWith(this.project_id, this.doc_id, this.change_ids) - .should.equal(true); - }); - - return it("should log the request with the correct number of changes", function() { - return this.logger.log - .calledWith({project_id: this.project_id, doc_id: this.doc_id}, `accepting ${ this.change_ids.length } changes via http`) - .should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.DocumentManager.acceptChangesWithLock = sinon.stub().callsArgWith(3, new Error("oops")); - return this.HttpController.acceptChanges(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - describe("deleteComment", function() { - beforeEach(function() { - return this.req = { - params: { - project_id: this.project_id, - doc_id: this.doc_id, - comment_id: (this.comment_id = "mock-comment-id") - } - }; - }); - - describe("successfully", function() { - beforeEach(function() { - this.DocumentManager.deleteCommentWithLock = sinon.stub().callsArgWith(3); - return this.HttpController.deleteComment(this.req, this.res, this.next); - }); - - it("should accept the change", function() { - return this.DocumentManager.deleteCommentWithLock - .calledWith(this.project_id, this.doc_id, this.comment_id) - .should.equal(true); - }); - - it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({project_id: this.project_id, doc_id: this.doc_id, comment_id: this.comment_id}, "deleting comment via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.DocumentManager.deleteCommentWithLock = sinon.stub().callsArgWith(3, new Error("oops")); - return this.HttpController.deleteComment(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - describe("getProjectDocsAndFlushIfOld", function() { - beforeEach(function() { - this.state = "01234567890abcdef"; - this.docs = [{_id: "1234", lines: "hello", v: 23}, {_id: "4567", lines: "world", v: 45}]; - return this.req = { - params: { - project_id: this.project_id - }, - query: { - state: this.state - } - }; - }); - - describe("successfully", function() { - beforeEach(function() { - this.ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3,null, this.docs); - return this.HttpController.getProjectDocsAndFlushIfOld(this.req, this.res, this.next); - }); - - it("should get docs from the project manager", function() { - return this.ProjectManager.getProjectDocsAndFlushIfOld - .calledWith(this.project_id, this.state, {}) - .should.equal(true); - }); - - it("should return a successful response", function() { - return this.res.send - 
.calledWith(this.docs) - .should.equal(true); - }); - - it("should log the request", function() { - return this.logger.log - .calledWith({project_id: this.project_id, exclude: []}, "getting docs via http") - .should.equal(true); - }); - - it("should log the response", function() { - return this.logger.log - .calledWith({project_id: this.project_id, result: ["1234:23", "4567:45"]}, "got docs via http") - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("when there is a conflict", function() { - beforeEach(function() { - this.ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3, new Errors.ProjectStateChangedError("project state changed")); - return this.HttpController.getProjectDocsAndFlushIfOld(this.req, this.res, this.next); - }); - - return it("should return an HTTP 409 Conflict response", function() { - return this.res.sendStatus - .calledWith(409) - .should.equal(true); - }); - }); - - return describe("when an error occurs", function() { - beforeEach(function() { - this.ProjectManager.getProjectDocsAndFlushIfOld = sinon.stub().callsArgWith(3, new Error("oops")); - return this.HttpController.getProjectDocsAndFlushIfOld(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - describe("updateProject", function() { - beforeEach(function() { - this.projectHistoryId = "history-id-123"; - this.userId = "user-id-123"; - this.docUpdates = sinon.stub(); - this.fileUpdates = sinon.stub(); - this.version = 1234567; - return this.req = { - body: {projectHistoryId: this.projectHistoryId, userId: this.userId, docUpdates: this.docUpdates, fileUpdates: this.fileUpdates, version: this.version}, - params: { - project_id: this.project_id - } - }; - }); - - describe("successfully", function() { - beforeEach(function() { - this.ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6); - return this.HttpController.updateProject(this.req, this.res, this.next); - }); - - it("should accept the change", function() { - return this.ProjectManager.updateProjectWithLocks - .calledWith(this.project_id, this.projectHistoryId, this.userId, this.docUpdates, this.fileUpdates, this.version) - .should.equal(true); - }); - - it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.ProjectManager.updateProjectWithLocks = sinon.stub().callsArgWith(6, new Error("oops")); - return this.HttpController.updateProject(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); - - return describe("resyncProjectHistory", function() { - beforeEach(function() { - this.projectHistoryId = "history-id-123"; - this.docs = sinon.stub(); - this.files = sinon.stub(); - this.fileUpdates = sinon.stub(); - return this.req = { - body: - {projectHistoryId: this.projectHistoryId, docs: this.docs, files: this.files}, - params: { - project_id: this.project_id - } - }; - }); - - describe("successfully", function() { - 
beforeEach(function() { - this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4); - return this.HttpController.resyncProjectHistory(this.req, this.res, this.next); - }); - - it("should accept the change", function() { - return this.HistoryManager.resyncProjectHistory - .calledWith(this.project_id, this.projectHistoryId, this.docs, this.files) - .should.equal(true); - }); - - return it("should return a successful No Content response", function() { - return this.res.sendStatus - .calledWith(204) - .should.equal(true); - }); - }); - - return describe("when an errors occurs", function() { - beforeEach(function() { - this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4, new Error("oops")); - return this.HttpController.resyncProjectHistory(this.req, this.res, this.next); - }); - - return it("should call next with the error", function() { - return this.next - .calledWith(new Error("oops")) - .should.equal(true); - }); - }); - }); -}); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/HttpController.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors.js') + +describe('HttpController', function () { + beforeEach(function () { + let Timer + this.HttpController = SandboxedModule.require(modulePath, { + requires: { + './DocumentManager': (this.DocumentManager = {}), + './HistoryManager': (this.HistoryManager = { + flushProjectChangesAsync: sinon.stub() + }), + './ProjectManager': (this.ProjectManager = {}), + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), + './ProjectFlusher': { flushAllProjects() {} }, + './DeleteQueueManager': (this.DeleteQueueManager = {}), + './Metrics': (this.Metrics = {}), + './Errors': Errors + } + }) + this.Metrics.Timer = Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })() + this.project_id = 'project-id-123' + this.doc_id = 'doc-id-123' + this.next = sinon.stub() + return (this.res = { + send: sinon.stub(), + sendStatus: sinon.stub(), + json: sinon.stub() + }) + }) + + describe('getDoc', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three'] + this.ops = ['mock-op-1', 'mock-op-2'] + this.version = 42 + this.fromVersion = 42 + this.ranges = { changes: 'mock', comments: 'mock' } + this.pathname = '/a/b/c' + return (this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id + } + }) + }) + + describe('when the document exists and no recent ops are requested', function () { + beforeEach(function () { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon + .stub() + .callsArgWith( + 3, + null, + this.lines, + this.version, + [], + this.ranges, + this.pathname + ) + return this.HttpController.getDoc(this.req, this.res, this.next) + }) + + it('should get the doc', function () { + return this.DocumentManager.getDocAndRecentOpsWithLock + .calledWith(this.project_id, this.doc_id, -1) + .should.equal(true) + }) + + it('should return the doc as JSON', function () { + return this.res.json + .calledWith({ + id: this.doc_id, + lines: this.lines, + version: this.version, + ops: [], + ranges: this.ranges, + pathname: this.pathname + }) + .should.equal(true) + }) + + it('should log the request', function () { + return this.logger.log + .calledWith( + { doc_id: this.doc_id, project_id: this.project_id }, + 'getting doc via http' + ) + .should.equal(true) + }) + 
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    describe('when recent ops are requested', function () {
+      beforeEach(function () {
+        this.DocumentManager.getDocAndRecentOpsWithLock = sinon
+          .stub()
+          .callsArgWith(
+            3,
+            null,
+            this.lines,
+            this.version,
+            this.ops,
+            this.ranges,
+            this.pathname
+          )
+        this.req.query = { fromVersion: `${this.fromVersion}` }
+        return this.HttpController.getDoc(this.req, this.res, this.next)
+      })
+
+      it('should get the doc', function () {
+        return this.DocumentManager.getDocAndRecentOpsWithLock
+          .calledWith(this.project_id, this.doc_id, this.fromVersion)
+          .should.equal(true)
+      })
+
+      it('should return the doc as JSON', function () {
+        return this.res.json
+          .calledWith({
+            id: this.doc_id,
+            lines: this.lines,
+            version: this.version,
+            ops: this.ops,
+            ranges: this.ranges,
+            pathname: this.pathname
+          })
+          .should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            { doc_id: this.doc_id, project_id: this.project_id },
+            'getting doc via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    describe('when the document does not exist', function () {
+      beforeEach(function () {
+        this.DocumentManager.getDocAndRecentOpsWithLock = sinon
+          .stub()
+          .callsArgWith(3, null, null, null)
+        return this.HttpController.getDoc(this.req, this.res, this.next)
+      })
+
+      return it('should call next with NotFoundError', function () {
+        return this.next
+          .calledWith(new Errors.NotFoundError('not found'))
+          .should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.DocumentManager.getDocAndRecentOpsWithLock = sinon
+          .stub()
+          .callsArgWith(3, new Error('oops'), null, null)
+        return this.HttpController.getDoc(this.req, this.res, this.next)
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  describe('setDoc', function () {
+    beforeEach(function () {
+      this.lines = ['one', 'two', 'three']
+      this.source = 'dropbox'
+      this.user_id = 'user-id-123'
+      return (this.req = {
+        headers: {},
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id
+        },
+        body: {
+          lines: this.lines,
+          source: this.source,
+          user_id: this.user_id,
+          undoing: (this.undoing = true)
+        }
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6)
+        return this.HttpController.setDoc(this.req, this.res, this.next)
+      })
+
+      it('should set the doc', function () {
+        return this.DocumentManager.setDocWithLock
+          .calledWith(
+            this.project_id,
+            this.doc_id,
+            this.lines,
+            this.source,
+            this.user_id,
+            this.undoing
+          )
+          .should.equal(true)
+      })
+
+      it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            {
+              doc_id: this.doc_id,
+              project_id: this.project_id,
+              lines: this.lines,
+              source: this.source,
+              user_id: this.user_id,
+              undoing: this.undoing
+            },
+            'setting doc via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.DocumentManager.setDocWithLock = sinon
+          .stub()
+          .callsArgWith(6, new Error('oops'))
+        return this.HttpController.setDoc(this.req, this.res, this.next)
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+
+    return describe('when the payload is too large', function () {
+      beforeEach(function () {
+        const lines = []
+        for (let _ = 0; _ <= 200000; _++) {
+          lines.push('test test test')
+        }
+        this.req.body.lines = lines
+        this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6)
+        return this.HttpController.setDoc(this.req, this.res, this.next)
+      })
+
+      it('should send back a 406 response', function () {
+        return this.res.sendStatus.calledWith(406).should.equal(true)
+      })
+
+      return it('should not call setDocWithLock', function () {
+        return this.DocumentManager.setDocWithLock.callCount.should.equal(0)
+      })
+    })
+  })
+
+  describe('flushProject', function () {
+    beforeEach(function () {
+      return (this.req = {
+        params: {
+          project_id: this.project_id
+        }
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1)
+        return this.HttpController.flushProject(this.req, this.res, this.next)
+      })
+
+      it('should flush the project', function () {
+        return this.ProjectManager.flushProjectWithLocks
+          .calledWith(this.project_id)
+          .should.equal(true)
+      })
+
+      it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            { project_id: this.project_id },
+            'flushing project via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.ProjectManager.flushProjectWithLocks = sinon
+          .stub()
+          .callsArgWith(1, new Error('oops'))
+        return this.HttpController.flushProject(this.req, this.res, this.next)
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  describe('flushDocIfLoaded', function () {
+    beforeEach(function () {
+      this.lines = ['one', 'two', 'three']
+      this.version = 42
+      return (this.req = {
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id
+        }
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.DocumentManager.flushDocIfLoadedWithLock = sinon
+          .stub()
+          .callsArgWith(2)
+        return this.HttpController.flushDocIfLoaded(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      it('should flush the doc', function () {
+        return this.DocumentManager.flushDocIfLoadedWithLock
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true)
+      })
+
+      it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            { doc_id: this.doc_id, project_id: this.project_id },
+            'flushing doc via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.DocumentManager.flushDocIfLoadedWithLock = sinon
+          .stub()
+          .callsArgWith(2, new Error('oops'))
+        return this.HttpController.flushDocIfLoaded(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  describe('deleteDoc', function () {
+    beforeEach(function () {
+      return (this.req = {
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id
+        },
+        query: {}
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.DocumentManager.flushAndDeleteDocWithLock = sinon
+          .stub()
+          .callsArgWith(3)
+        return this.HttpController.deleteDoc(this.req, this.res, this.next)
+      })
+
+      it('should flush and delete the doc', function () {
+        return this.DocumentManager.flushAndDeleteDocWithLock
+          .calledWith(this.project_id, this.doc_id, {
+            ignoreFlushErrors: false
+          })
+          .should.equal(true)
+      })
+
+      it('should flush project history', function () {
+        return this.HistoryManager.flushProjectChangesAsync
+          .calledWithExactly(this.project_id)
+          .should.equal(true)
+      })
+
+      it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            { doc_id: this.doc_id, project_id: this.project_id },
+            'deleting doc via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    describe('ignoring errors', function () {
+      beforeEach(function () {
+        this.req.query.ignore_flush_errors = 'true'
+        this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields()
+        return this.HttpController.deleteDoc(this.req, this.res, this.next)
+      })
+
+      it('should delete the doc', function () {
+        return this.DocumentManager.flushAndDeleteDocWithLock
+          .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: true })
+          .should.equal(true)
+      })
+
+      return it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.DocumentManager.flushAndDeleteDocWithLock = sinon
+          .stub()
+          .callsArgWith(3, new Error('oops'))
+        return this.HttpController.deleteDoc(this.req, this.res, this.next)
+      })
+
+      it('should flush project history', function () {
+        return this.HistoryManager.flushProjectChangesAsync
+          .calledWithExactly(this.project_id)
+          .should.equal(true)
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  describe('deleteProject', function () {
+    beforeEach(function () {
+      return (this.req = {
+        params: {
+          project_id: this.project_id
+        }
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.ProjectManager.flushAndDeleteProjectWithLocks = sinon
+          .stub()
+          .callsArgWith(2)
+        return this.HttpController.deleteProject(this.req, this.res, this.next)
+      })
+
+      it('should delete the project', function () {
+        return this.ProjectManager.flushAndDeleteProjectWithLocks
+          .calledWith(this.project_id)
+          .should.equal(true)
+      })
+
+      it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            { project_id: this.project_id },
+            'deleting project via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    describe('with the background=true option from realtime', function () {
+      beforeEach(function () {
+        this.ProjectManager.queueFlushAndDeleteProject = sinon
+          .stub()
+          .callsArgWith(1)
+        this.req.query = { background: true, shutdown: true }
+        return this.HttpController.deleteProject(this.req, this.res, this.next)
+      })
+
+      return it('should queue the flush and delete', function () {
+        return this.ProjectManager.queueFlushAndDeleteProject
+          .calledWith(this.project_id)
+          .should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.ProjectManager.flushAndDeleteProjectWithLocks = sinon
+          .stub()
+          .callsArgWith(2, new Error('oops'))
+        return this.HttpController.deleteProject(this.req, this.res, this.next)
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  describe('acceptChanges', function () {
+    beforeEach(function () {
+      return (this.req = {
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id,
+          change_id: (this.change_id = 'mock-change-od-1')
+        }
+      })
+    })
+
+    describe('successfully with a single change', function () {
+      beforeEach(function () {
+        this.DocumentManager.acceptChangesWithLock = sinon
+          .stub()
+          .callsArgWith(3)
+        return this.HttpController.acceptChanges(this.req, this.res, this.next)
+      })
+
+      it('should accept the change', function () {
+        return this.DocumentManager.acceptChangesWithLock
+          .calledWith(this.project_id, this.doc_id, [this.change_id])
+          .should.equal(true)
+      })
+
+      it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            { project_id: this.project_id, doc_id: this.doc_id },
+            'accepting 1 changes via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    describe('successfully with multiple changes', function () {
+      beforeEach(function () {
+        this.change_ids = [
+          'mock-change-od-1',
+          'mock-change-od-2',
+          'mock-change-od-3',
+          'mock-change-od-4'
+        ]
+        this.req.body = { change_ids: this.change_ids }
+        this.DocumentManager.acceptChangesWithLock = sinon
+          .stub()
+          .callsArgWith(3)
+        return this.HttpController.acceptChanges(this.req, this.res, this.next)
+      })
+
+      it('should accept the changes in the body payload', function () {
+        return this.DocumentManager.acceptChangesWithLock
+          .calledWith(this.project_id, this.doc_id, this.change_ids)
+          .should.equal(true)
+      })
+
+      return it('should log the request with the correct number of changes', function () {
+        return this.logger.log
+          .calledWith(
+            { project_id: this.project_id, doc_id: this.doc_id },
+            `accepting ${this.change_ids.length} changes via http`
+          )
+          .should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.DocumentManager.acceptChangesWithLock = sinon
+          .stub()
+          .callsArgWith(3, new Error('oops'))
+        return this.HttpController.acceptChanges(this.req, this.res, this.next)
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  describe('deleteComment', function () {
+    beforeEach(function () {
+      return (this.req = {
+        params: {
+          project_id: this.project_id,
+          doc_id: this.doc_id,
+          comment_id: (this.comment_id = 'mock-comment-id')
+        }
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.DocumentManager.deleteCommentWithLock = sinon
+          .stub()
+          .callsArgWith(3)
+        return this.HttpController.deleteComment(this.req, this.res, this.next)
+      })
+
+      it('should accept the change', function () {
+        return this.DocumentManager.deleteCommentWithLock
+          .calledWith(this.project_id, this.doc_id, this.comment_id)
+          .should.equal(true)
+      })
+
+      it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            {
+              project_id: this.project_id,
+              doc_id: this.doc_id,
+              comment_id: this.comment_id
+            },
+            'deleting comment via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.DocumentManager.deleteCommentWithLock = sinon
+          .stub()
+          .callsArgWith(3, new Error('oops'))
+        return this.HttpController.deleteComment(this.req, this.res, this.next)
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  describe('getProjectDocsAndFlushIfOld', function () {
+    beforeEach(function () {
+      this.state = '01234567890abcdef'
+      this.docs = [
+        { _id: '1234', lines: 'hello', v: 23 },
+        { _id: '4567', lines: 'world', v: 45 }
+      ]
+      return (this.req = {
+        params: {
+          project_id: this.project_id
+        },
+        query: {
+          state: this.state
+        }
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon
+          .stub()
+          .callsArgWith(3, null, this.docs)
+        return this.HttpController.getProjectDocsAndFlushIfOld(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      it('should get docs from the project manager', function () {
+        return this.ProjectManager.getProjectDocsAndFlushIfOld
+          .calledWith(this.project_id, this.state, {})
+          .should.equal(true)
+      })
+
+      it('should return a successful response', function () {
+        return this.res.send.calledWith(this.docs).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        return this.logger.log
+          .calledWith(
+            { project_id: this.project_id, exclude: [] },
+            'getting docs via http'
+          )
+          .should.equal(true)
+      })
+
+      it('should log the response', function () {
+        return this.logger.log
+          .calledWith(
+            { project_id: this.project_id, result: ['1234:23', '4567:45'] },
+            'got docs via http'
+          )
+          .should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    describe('when there is a conflict', function () {
+      beforeEach(function () {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon
+          .stub()
+          .callsArgWith(
+            3,
+            new Errors.ProjectStateChangedError('project state changed')
+          )
+        return this.HttpController.getProjectDocsAndFlushIfOld(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      return it('should return an HTTP 409 Conflict response', function () {
+        return this.res.sendStatus.calledWith(409).should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon
+          .stub()
+          .callsArgWith(3, new Error('oops'))
+        return this.HttpController.getProjectDocsAndFlushIfOld(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  describe('updateProject', function () {
+    beforeEach(function () {
+      this.projectHistoryId = 'history-id-123'
+      this.userId = 'user-id-123'
+      this.docUpdates = sinon.stub()
+      this.fileUpdates = sinon.stub()
+      this.version = 1234567
+      return (this.req = {
+        body: {
+          projectHistoryId: this.projectHistoryId,
+          userId: this.userId,
+          docUpdates: this.docUpdates,
+          fileUpdates: this.fileUpdates,
+          version: this.version
+        },
+        params: {
+          project_id: this.project_id
+        }
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.ProjectManager.updateProjectWithLocks = sinon
+          .stub()
+          .callsArgWith(6)
+        return this.HttpController.updateProject(this.req, this.res, this.next)
+      })
+
+      it('should accept the change', function () {
+        return this.ProjectManager.updateProjectWithLocks
+          .calledWith(
+            this.project_id,
+            this.projectHistoryId,
+            this.userId,
+            this.docUpdates,
+            this.fileUpdates,
+            this.version
+          )
+          .should.equal(true)
+      })
+
+      it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+
+      return it('should time the request', function () {
+        return this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.ProjectManager.updateProjectWithLocks = sinon
+          .stub()
+          .callsArgWith(6, new Error('oops'))
+        return this.HttpController.updateProject(this.req, this.res, this.next)
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+
+  return describe('resyncProjectHistory', function () {
+    beforeEach(function () {
+      this.projectHistoryId = 'history-id-123'
+      this.docs = sinon.stub()
+      this.files = sinon.stub()
+      this.fileUpdates = sinon.stub()
+      return (this.req = {
+        body: {
+          projectHistoryId: this.projectHistoryId,
+          docs: this.docs,
+          files: this.files
+        },
+        params: {
+          project_id: this.project_id
+        }
+      })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4)
+        return this.HttpController.resyncProjectHistory(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      it('should accept the change', function () {
+        return this.HistoryManager.resyncProjectHistory
+          .calledWith(
+            this.project_id,
+            this.projectHistoryId,
+            this.docs,
+            this.files
+          )
+          .should.equal(true)
+      })
+
+      return it('should return a successful No Content response', function () {
+        return this.res.sendStatus.calledWith(204).should.equal(true)
+      })
+    })
+
+    return describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.HistoryManager.resyncProjectHistory = sinon
+          .stub()
+          .callsArgWith(4, new Error('oops'))
+        return this.HttpController.resyncProjectHistory(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      return it('should call next with the error', function () {
+        return this.next.calledWith(new Error('oops')).should.equal(true)
+      })
+    })
+  })
+})
diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js
index 58392480d0..d91b25b64c 100644
--- a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js
+++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js
@@ -11,59 +11,57 @@
 * DS206: Consider reworking classes to avoid initClass
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-require('coffee-script');
-const sinon = require('sinon');
-const assert = require('assert');
-const path = require('path');
-const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js');
-const project_id = 1234;
-const doc_id = 5678;
-const blockingKey = `Blocking:${doc_id}`;
-const SandboxedModule = require('sandboxed-module');
+require('coffee-script')
+const sinon = require('sinon')
+const assert = require('assert')
+const path = require('path')
+const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js')
+const project_id = 1234
+const doc_id = 5678
+const blockingKey = `Blocking:${doc_id}`
+const SandboxedModule = require('sandboxed-module')

-describe('LockManager - checking the lock', function(){
+describe('LockManager - checking the lock', function () {
+  let Profiler
+  const existsStub = sinon.stub()

-  let Profiler;
-  const existsStub = sinon.stub();
-
-  const mocks = {
-    "logger-sharelatex": { log() {}
-    },
-    "redis-sharelatex": {
-      createClient(){
-        return {
-          auth() {},
-          exists: existsStub
-        };
-      }
-    },
-    "./Metrics": {inc() {}},
-    "./Profiler": (Profiler = (function() {
-      Profiler = class Profiler {
-        static initClass() {
-          this.prototype.log = sinon.stub().returns({ end: sinon.stub() });
-          this.prototype.end = sinon.stub();
-        }
-      };
-      Profiler.initClass();
-      return Profiler;
-    })())
-  };
-  const LockManager = SandboxedModule.require(modulePath, {requires: mocks});
+  const mocks = {
+    'logger-sharelatex': { log() {} },
+    'redis-sharelatex': {
+      createClient() {
+        return {
+          auth() {},
+          exists: existsStub
+        }
+      }
+    },
+    './Metrics': { inc() {} },
+    './Profiler': (Profiler = (function () {
+      Profiler = class Profiler {
+        static initClass() {
+          this.prototype.log = sinon.stub().returns({ end: sinon.stub() })
+          this.prototype.end = sinon.stub()
+        }
+      }
+      Profiler.initClass()
+      return Profiler
+    })())
+  }
+  const LockManager = SandboxedModule.require(modulePath, { requires: mocks })

-  it('should return true if the key does not exists', function(done){
-    existsStub.yields(null, "0");
-    return LockManager.checkLock(doc_id, (err, free) => {
-      free.should.equal(true);
-      return done();
-    });
-  });
+  it('should return true if the key does not exist', function (done) {
+    existsStub.yields(null, '0')
+    return LockManager.checkLock(doc_id, (err, free) => {
+      free.should.equal(true)
+      return done()
+    })
+  })

-  return it('should return false if the key does exists', function(done){
-    existsStub.yields(null, "1");
-    return LockManager.checkLock(doc_id, (err, free) => {
-      free.should.equal(false);
-      return done();
-    });
-  });
-});
+  return it('should return false if the key does exist', function (done) {
+    existsStub.yields(null, '1')
+    return 
LockManager.checkLock(doc_id, (err, free) => { + free.should.equal(false) + return done() + }) + }) +}) diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index 0cf4ef88b1..9c5d70e1e0 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -11,79 +11,90 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -require('coffee-script'); -const sinon = require('sinon'); -const assert = require('assert'); -const path = require('path'); -const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js'); -const project_id = 1234; -const doc_id = 5678; -const SandboxedModule = require('sandboxed-module'); +require('coffee-script') +const sinon = require('sinon') +const assert = require('assert') +const path = require('path') +const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js') +const project_id = 1234 +const doc_id = 5678 +const SandboxedModule = require('sandboxed-module') -describe('LockManager - releasing the lock', function(){ - beforeEach(function() { - let Profiler; - this.client = { - auth() {}, - eval: sinon.stub() - }; - const mocks = { - "logger-sharelatex": { - log() {}, - error() {} - }, - "redis-sharelatex": { - createClient : () => this.client - }, - "settings-sharelatex": { - redis: { - lock: { - key_schema: { - blockingKey({doc_id}) { return `Blocking:${doc_id}`; } - } - } - } - }, - "./Metrics": {inc() {}}, - "./Profiler": (Profiler = (function() { - Profiler = class Profiler { - static initClass() { - this.prototype.log = sinon.stub().returns({ end: sinon.stub() }); - this.prototype.end = sinon.stub(); - } - }; - Profiler.initClass(); - return Profiler; - })()) - }; - this.LockManager = SandboxedModule.require(modulePath, {requires: mocks}); - this.lockValue = "lock-value-stub"; - return this.callback = sinon.stub(); - }); +describe('LockManager - releasing the lock', function () { + beforeEach(function () { + let Profiler + this.client = { + auth() {}, + eval: sinon.stub() + } + const mocks = { + 'logger-sharelatex': { + log() {}, + error() {} + }, + 'redis-sharelatex': { + createClient: () => this.client + }, + 'settings-sharelatex': { + redis: { + lock: { + key_schema: { + blockingKey({ doc_id }) { + return `Blocking:${doc_id}` + } + } + } + } + }, + './Metrics': { inc() {} }, + './Profiler': (Profiler = (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() + } + } + Profiler.initClass() + return Profiler + })()) + } + this.LockManager = SandboxedModule.require(modulePath, { requires: mocks }) + this.lockValue = 'lock-value-stub' + return (this.callback = sinon.stub()) + }) - describe("when the lock is current", function() { - beforeEach(function() { - this.client.eval = sinon.stub().yields(null, 1); - return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback); - }); + describe('when the lock is current', function () { + beforeEach(function () { + this.client.eval = sinon.stub().yields(null, 1) + return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback) + }) - it('should clear the data from redis', function() { - return this.client.eval.calledWith(this.LockManager.unlockScript, 1, 
`Blocking:${doc_id}`, this.lockValue).should.equal(true); - }); + it('should clear the data from redis', function () { + return this.client.eval + .calledWith( + this.LockManager.unlockScript, + 1, + `Blocking:${doc_id}`, + this.lockValue + ) + .should.equal(true) + }) - return it('should call the callback', function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) - return describe("when the lock has expired", function() { - beforeEach(function() { - this.client.eval = sinon.stub().yields(null, 0); - return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback); - }); + return describe('when the lock has expired', function () { + beforeEach(function () { + this.client.eval = sinon.stub().yields(null, 0) + return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback) + }) - return it('should return an error if the lock has expired', function() { - return this.callback.calledWith(new Error("tried to release timed out lock")).should.equal(true); - }); - }); -}); + return it('should return an error if the lock has expired', function () { + return this.callback + .calledWith(new Error('tried to release timed out lock')) + .should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js index 0d99618e59..1c41f2b0b4 100644 --- a/services/document-updater/test/unit/js/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js @@ -14,116 +14,114 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/LockManager.js"; -const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/LockManager.js' +const SandboxedModule = require('sandboxed-module') -describe('LockManager - getting the lock', function() { - beforeEach(function() { - let Profiler; - this.LockManager = SandboxedModule.require(modulePath, { requires: { - "logger-sharelatex": { log() {} - }, - "redis-sharelatex": { - createClient : () => { - return {auth() {}}; - } - }, - "./Metrics": {inc() {}}, - "./Profiler": (Profiler = (function() { - Profiler = class Profiler { - static initClass() { - this.prototype.log = sinon.stub().returns({ end: sinon.stub() }); - this.prototype.end = sinon.stub(); - } - }; - Profiler.initClass(); - return Profiler; - })()) - } - } - ); - this.callback = sinon.stub(); - return this.doc_id = "doc-id-123"; - }); - - describe("when the lock is not set", function() { - beforeEach(function(done) { - this.lockValue = "mock-lock-value"; - this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, this.lockValue); - return this.LockManager.getLock(this.doc_id, (...args) => { - this.callback(...Array.from(args || [])); - return done(); - }); - }); +describe('LockManager - getting the lock', function () { + beforeEach(function () { + let Profiler + this.LockManager = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': { log() {} }, + 'redis-sharelatex': { + createClient: () => { + return { auth() {} } + } + }, + './Metrics': { inc() {} 
}, + './Profiler': (Profiler = (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() + } + } + Profiler.initClass() + return Profiler + })()) + } + }) + this.callback = sinon.stub() + return (this.doc_id = 'doc-id-123') + }) - it("should try to get the lock", function() { - return this.LockManager.tryLock - .calledWith(this.doc_id) - .should.equal(true); - }); + describe('when the lock is not set', function () { + beforeEach(function (done) { + this.lockValue = 'mock-lock-value' + this.LockManager.tryLock = sinon + .stub() + .callsArgWith(1, null, true, this.lockValue) + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])) + return done() + }) + }) - it("should only need to try once", function() { - return this.LockManager.tryLock.callCount.should.equal(1); - }); + it('should try to get the lock', function () { + return this.LockManager.tryLock.calledWith(this.doc_id).should.equal(true) + }) - return it("should return the callback with the lock value", function() { - return this.callback.calledWith(null, this.lockValue).should.equal(true); - }); - }); + it('should only need to try once', function () { + return this.LockManager.tryLock.callCount.should.equal(1) + }) - describe("when the lock is initially set", function() { - beforeEach(function(done) { - this.lockValue = "mock-lock-value"; - const startTime = Date.now(); - let tries = 0; - this.LockManager.LOCK_TEST_INTERVAL = 5; - this.LockManager.tryLock = (doc_id, callback) => { - if (callback == null) { callback = function(error, isFree) {}; } - if (((Date.now() - startTime) < 20) || (tries < 2)) { - tries = tries + 1; - return callback(null, false); - } else { - return callback(null, true, this.lockValue); - } - }; - sinon.spy(this.LockManager, "tryLock"); + return it('should return the callback with the lock value', function () { + return this.callback.calledWith(null, this.lockValue).should.equal(true) + }) + }) - return this.LockManager.getLock(this.doc_id, (...args) => { - this.callback(...Array.from(args || [])); - return done(); - }); - }); + describe('when the lock is initially set', function () { + beforeEach(function (done) { + this.lockValue = 'mock-lock-value' + const startTime = Date.now() + let tries = 0 + this.LockManager.LOCK_TEST_INTERVAL = 5 + this.LockManager.tryLock = (doc_id, callback) => { + if (callback == null) { + callback = function (error, isFree) {} + } + if (Date.now() - startTime < 20 || tries < 2) { + tries = tries + 1 + return callback(null, false) + } else { + return callback(null, true, this.lockValue) + } + } + sinon.spy(this.LockManager, 'tryLock') - it("should call tryLock multiple times until free", function() { - return (this.LockManager.tryLock.callCount > 1).should.equal(true); - }); + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])) + return done() + }) + }) - return it("should return the callback with the lock value", function() { - return this.callback.calledWith(null, this.lockValue).should.equal(true); - }); - }); + it('should call tryLock multiple times until free', function () { + return (this.LockManager.tryLock.callCount > 1).should.equal(true) + }) - return describe("when the lock times out", function() { - beforeEach(function(done) { - const time = Date.now(); - this.LockManager.MAX_LOCK_WAIT_TIME = 5; - this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, 
false); - return this.LockManager.getLock(this.doc_id, (...args) => { - this.callback(...Array.from(args || [])); - return done(); - }); - }); - - return it("should return the callback with an error", function() { - const e = new Error("Timeout"); - e.doc_id = this.doc_id; - return this.callback.calledWith(e).should.equal(true); - }); - }); -}); - + return it('should return the callback with the lock value', function () { + return this.callback.calledWith(null, this.lockValue).should.equal(true) + }) + }) + return describe('when the lock times out', function () { + beforeEach(function (done) { + const time = Date.now() + this.LockManager.MAX_LOCK_WAIT_TIME = 5 + this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false) + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])) + return done() + }) + }) + return it('should return the callback with an error', function () { + const e = new Error('Timeout') + e.doc_id = this.doc_id + return this.callback.calledWith(e).should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js index 9f6acf0002..77aecd87d1 100644 --- a/services/document-updater/test/unit/js/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -11,129 +11,140 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/LockManager.js"; -const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/LockManager.js' +const SandboxedModule = require('sandboxed-module') -describe('LockManager - trying the lock', function() { - beforeEach(function() { - let Profiler; - this.LockManager = SandboxedModule.require(modulePath, { requires: { - "logger-sharelatex": { log() {} - }, - "redis-sharelatex": { - createClient : () => { - return { - auth() {}, - set: (this.set = sinon.stub()) - }; - } - }, - "./Metrics": {inc() {}}, - "settings-sharelatex": { - redis: { - lock: { - key_schema: { - blockingKey({doc_id}) { return `Blocking:${doc_id}`; } - } - } - } - }, - "./Profiler": (this.Profiler = (Profiler = (function() { - Profiler = class Profiler { - static initClass() { - this.prototype.log = sinon.stub().returns({ end: sinon.stub() }); - this.prototype.end = sinon.stub(); - } - }; - Profiler.initClass(); - return Profiler; - })())) - } - } - ); +describe('LockManager - trying the lock', function () { + beforeEach(function () { + let Profiler + this.LockManager = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': { log() {} }, + 'redis-sharelatex': { + createClient: () => { + return { + auth() {}, + set: (this.set = sinon.stub()) + } + } + }, + './Metrics': { inc() {} }, + 'settings-sharelatex': { + redis: { + lock: { + key_schema: { + blockingKey({ doc_id }) { + return `Blocking:${doc_id}` + } + } + } + } + }, + './Profiler': (this.Profiler = Profiler = (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() + } + } + Profiler.initClass() + return Profiler + })()) + } + }) - 
this.callback = sinon.stub(); - return this.doc_id = "doc-id-123"; - }); - - describe("when the lock is not set", function() { - beforeEach(function() { - this.lockValue = "mock-lock-value"; - this.LockManager.randomLock = sinon.stub().returns(this.lockValue); - this.set.callsArgWith(5, null, "OK"); - return this.LockManager.tryLock(this.doc_id, this.callback); - }); + this.callback = sinon.stub() + return (this.doc_id = 'doc-id-123') + }) - it("should set the lock key with an expiry if it is not set", function() { - return this.set.calledWith(`Blocking:${this.doc_id}`, this.lockValue, "EX", 30, "NX") - .should.equal(true); - }); + describe('when the lock is not set', function () { + beforeEach(function () { + this.lockValue = 'mock-lock-value' + this.LockManager.randomLock = sinon.stub().returns(this.lockValue) + this.set.callsArgWith(5, null, 'OK') + return this.LockManager.tryLock(this.doc_id, this.callback) + }) - return it("should return the callback with true and the lock value", function() { - return this.callback.calledWith(null, true, this.lockValue).should.equal(true); - }); - }); + it('should set the lock key with an expiry if it is not set', function () { + return this.set + .calledWith(`Blocking:${this.doc_id}`, this.lockValue, 'EX', 30, 'NX') + .should.equal(true) + }) - describe("when the lock is already set", function() { - beforeEach(function() { - this.set.callsArgWith(5, null, null); - return this.LockManager.tryLock(this.doc_id, this.callback); - }); + return it('should return the callback with true and the lock value', function () { + return this.callback + .calledWith(null, true, this.lockValue) + .should.equal(true) + }) + }) - return it("should return the callback with false", function() { - return this.callback.calledWith(null, false).should.equal(true); - }); - }); + describe('when the lock is already set', function () { + beforeEach(function () { + this.set.callsArgWith(5, null, null) + return this.LockManager.tryLock(this.doc_id, this.callback) + }) - return describe("when it takes a long time for redis to set the lock", function() { - beforeEach(function() { - this.Profiler.prototype.end = () => 7000; // take a long time - this.Profiler.prototype.log = sinon.stub().returns({ end: this.Profiler.prototype.end }); - this.lockValue = "mock-lock-value"; - this.LockManager.randomLock = sinon.stub().returns(this.lockValue); - this.LockManager.releaseLock = sinon.stub().callsArgWith(2,null); - return this.set.callsArgWith(5, null, "OK"); - }); + return it('should return the callback with false', function () { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) - describe("in all cases", function() { - beforeEach(function() { - return this.LockManager.tryLock(this.doc_id, this.callback); - }); + return describe('when it takes a long time for redis to set the lock', function () { + beforeEach(function () { + this.Profiler.prototype.end = () => 7000 // take a long time + this.Profiler.prototype.log = sinon + .stub() + .returns({ end: this.Profiler.prototype.end }) + this.lockValue = 'mock-lock-value' + this.LockManager.randomLock = sinon.stub().returns(this.lockValue) + this.LockManager.releaseLock = sinon.stub().callsArgWith(2, null) + return this.set.callsArgWith(5, null, 'OK') + }) - it("should set the lock key with an expiry if it is not set", function() { - return this.set.calledWith(`Blocking:${this.doc_id}`, this.lockValue, "EX", 30, "NX") - .should.equal(true); - }); + describe('in all cases', function () { + beforeEach(function () { + 
return this.LockManager.tryLock(this.doc_id, this.callback) + }) - return it("should try to release the lock", function() { - return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); - }); - }); + it('should set the lock key with an expiry if it is not set', function () { + return this.set + .calledWith(`Blocking:${this.doc_id}`, this.lockValue, 'EX', 30, 'NX') + .should.equal(true) + }) - describe("if the lock is released successfully", function() { - beforeEach(function() { - this.LockManager.releaseLock = sinon.stub().callsArgWith(2,null); - return this.LockManager.tryLock(this.doc_id, this.callback); - }); + return it('should try to release the lock', function () { + return this.LockManager.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + }) - return it("should return the callback with false", function() { - return this.callback.calledWith(null, false).should.equal(true); - }); - }); + describe('if the lock is released successfully', function () { + beforeEach(function () { + this.LockManager.releaseLock = sinon.stub().callsArgWith(2, null) + return this.LockManager.tryLock(this.doc_id, this.callback) + }) - return describe("if the lock has already timed out", function() { - beforeEach(function() { - this.LockManager.releaseLock = sinon.stub().callsArgWith(2, new Error("tried to release timed out lock")); - return this.LockManager.tryLock(this.doc_id, this.callback); - }); + return it('should return the callback with false', function () { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) - return it("should return the callback with an error", function() { - const e = new Error("tried to release timed out lock"); - return this.callback.calledWith(e).should.equal(true); - }); - }); - }); -}); + return describe('if the lock has already timed out', function () { + beforeEach(function () { + this.LockManager.releaseLock = sinon + .stub() + .callsArgWith(2, new Error('tried to release timed out lock')) + return this.LockManager.tryLock(this.doc_id, this.callback) + }) + + return it('should return the callback with an error', function () { + const e = new Error('tried to release timed out lock') + return this.callback.calledWith(e).should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js index 68bedb4b2c..98039d7823 100644 --- a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -10,301 +10,427 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/PersistenceManager.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors"); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/PersistenceManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') -describe("PersistenceManager", function() { - beforeEach(function() { - let Timer; - this.request = sinon.stub(); - this.request.defaults = () => 
this.request; - this.PersistenceManager = SandboxedModule.require(modulePath, { requires: { - "requestretry": this.request, - "settings-sharelatex": (this.Settings = {}), - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()), - inc: sinon.stub() - }), - "logger-sharelatex": (this.logger = {log: sinon.stub(), err: sinon.stub()}) - } - }); - this.project_id = "project-id-123"; - this.projectHistoryId = "history-id-123"; - this.doc_id = "doc-id-123"; - this.lines = ["one", "two", "three"]; - this.version = 42; - this.callback = sinon.stub(); - this.ranges = { comments: "mock", entries: "mock" }; - this.pathname = '/a/b/c.tex'; - this.lastUpdatedAt = Date.now(); - this.lastUpdatedBy = 'last-author-id'; - return this.Settings.apis = { - web: { - url: (this.url = "www.example.com"), - user: (this.user = "sharelatex"), - pass: (this.pass = "password") - } - }; - }); +describe('PersistenceManager', function () { + beforeEach(function () { + let Timer + this.request = sinon.stub() + this.request.defaults = () => this.request + this.PersistenceManager = SandboxedModule.require(modulePath, { + requires: { + requestretry: this.request, + 'settings-sharelatex': (this.Settings = {}), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + inc: sinon.stub() + }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + err: sinon.stub() + }) + } + }) + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.doc_id = 'doc-id-123' + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.callback = sinon.stub() + this.ranges = { comments: 'mock', entries: 'mock' } + this.pathname = '/a/b/c.tex' + this.lastUpdatedAt = Date.now() + this.lastUpdatedBy = 'last-author-id' + return (this.Settings.apis = { + web: { + url: (this.url = 'www.example.com'), + user: (this.user = 'sharelatex'), + pass: (this.pass = 'password') + } + }) + }) - describe("getDoc", function() { - beforeEach(function() { - return this.webResponse = { - lines: this.lines, - version: this.version, - ranges: this.ranges, - pathname: this.pathname, - projectHistoryId: this.projectHistoryId - };}); + describe('getDoc', function () { + beforeEach(function () { + return (this.webResponse = { + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId + }) + }) - describe("with a successful response from the web api", function() { - beforeEach(function() { - this.request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(this.webResponse)); - return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); - }); + describe('with a successful response from the web api', function () { + beforeEach(function () { + this.request.callsArgWith( + 1, + null, + { statusCode: 200 }, + JSON.stringify(this.webResponse) + ) + return this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) - it("should call the web api", function() { - return this.request - .calledWith({ - url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`, - method: "GET", - headers: { - "accept": "application/json" - }, - auth: { - user: this.user, - pass: this.pass, - sendImmediately: true - }, - jar: false, - 
timeout: 5000 - }) - .should.equal(true); - }); + it('should call the web api', function () { + return this.request + .calledWith({ + url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`, + method: 'GET', + headers: { + accept: 'application/json' + }, + auth: { + user: this.user, + pass: this.pass, + sendImmediately: true + }, + jar: false, + timeout: 5000 + }) + .should.equal(true) + }) - it("should call the callback with the doc lines, version and ranges", function() { - return this.callback - .calledWith(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId) - .should.equal(true); - }); + it('should call the callback with the doc lines, version and ranges', function () { + return this.callback + .calledWith( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId + ) + .should.equal(true) + }) - it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); + it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) - return it("should increment the metric", function() { - return this.Metrics.inc.calledWith("getDoc", 1, {status: 200}).should.equal(true); - }); - }); + return it('should increment the metric', function () { + return this.Metrics.inc + .calledWith('getDoc', 1, { status: 200 }) + .should.equal(true) + }) + }) - describe("when request returns an error", function() { - beforeEach(function() { - this.error = new Error("oops"); - this.error.code = "EOOPS"; - this.request.callsArgWith(1, this.error, null, null); - return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); - }); + describe('when request returns an error', function () { + beforeEach(function () { + this.error = new Error('oops') + this.error.code = 'EOOPS' + this.request.callsArgWith(1, this.error, null, null) + return this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) - it("should return the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); + it('should return the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) - it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); + it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) - return it("should increment the metric", function() { - return this.Metrics.inc.calledWith("getDoc", 1, {status: "EOOPS"}).should.equal(true); - }); - }); + return it('should increment the metric', function () { + return this.Metrics.inc + .calledWith('getDoc', 1, { status: 'EOOPS' }) + .should.equal(true) + }) + }) - describe("when the request returns 404", function() { - beforeEach(function() { - this.request.callsArgWith(1, null, {statusCode: 404}, ""); - return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback); - }); + describe('when the request returns 404', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 404 }, '') + return this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) - it("should return a NotFoundError", function() { - return this.callback.calledWith(new Errors.NotFoundError("not found")).should.equal(true); - }); + it('should return a NotFoundError', function () { + return this.callback + 
.calledWith(new Errors.NotFoundError('not found'))
+        .should.equal(true)
+    })
-    it("should time the execution", function() {
-      return this.Metrics.Timer.prototype.done.called.should.equal(true);
-    });
+    it('should time the execution', function () {
+      return this.Metrics.Timer.prototype.done.called.should.equal(true)
+    })
-    return it("should increment the metric", function() {
-      return this.Metrics.inc.calledWith("getDoc", 1, {status: 404}).should.equal(true);
-    });
-  });
+    return it('should increment the metric', function () {
+      return this.Metrics.inc
+        .calledWith('getDoc', 1, { status: 404 })
+        .should.equal(true)
+    })
+  })
-  describe("when the request returns an error status code", function() {
-    beforeEach(function() {
-      this.request.callsArgWith(1, null, {statusCode: 500}, "");
-      return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback);
-    });
+  describe('when the request returns an error status code', function () {
+    beforeEach(function () {
+      this.request.callsArgWith(1, null, { statusCode: 500 }, '')
+      return this.PersistenceManager.getDoc(
+        this.project_id,
+        this.doc_id,
+        this.callback
+      )
+    })
-    it("should return an error", function() {
-      return this.callback.calledWith(new Error("web api error")).should.equal(true);
-    });
+    it('should return an error', function () {
+      return this.callback
+        .calledWith(new Error('web api error'))
+        .should.equal(true)
+    })
-    it("should time the execution", function() {
-      return this.Metrics.Timer.prototype.done.called.should.equal(true);
-    });
+    it('should time the execution', function () {
+      return this.Metrics.Timer.prototype.done.called.should.equal(true)
+    })
-    return it("should increment the metric", function() {
-      return this.Metrics.inc.calledWith("getDoc", 1, {status: 500}).should.equal(true);
-    });
-  });
+    return it('should increment the metric', function () {
+      return this.Metrics.inc
+        .calledWith('getDoc', 1, { status: 500 })
+        .should.equal(true)
+    })
+  })
-  describe("when request returns an doc without lines", function() {
-    beforeEach(function() {
-      delete this.webResponse.lines;
-      this.request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(this.webResponse));
-      return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback);
-    });
+  describe('when request returns a doc without lines', function () {
+    beforeEach(function () {
+      delete this.webResponse.lines
+      this.request.callsArgWith(
+        1,
+        null,
+        { statusCode: 200 },
+        JSON.stringify(this.webResponse)
+      )
+      return this.PersistenceManager.getDoc(
+        this.project_id,
+        this.doc_id,
+        this.callback
+      )
+    })
-    return it("should return and error", function() {
-      return this.callback.calledWith(new Error("web API response had no doc lines")).should.equal(true);
-    });
-  });
+    return it('should return an error', function () {
+      return this.callback
+        .calledWith(new Error('web API response had no doc lines'))
+        .should.equal(true)
+    })
+  })
-  describe("when request returns an doc without a version", function() {
-    beforeEach(function() {
-      delete this.webResponse.version;
-      this.request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(this.webResponse));
-      return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback);
-    });
+  describe('when request returns a doc without a version', function () {
+    beforeEach(function () {
+      delete this.webResponse.version
+      this.request.callsArgWith(
+        1,
+        null,
+        { statusCode: 200 },
+        JSON.stringify(this.webResponse)
+      )
+      return this.PersistenceManager.getDoc(
+        this.project_id,
+        this.doc_id,
+        this.callback
+      )
+    })
-    return it("should return and error", function() {
-      return this.callback.calledWith(new Error("web API response had no valid doc version")).should.equal(true);
-    });
-  });
+    return it('should return an error', function () {
+      return this.callback
+        .calledWith(new Error('web API response had no valid doc version'))
+        .should.equal(true)
+    })
+  })
-  return describe("when request returns an doc without a pathname", function() {
-    beforeEach(function() {
-      delete this.webResponse.pathname;
-      this.request.callsArgWith(1, null, {statusCode: 200}, JSON.stringify(this.webResponse));
-      return this.PersistenceManager.getDoc(this.project_id, this.doc_id, this.callback);
-    });
+  return describe('when request returns a doc without a pathname', function () {
+    beforeEach(function () {
+      delete this.webResponse.pathname
+      this.request.callsArgWith(
+        1,
+        null,
+        { statusCode: 200 },
+        JSON.stringify(this.webResponse)
+      )
+      return this.PersistenceManager.getDoc(
+        this.project_id,
+        this.doc_id,
+        this.callback
+      )
+    })
-    return it("should return and error", function() {
-      return this.callback.calledWith(new Error("web API response had no valid doc pathname")).should.equal(true);
-    });
-  });
-  });
+    return it('should return an error', function () {
+      return this.callback
+        .calledWith(new Error('web API response had no valid doc pathname'))
+        .should.equal(true)
+    })
+  })
+  })
-  return describe("setDoc", function() {
-    describe("with a successful response from the web api", function() {
-      beforeEach(function() {
-        this.request.callsArgWith(1, null, {statusCode: 200});
-        return this.PersistenceManager.setDoc(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy, this.callback);
-      });
+  return describe('setDoc', function () {
+    describe('with a successful response from the web api', function () {
+      beforeEach(function () {
+        this.request.callsArgWith(1, null, { statusCode: 200 })
+        return this.PersistenceManager.setDoc(
+          this.project_id,
+          this.doc_id,
+          this.lines,
+          this.version,
+          this.ranges,
+          this.lastUpdatedAt,
+          this.lastUpdatedBy,
+          this.callback
+        )
+      })
-      it("should call the web api", function() {
-        return this.request
-          .calledWith({
-            url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`,
-            json: {
-              lines: this.lines,
-              version: this.version,
-              ranges: this.ranges,
-              lastUpdatedAt: this.lastUpdatedAt,
-              lastUpdatedBy: this.lastUpdatedBy
-            },
-            method: "POST",
-            auth: {
-              user: this.user,
-              pass: this.pass,
-              sendImmediately: true
-            },
-            jar: false,
-            timeout: 5000
-          })
-          .should.equal(true);
-      });
+      it('should call the web api', function () {
+        return this.request
+          .calledWith({
+            url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`,
+            json: {
+              lines: this.lines,
+              version: this.version,
+              ranges: this.ranges,
+              lastUpdatedAt: this.lastUpdatedAt,
+              lastUpdatedBy: this.lastUpdatedBy
+            },
+            method: 'POST',
+            auth: {
+              user: this.user,
+              pass: this.pass,
+              sendImmediately: true
+            },
+            jar: false,
+            timeout: 5000
+          })
+          .should.equal(true)
+      })
-      it("should call the callback without error", function() {
-        return this.callback.calledWith(null).should.equal(true);
-      });
+      it('should call the callback without error', function () {
+        return this.callback.calledWith(null).should.equal(true)
+      })
-      it("should time the execution", function() {
-        return this.Metrics.Timer.prototype.done.called.should.equal(true);
-      });
+      it('should time the execution', function ()
{ + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) - return it("should increment the metric", function() { - return this.Metrics.inc.calledWith("setDoc", 1, {status: 200}).should.equal(true); - }); - }); + return it('should increment the metric', function () { + return this.Metrics.inc + .calledWith('setDoc', 1, { status: 200 }) + .should.equal(true) + }) + }) - describe("when request returns an error", function() { - beforeEach(function() { - this.error = new Error("oops"); - this.error.code = "EOOPS"; - this.request.callsArgWith(1, this.error, null, null); - return this.PersistenceManager.setDoc(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy, this.callback); - }); + describe('when request returns an error', function () { + beforeEach(function () { + this.error = new Error('oops') + this.error.code = 'EOOPS' + this.request.callsArgWith(1, this.error, null, null) + return this.PersistenceManager.setDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.callback + ) + }) - it("should return the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); + it('should return the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) - it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); + it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) - return it("should increment the metric", function() { - return this.Metrics.inc.calledWith("setDoc", 1, {status: "EOOPS"}).should.equal(true); - }); - }); + return it('should increment the metric', function () { + return this.Metrics.inc + .calledWith('setDoc', 1, { status: 'EOOPS' }) + .should.equal(true) + }) + }) - describe("when the request returns 404", function() { - beforeEach(function() { - this.request.callsArgWith(1, null, {statusCode: 404}, ""); - return this.PersistenceManager.setDoc(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy, this.callback); - }); + describe('when the request returns 404', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 404 }, '') + return this.PersistenceManager.setDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.callback + ) + }) - it("should return a NotFoundError", function() { - return this.callback.calledWith(new Errors.NotFoundError("not found")).should.equal(true); - }); + it('should return a NotFoundError', function () { + return this.callback + .calledWith(new Errors.NotFoundError('not found')) + .should.equal(true) + }) - it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); + it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) - return it("should increment the metric", function() { - return this.Metrics.inc.calledWith("setDoc", 1, {status: 404}).should.equal(true); - }); - }); + return it('should increment the metric', function () { + return this.Metrics.inc + .calledWith('setDoc', 1, { status: 404 }) + .should.equal(true) + }) + }) - return describe("when the request returns an error status code", function() { - 
beforeEach(function() { - this.request.callsArgWith(1, null, {statusCode: 500}, ""); - return this.PersistenceManager.setDoc(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.lastUpdatedAt, this.lastUpdatedBy, this.callback); - }); + return describe('when the request returns an error status code', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 500 }, '') + return this.PersistenceManager.setDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.callback + ) + }) - it("should return an error", function() { - return this.callback.calledWith(new Error("web api error")).should.equal(true); - }); + it('should return an error', function () { + return this.callback + .calledWith(new Error('web api error')) + .should.equal(true) + }) - it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); + it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) - return it("should increment the metric", function() { - return this.Metrics.inc.calledWith("setDoc", 1, {status: 500}).should.equal(true); - }); - }); - }); -}); + return it('should increment the metric', function () { + return this.Metrics.inc + .calledWith('setDoc', 1, { status: 500 }) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index d3bba8f42f..c4ec5b1c12 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -11,149 +11,191 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/ProjectHistoryRedisManager.js"; -const SandboxedModule = require('sandboxed-module'); -const tk = require("timekeeper"); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/ProjectHistoryRedisManager.js' +const SandboxedModule = require('sandboxed-module') +const tk = require('timekeeper') -describe("ProjectHistoryRedisManager", function() { - beforeEach(function() { - this.project_id = "project-id-123"; - this.projectHistoryId = "history-id-123"; - this.user_id = "user-id-123"; - this.callback = sinon.stub(); - this.rclient = {}; - tk.freeze(new Date()); - return this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, { - requires: { - "settings-sharelatex": (this.settings = { - redis: { - project_history: { - key_schema: { - projectHistoryOps({project_id}) { return `ProjectHistory:Ops:${project_id}`; }, - projectHistoryFirstOpTimestamp({project_id}) { return `ProjectHistory:FirstOpTimestamp:${project_id}`; } - } - } - } - }), - "redis-sharelatex": { - createClient: () => this.rclient - }, - "logger-sharelatex": { - log() {} - }, - "./Metrics": (this.metrics = { summary: sinon.stub()}) - }, - globals: { - JSON: (this.JSON = JSON) - } - } - ); - }); +describe('ProjectHistoryRedisManager', 
function () { + beforeEach(function () { + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.user_id = 'user-id-123' + this.callback = sinon.stub() + this.rclient = {} + tk.freeze(new Date()) + return (this.ProjectHistoryRedisManager = SandboxedModule.require( + modulePath, + { + requires: { + 'settings-sharelatex': (this.settings = { + redis: { + project_history: { + key_schema: { + projectHistoryOps({ project_id }) { + return `ProjectHistory:Ops:${project_id}` + }, + projectHistoryFirstOpTimestamp({ project_id }) { + return `ProjectHistory:FirstOpTimestamp:${project_id}` + } + } + } + } + }), + 'redis-sharelatex': { + createClient: () => this.rclient + }, + 'logger-sharelatex': { + log() {} + }, + './Metrics': (this.metrics = { summary: sinon.stub() }) + }, + globals: { + JSON: (this.JSON = JSON) + } + } + )) + }) - afterEach(function() { return tk.reset(); }); + afterEach(function () { + return tk.reset() + }) - describe("queueOps", function() { - beforeEach(function() { - this.ops = ["mock-op-1", "mock-op-2"]; - this.multi = {exec: sinon.stub()}; - this.multi.rpush = sinon.stub(); - this.multi.setnx = sinon.stub(); - this.rclient.multi = () => this.multi; - // @rclient = multi: () => @multi - return this.ProjectHistoryRedisManager.queueOps(this.project_id, ...Array.from(this.ops), this.callback); - }); + describe('queueOps', function () { + beforeEach(function () { + this.ops = ['mock-op-1', 'mock-op-2'] + this.multi = { exec: sinon.stub() } + this.multi.rpush = sinon.stub() + this.multi.setnx = sinon.stub() + this.rclient.multi = () => this.multi + // @rclient = multi: () => @multi + return this.ProjectHistoryRedisManager.queueOps( + this.project_id, + ...Array.from(this.ops), + this.callback + ) + }) - it("should queue an update", function() { - return this.multi.rpush - .calledWithExactly( - `ProjectHistory:Ops:${this.project_id}`, - this.ops[0], - this.ops[1] - ).should.equal(true); - }); + it('should queue an update', function () { + return this.multi.rpush + .calledWithExactly( + `ProjectHistory:Ops:${this.project_id}`, + this.ops[0], + this.ops[1] + ) + .should.equal(true) + }) - return it("should set the queue timestamp if not present", function() { - return this.multi.setnx - .calledWithExactly( - `ProjectHistory:FirstOpTimestamp:${this.project_id}`, - Date.now() - ).should.equal(true); - }); - }); + return it('should set the queue timestamp if not present', function () { + return this.multi.setnx + .calledWithExactly( + `ProjectHistory:FirstOpTimestamp:${this.project_id}`, + Date.now() + ) + .should.equal(true) + }) + }) - describe("queueRenameEntity", function() { - beforeEach(function() { - this.file_id = 1234; + describe('queueRenameEntity', function () { + beforeEach(function () { + this.file_id = 1234 - this.rawUpdate = { - pathname: (this.pathname = '/old'), - newPathname: (this.newPathname = '/new'), - version: (this.version = 2) - }; + this.rawUpdate = { + pathname: (this.pathname = '/old'), + newPathname: (this.newPathname = '/new'), + version: (this.version = 2) + } - this.ProjectHistoryRedisManager.queueOps = sinon.stub(); - return this.ProjectHistoryRedisManager.queueRenameEntity(this.project_id, this.projectHistoryId, 'file', this.file_id, this.user_id, this.rawUpdate, this.callback); - }); + this.ProjectHistoryRedisManager.queueOps = sinon.stub() + return this.ProjectHistoryRedisManager.queueRenameEntity( + this.project_id, + this.projectHistoryId, + 'file', + this.file_id, + this.user_id, + this.rawUpdate, + this.callback + ) 
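
// The queueOps contract exercised earlier in this file, as a rough sketch:
// ops are RPUSHed onto the project's history list, and SETNX records when the
// first unflushed op was queued (SETNX only writes an absent key, so the
// timestamp always marks the oldest op, not the newest). This assumes a
// node_redis-style multi, like the one stubbed in these tests; the helper
// name is illustrative:
function queueOpsSketch(rclient, project_id, ops, callback) {
  const multi = rclient.multi()
  multi.rpush(`ProjectHistory:Ops:${project_id}`, ...ops)
  multi.setnx(`ProjectHistory:FirstOpTimestamp:${project_id}`, Date.now())
  return multi.exec(callback)
}
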
+ }) - return it("should queue an update", function() { - const update = { - pathname: this.pathname, - new_pathname: this.newPathname, - meta: { - user_id: this.user_id, - ts: new Date() - }, - version: this.version, - projectHistoryId: this.projectHistoryId, - file: this.file_id - }; + return it('should queue an update', function () { + const update = { + pathname: this.pathname, + new_pathname: this.newPathname, + meta: { + user_id: this.user_id, + ts: new Date() + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + file: this.file_id + } - return this.ProjectHistoryRedisManager.queueOps - .calledWithExactly(this.project_id, this.JSON.stringify(update), this.callback) - .should.equal(true); - }); - }); + return this.ProjectHistoryRedisManager.queueOps + .calledWithExactly( + this.project_id, + this.JSON.stringify(update), + this.callback + ) + .should.equal(true) + }) + }) - return describe("queueAddEntity", function() { - beforeEach(function() { - this.rclient.rpush = sinon.stub().yields(); - this.doc_id = 1234; + return describe('queueAddEntity', function () { + beforeEach(function () { + this.rclient.rpush = sinon.stub().yields() + this.doc_id = 1234 - this.rawUpdate = { - pathname: (this.pathname = '/old'), - docLines: (this.docLines = 'a\nb'), - version: (this.version = 2), - url: (this.url = 'filestore.example.com') - }; + this.rawUpdate = { + pathname: (this.pathname = '/old'), + docLines: (this.docLines = 'a\nb'), + version: (this.version = 2), + url: (this.url = 'filestore.example.com') + } - this.ProjectHistoryRedisManager.queueOps = sinon.stub(); - return this.ProjectHistoryRedisManager.queueAddEntity(this.project_id, this.projectHistoryId, 'doc', this.doc_id, this.user_id, this.rawUpdate, this.callback); - }); + this.ProjectHistoryRedisManager.queueOps = sinon.stub() + return this.ProjectHistoryRedisManager.queueAddEntity( + this.project_id, + this.projectHistoryId, + 'doc', + this.doc_id, + this.user_id, + this.rawUpdate, + this.callback + ) + }) - it("should queue an update", function() { - const update = { - pathname: this.pathname, - docLines: this.docLines, - url: this.url, - meta: { - user_id: this.user_id, - ts: new Date() - }, - version: this.version, - projectHistoryId: this.projectHistoryId, - doc: this.doc_id - }; + it('should queue an update', function () { + const update = { + pathname: this.pathname, + docLines: this.docLines, + url: this.url, + meta: { + user_id: this.user_id, + ts: new Date() + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + doc: this.doc_id + } - return this.ProjectHistoryRedisManager.queueOps - .calledWithExactly(this.project_id, this.JSON.stringify(update), this.callback) - .should.equal(true); - }); + return this.ProjectHistoryRedisManager.queueOps + .calledWithExactly( + this.project_id, + this.JSON.stringify(update), + this.callback + ) + .should.equal(true) + }) - describe("queueResyncProjectStructure", function() { return it("should queue an update", function() {}); }); + describe('queueResyncProjectStructure', function () { + return it('should queue an update', function () {}) + }) - return describe("queueResyncDocContent", function() { return it("should queue an update", function() {}); }); - }); -}); + return describe('queueResyncDocContent', function () { + return it('should queue an update', function () {}) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js 
b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js index ebebf64740..7afa407e7c 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js @@ -12,121 +12,146 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/ProjectManager.js"; -const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/ProjectManager.js' +const SandboxedModule = require('sandboxed-module') -describe("ProjectManager - flushAndDeleteProject", function() { - beforeEach(function() { - let Timer; - this.ProjectManager = SandboxedModule.require(modulePath, { requires: { - "./RedisManager": (this.RedisManager = {}), - "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), - "./DocumentManager": (this.DocumentManager = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), - "./HistoryManager": (this.HistoryManager = - {flushProjectChanges: sinon.stub().callsArg(2)}), - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()) - }) - } - } - ); - this.project_id = "project-id-123"; - return this.callback = sinon.stub(); - }); +describe('ProjectManager - flushAndDeleteProject', function () { + beforeEach(function () { + let Timer + this.ProjectManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './DocumentManager': (this.DocumentManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), + './HistoryManager': (this.HistoryManager = { + flushProjectChanges: sinon.stub().callsArg(2) + }), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()) + }) + } + }) + this.project_id = 'project-id-123' + return (this.callback = sinon.stub()) + }) - describe("successfully", function() { - beforeEach(function(done) { - this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; - this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); - this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(3); - return this.ProjectManager.flushAndDeleteProjectWithLocks(this.project_id, {}, error => { - this.callback(error); - return done(); - }); - }); + describe('successfully', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(3) + return this.ProjectManager.flushAndDeleteProjectWithLocks( + this.project_id, + {}, + (error) => { + this.callback(error) + return done() + } + ) + }) - it("should get the doc ids in the project", function() { - return this.RedisManager.getDocIdsInProject 
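
// What this block of assertions pins down: flushAndDeleteProjectWithLocks
// lists the project's docs, flushes and deletes each one under its own lock,
// and flushes project history exactly once at the end. A condensed sketch of
// that control flow, reusing the collaborator names stubbed in this file
// (the local bookkeeping variables are illustrative):
function flushAndDeleteProjectSketch(project_id, options, callback) {
  RedisManager.getDocIdsInProject(project_id, (error, doc_ids) => {
    if (error != null) return callback(error)
    let firstError = null
    let remaining = doc_ids.length
    const finish = () =>
      // history is flushed even when a doc failed; the doc error still wins
      HistoryManager.flushProjectChanges(project_id, options, (flushError) =>
        callback(firstError || flushError)
      )
    if (remaining === 0) return finish()
    doc_ids.forEach((doc_id) =>
      DocumentManager.flushAndDeleteDocWithLock(project_id, doc_id, options, (error) => {
        if (error != null) {
          // logged as 'error deleting doc'; the remaining docs still flush
          firstError = firstError || error
        }
        if (--remaining === 0) finish()
      })
    )
  })
}
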
- .calledWith(this.project_id) - .should.equal(true); - }); + it('should get the doc ids in the project', function () { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true) + }) - it("should delete each doc in the project", function() { - return Array.from(this.doc_ids).map((doc_id) => - this.DocumentManager.flushAndDeleteDocWithLock - .calledWith(this.project_id, doc_id, {}) - .should.equal(true)); - }); + it('should delete each doc in the project', function () { + return Array.from(this.doc_ids).map((doc_id) => + this.DocumentManager.flushAndDeleteDocWithLock + .calledWith(this.project_id, doc_id, {}) + .should.equal(true) + ) + }) - it("should flush project history", function() { - return this.HistoryManager.flushProjectChanges - .calledWith(this.project_id, {}) - .should.equal(true); - }); + it('should flush project history', function () { + return this.HistoryManager.flushProjectChanges + .calledWith(this.project_id, {}) + .should.equal(true) + }) - it("should call the callback without error", function() { - return this.callback.calledWith(null).should.equal(true); - }); + it('should call the callback without error', function () { + return this.callback.calledWith(null).should.equal(true) + }) - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) - return describe("when a doc errors", function() { - beforeEach(function(done) { - this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; - this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); - this.DocumentManager.flushAndDeleteDocWithLock = sinon.spy((project_id, doc_id, options, callback) => { - if (doc_id === "doc-id-1") { - return callback(this.error = new Error("oops, something went wrong")); - } else { - return callback(); - } - }); - return this.ProjectManager.flushAndDeleteProjectWithLocks(this.project_id, {}, error => { - this.callback(error); - return done(); - }); - }); + return describe('when a doc errors', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.flushAndDeleteDocWithLock = sinon.spy( + (project_id, doc_id, options, callback) => { + if (doc_id === 'doc-id-1') { + return callback( + (this.error = new Error('oops, something went wrong')) + ) + } else { + return callback() + } + } + ) + return this.ProjectManager.flushAndDeleteProjectWithLocks( + this.project_id, + {}, + (error) => { + this.callback(error) + return done() + } + ) + }) - it("should still flush each doc in the project", function() { - return Array.from(this.doc_ids).map((doc_id) => - this.DocumentManager.flushAndDeleteDocWithLock - .calledWith(this.project_id, doc_id, {}) - .should.equal(true)); - }); + it('should still flush each doc in the project', function () { + return Array.from(this.doc_ids).map((doc_id) => + this.DocumentManager.flushAndDeleteDocWithLock + .calledWith(this.project_id, doc_id, {}) + .should.equal(true) + ) + }) - it("should still flush project history", function() { - return this.HistoryManager.flushProjectChanges - .calledWith(this.project_id, {}) - .should.equal(true); - }); + it('should still flush project history', function () { + return 
this.HistoryManager.flushProjectChanges + .calledWith(this.project_id, {}) + .should.equal(true) + }) - it("should record the error", function() { - return this.logger.error - .calledWith({err: this.error, project_id: this.project_id, doc_id: "doc-id-1"}, "error deleting doc") - .should.equal(true); - }); + it('should record the error', function () { + return this.logger.error + .calledWith( + { err: this.error, project_id: this.project_id, doc_id: 'doc-id-1' }, + 'error deleting doc' + ) + .should.equal(true) + }) - it("should call the callback with an error", function() { - return this.callback.calledWith(new Error()).should.equal(true); - }); + it('should call the callback with an error', function () { + return this.callback.calledWith(new Error()).should.equal(true) + }) - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); -}); + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js index 7d74c61507..110a827e64 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js @@ -14,109 +14,133 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/ProjectManager.js"; -const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/ProjectManager.js' +const SandboxedModule = require('sandboxed-module') -describe("ProjectManager - flushProject", function() { - beforeEach(function() { - let Timer; - this.ProjectManager = SandboxedModule.require(modulePath, { requires: { - "./RedisManager": (this.RedisManager = {}), - "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), - "./DocumentManager": (this.DocumentManager = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), - "./HistoryManager": (this.HistoryManager = {}), - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()) - }) - } - } - ); - this.project_id = "project-id-123"; - return this.callback = sinon.stub(); - }); +describe('ProjectManager - flushProject', function () { + beforeEach(function () { + let Timer + this.ProjectManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './DocumentManager': (this.DocumentManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), + './HistoryManager': (this.HistoryManager = {}), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()) + }) + } + }) + this.project_id = 'project-id-123' + return (this.callback = 
sinon.stub()) + }) - describe("successfully", function() { - beforeEach(function(done) { - this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; - this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); - this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2); - return this.ProjectManager.flushProjectWithLocks(this.project_id, error => { - this.callback(error); - return done(); - }); - }); + describe('successfully', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2) + return this.ProjectManager.flushProjectWithLocks( + this.project_id, + (error) => { + this.callback(error) + return done() + } + ) + }) - it("should get the doc ids in the project", function() { - return this.RedisManager.getDocIdsInProject - .calledWith(this.project_id) - .should.equal(true); - }); - - it("should flush each doc in the project", function() { - return Array.from(this.doc_ids).map((doc_id) => - this.DocumentManager.flushDocIfLoadedWithLock - .calledWith(this.project_id, doc_id) - .should.equal(true)); - }); + it('should get the doc ids in the project', function () { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true) + }) - it("should call the callback without error", function() { - return this.callback.calledWith(null).should.equal(true); - }); + it('should flush each doc in the project', function () { + return Array.from(this.doc_ids).map((doc_id) => + this.DocumentManager.flushDocIfLoadedWithLock + .calledWith(this.project_id, doc_id) + .should.equal(true) + ) + }) - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); + it('should call the callback without error', function () { + return this.callback.calledWith(null).should.equal(true) + }) - return describe("when a doc errors", function() { - beforeEach(function(done) { - this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; - this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); - this.DocumentManager.flushDocIfLoadedWithLock = sinon.spy((project_id, doc_id, callback) => { - if (callback == null) { callback = function(error) {}; } - if (doc_id === "doc-id-1") { - return callback(this.error = new Error("oops, something went wrong")); - } else { - return callback(); - } - }); - return this.ProjectManager.flushProjectWithLocks(this.project_id, error => { - this.callback(error); - return done(); - }); - }); - - it("should still flush each doc in the project", function() { - return Array.from(this.doc_ids).map((doc_id) => - this.DocumentManager.flushDocIfLoadedWithLock - .calledWith(this.project_id, doc_id) - .should.equal(true)); - }); + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) - it("should record the error", function() { - return this.logger.error - .calledWith({err: this.error, project_id: this.project_id, doc_id: "doc-id-1"}, "error flushing doc") - .should.equal(true); - }); + return describe('when a doc errors', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + 
this.DocumentManager.flushDocIfLoadedWithLock = sinon.spy( + (project_id, doc_id, callback) => { + if (callback == null) { + callback = function (error) {} + } + if (doc_id === 'doc-id-1') { + return callback( + (this.error = new Error('oops, something went wrong')) + ) + } else { + return callback() + } + } + ) + return this.ProjectManager.flushProjectWithLocks( + this.project_id, + (error) => { + this.callback(error) + return done() + } + ) + }) - it("should call the callback with an error", function() { - return this.callback.calledWith(new Error()).should.equal(true); - }); + it('should still flush each doc in the project', function () { + return Array.from(this.doc_ids).map((doc_id) => + this.DocumentManager.flushDocIfLoadedWithLock + .calledWith(this.project_id, doc_id) + .should.equal(true) + ) + }) - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); -}); + it('should record the error', function () { + return this.logger.error + .calledWith( + { err: this.error, project_id: this.project_id, doc_id: 'doc-id-1' }, + 'error flushing doc' + ) + .should.equal(true) + }) + + it('should call the callback with an error', function () { + return this.callback.calledWith(new Error()).should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js index bb700a2a21..72ffe39ec1 100644 --- a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js @@ -10,158 +10,215 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/ProjectManager.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors.js"); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/ProjectManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors.js') -describe("ProjectManager - getProjectDocsAndFlushIfOld", function() { - beforeEach(function() { - let Timer; - this.ProjectManager = SandboxedModule.require(modulePath, { requires: { - "./RedisManager": (this.RedisManager = {}), - "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), - "./DocumentManager": (this.DocumentManager = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), - "./HistoryManager": (this.HistoryManager = {}), - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()) - }) - } - } - ); - this.project_id = "project-id-123"; - this.callback = sinon.stub(); - return this.doc_versions = [111, 222, 333];}); +describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { + beforeEach(function () { + let Timer + this.ProjectManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': 
(this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './DocumentManager': (this.DocumentManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), + './HistoryManager': (this.HistoryManager = {}), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()) + }) + } + }) + this.project_id = 'project-id-123' + this.callback = sinon.stub() + return (this.doc_versions = [111, 222, 333]) + }) - describe("successfully", function() { - beforeEach(function(done) { - this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; - this.doc_lines = [["aaa","aaa"],["bbb","bbb"],["ccc","ccc"]]; - this.docs = [ - {_id: this.doc_ids[0], lines: this.doc_lines[0], v: this.doc_versions[0]}, - {_id: this.doc_ids[1], lines: this.doc_lines[1], v: this.doc_versions[1]}, - {_id: this.doc_ids[2], lines: this.doc_lines[2], v: this.doc_versions[2]} - ]; - this.RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null); - this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); - this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub(); - this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, this.doc_ids[0]) - .callsArgWith(2, null, this.doc_lines[0], this.doc_versions[0]); - this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, this.doc_ids[1]) - .callsArgWith(2, null, this.doc_lines[1], this.doc_versions[1]); - this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, this.doc_ids[2]) - .callsArgWith(2, null, this.doc_lines[2], this.doc_versions[2]); - return this.ProjectManager.getProjectDocsAndFlushIfOld(this.project_id, this.projectStateHash, this.excludeVersions, (error, docs) => { - this.callback(error, docs); - return done(); - }); - }); + describe('successfully', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.doc_lines = [ + ['aaa', 'aaa'], + ['bbb', 'bbb'], + ['ccc', 'ccc'] + ] + this.docs = [ + { + _id: this.doc_ids[0], + lines: this.doc_lines[0], + v: this.doc_versions[0] + }, + { + _id: this.doc_ids[1], + lines: this.doc_lines[1], + v: this.doc_versions[1] + }, + { + _id: this.doc_ids[2], + lines: this.doc_lines[2], + v: this.doc_versions[2] + } + ] + this.RedisManager.checkOrSetProjectState = sinon + .stub() + .callsArgWith(2, null) + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub() + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, this.doc_ids[0]) + .callsArgWith(2, null, this.doc_lines[0], this.doc_versions[0]) + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, this.doc_ids[1]) + .callsArgWith(2, null, this.doc_lines[1], this.doc_versions[1]) + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, this.doc_ids[2]) + .callsArgWith(2, null, this.doc_lines[2], this.doc_versions[2]) + return this.ProjectManager.getProjectDocsAndFlushIfOld( + this.project_id, + this.projectStateHash, + this.excludeVersions, + (error, docs) => { + this.callback(error, docs) + return done() + } + ) + }) - it("should check the project state", function() { - return this.RedisManager.checkOrSetProjectState - .calledWith(this.project_id, 
this.projectStateHash) - .should.equal(true); - }); + it('should check the project state', function () { + return this.RedisManager.checkOrSetProjectState + .calledWith(this.project_id, this.projectStateHash) + .should.equal(true) + }) - it("should get the doc ids in the project", function() { - return this.RedisManager.getDocIdsInProject - .calledWith(this.project_id) - .should.equal(true); - }); + it('should get the doc ids in the project', function () { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true) + }) - it("should call the callback without error", function() { - return this.callback.calledWith(null, this.docs).should.equal(true); - }); + it('should call the callback without error', function () { + return this.callback.calledWith(null, this.docs).should.equal(true) + }) - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) - describe("when the state does not match", function() { - beforeEach(function(done) { - this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; - this.RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null, true); - return this.ProjectManager.getProjectDocsAndFlushIfOld(this.project_id, this.projectStateHash, this.excludeVersions, (error, docs) => { - this.callback(error, docs); - return done(); - }); - }); + describe('when the state does not match', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.checkOrSetProjectState = sinon + .stub() + .callsArgWith(2, null, true) + return this.ProjectManager.getProjectDocsAndFlushIfOld( + this.project_id, + this.projectStateHash, + this.excludeVersions, + (error, docs) => { + this.callback(error, docs) + return done() + } + ) + }) - it("should check the project state", function() { - return this.RedisManager.checkOrSetProjectState - .calledWith(this.project_id, this.projectStateHash) - .should.equal(true); - }); + it('should check the project state', function () { + return this.RedisManager.checkOrSetProjectState + .calledWith(this.project_id, this.projectStateHash) + .should.equal(true) + }) - it("should call the callback with an error", function() { - return this.callback.calledWith(new Errors.ProjectStateChangedError("project state changed")).should.equal(true); - }); + it('should call the callback with an error', function () { + return this.callback + .calledWith( + new Errors.ProjectStateChangedError('project state changed') + ) + .should.equal(true) + }) - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) - describe("when a doc errors", function() { - beforeEach(function(done) { - this.doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]; - this.RedisManager.checkOrSetProjectState = sinon.stub().callsArgWith(2, null); - this.RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, this.doc_ids); - this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub(); - this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, "doc-id-1") - .callsArgWith(2, null, ["test doc content"], this.doc_versions[1]); - 
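
// The guard asserted just above: getProjectDocsAndFlushIfOld checks (and
// sets) the project state hash before touching any doc, and a changed state
// short-circuits with ProjectStateChangedError. A rough sketch, reusing the
// collaborator signatures stubbed in this file:
function getProjectDocsSketch(project_id, projectStateHash, callback) {
  RedisManager.checkOrSetProjectState(project_id, projectStateHash, (error, changed) => {
    if (error != null) return callback(error)
    if (changed) {
      return callback(new Errors.ProjectStateChangedError('project state changed'))
    }
    RedisManager.getDocIdsInProject(project_id, (error, doc_ids) => {
      if (error != null) return callback(error)
      // each doc is then read via DocumentManager.getDocAndFlushIfOldWithLock
      // and collected as { _id, lines, v }; the ids stand in for that mapping
      // in this sketch, as the 'successfully' case above shows in full
      return callback(null, doc_ids)
    })
  })
}
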
this.DocumentManager.getDocAndFlushIfOldWithLock.withArgs(this.project_id, "doc-id-2") - .callsArgWith(2, (this.error = new Error("oops"))); // trigger an error - return this.ProjectManager.getProjectDocsAndFlushIfOld(this.project_id, this.projectStateHash, this.excludeVersions, (error, docs) => { - this.callback(error); - return done(); - }); - }); + describe('when a doc errors', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.checkOrSetProjectState = sinon + .stub() + .callsArgWith(2, null) + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub() + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, 'doc-id-1') + .callsArgWith(2, null, ['test doc content'], this.doc_versions[1]) + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, 'doc-id-2') + .callsArgWith(2, (this.error = new Error('oops'))) // trigger an error + return this.ProjectManager.getProjectDocsAndFlushIfOld( + this.project_id, + this.projectStateHash, + this.excludeVersions, + (error, docs) => { + this.callback(error) + return done() + } + ) + }) - it("should record the error", function() { - return this.logger.error - .calledWith({err: this.error, project_id: this.project_id, doc_id: "doc-id-2"}, "error getting project doc lines in getProjectDocsAndFlushIfOld") - .should.equal(true); - }); + it('should record the error', function () { + return this.logger.error + .calledWith( + { err: this.error, project_id: this.project_id, doc_id: 'doc-id-2' }, + 'error getting project doc lines in getProjectDocsAndFlushIfOld' + ) + .should.equal(true) + }) - it("should call the callback with an error", function() { - return this.callback.calledWith(new Error("oops")).should.equal(true); - }); + it('should call the callback with an error', function () { + return this.callback.calledWith(new Error('oops')).should.equal(true) + }) - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) - return describe("clearing the project state with clearProjectState", function() { - beforeEach(function(done) { - this.RedisManager.clearProjectState = sinon.stub().callsArg(1); - return this.ProjectManager.clearProjectState(this.project_id, error => { - this.callback(error); - return done(); - }); - }); + return describe('clearing the project state with clearProjectState', function () { + beforeEach(function (done) { + this.RedisManager.clearProjectState = sinon.stub().callsArg(1) + return this.ProjectManager.clearProjectState(this.project_id, (error) => { + this.callback(error) + return done() + }) + }) - it("should clear the project state", function() { - return this.RedisManager.clearProjectState - .calledWith(this.project_id) - .should.equal(true); - }); + it('should clear the project state', function () { + return this.RedisManager.clearProjectState + .calledWith(this.project_id) + .should.equal(true) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); -}); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) +}) diff --git 
a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index ffe8c49e97..5b439ef839 100644 --- a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -10,239 +10,393 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/ProjectManager.js"; -const SandboxedModule = require('sandboxed-module'); -const _ = require('lodash'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/ProjectManager.js' +const SandboxedModule = require('sandboxed-module') +const _ = require('lodash') -describe("ProjectManager", function() { - beforeEach(function() { - let Timer; - this.ProjectManager = SandboxedModule.require(modulePath, { requires: { - "./RedisManager": (this.RedisManager = {}), - "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), - "./DocumentManager": (this.DocumentManager = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), - "./HistoryManager": (this.HistoryManager = {}), - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()) - }) - } - } - ); +describe('ProjectManager', function () { + beforeEach(function () { + let Timer + this.ProjectManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './DocumentManager': (this.DocumentManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), + './HistoryManager': (this.HistoryManager = {}), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()) + }) + } + }) - this.project_id = "project-id-123"; - this.projectHistoryId = 'history-id-123'; - this.user_id = "user-id-123"; - this.version = 1234567; - this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false); - this.HistoryManager.flushProjectChangesAsync = sinon.stub(); - return this.callback = sinon.stub(); - }); + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.user_id = 'user-id-123' + this.version = 1234567 + this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false) + this.HistoryManager.flushProjectChangesAsync = sinon.stub() + return (this.callback = sinon.stub()) + }) - return describe("updateProjectWithLocks", function() { - describe("rename operations", function() { - beforeEach(function() { - this.firstDocUpdate = { - id: 1, - pathname: 'foo', - newPathname: 'foo' - }; - this.secondDocUpdate = { - id: 2, - pathname: 'bar', - newPathname: 'bar2' - }; - this.docUpdates = [ this.firstDocUpdate, this.secondDocUpdate ]; - this.firstFileUpdate = { - id: 2, - pathname: 'bar', - newPathname: 'bar2' - }; - this.fileUpdates = [ this.firstFileUpdate ]; - this.DocumentManager.renameDocWithLock = sinon.stub().yields(); 
- return this.ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(); - }); + return describe('updateProjectWithLocks', function () { + describe('rename operations', function () { + beforeEach(function () { + this.firstDocUpdate = { + id: 1, + pathname: 'foo', + newPathname: 'foo' + } + this.secondDocUpdate = { + id: 2, + pathname: 'bar', + newPathname: 'bar2' + } + this.docUpdates = [this.firstDocUpdate, this.secondDocUpdate] + this.firstFileUpdate = { + id: 2, + pathname: 'bar', + newPathname: 'bar2' + } + this.fileUpdates = [this.firstFileUpdate] + this.DocumentManager.renameDocWithLock = sinon.stub().yields() + return (this.ProjectHistoryRedisManager.queueRenameEntity = sinon + .stub() + .yields()) + }) - describe("successfully", function() { - beforeEach(function() { - return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); - }); + describe('successfully', function () { + beforeEach(function () { + return this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + this.callback + ) + }) - it("should rename the docs in the updates", function() { - const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {version: `${this.version}.0`}); - const secondDocUpdateWithVersion = _.extend({}, this.secondDocUpdate, {version: `${this.version}.1`}); - this.DocumentManager.renameDocWithLock - .calledWith(this.project_id, this.firstDocUpdate.id, this.user_id, firstDocUpdateWithVersion, this.projectHistoryId) - .should.equal(true); - return this.DocumentManager.renameDocWithLock - .calledWith(this.project_id, this.secondDocUpdate.id, this.user_id, secondDocUpdateWithVersion, this.projectHistoryId) - .should.equal(true); - }); + it('should rename the docs in the updates', function () { + const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, { + version: `${this.version}.0` + }) + const secondDocUpdateWithVersion = _.extend( + {}, + this.secondDocUpdate, + { version: `${this.version}.1` } + ) + this.DocumentManager.renameDocWithLock + .calledWith( + this.project_id, + this.firstDocUpdate.id, + this.user_id, + firstDocUpdateWithVersion, + this.projectHistoryId + ) + .should.equal(true) + return this.DocumentManager.renameDocWithLock + .calledWith( + this.project_id, + this.secondDocUpdate.id, + this.user_id, + secondDocUpdateWithVersion, + this.projectHistoryId + ) + .should.equal(true) + }) - it("should rename the files in the updates", function() { - const firstFileUpdateWithVersion = _.extend({}, this.firstFileUpdate, {version: `${this.version}.2`}); - return this.ProjectHistoryRedisManager.queueRenameEntity - .calledWith(this.project_id, this.projectHistoryId, 'file', this.firstFileUpdate.id, this.user_id, firstFileUpdateWithVersion) - .should.equal(true); - }); + it('should rename the files in the updates', function () { + const firstFileUpdateWithVersion = _.extend( + {}, + this.firstFileUpdate, + { version: `${this.version}.2` } + ) + return this.ProjectHistoryRedisManager.queueRenameEntity + .calledWith( + this.project_id, + this.projectHistoryId, + 'file', + this.firstFileUpdate.id, + this.user_id, + firstFileUpdateWithVersion + ) + .should.equal(true) + }) - it("should not flush the history", function() { - return this.HistoryManager.flushProjectChangesAsync - .calledWith(this.project_id) - .should.equal(false); - }); + it('should not flush the 
history', function () { + return this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(false) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) - describe("when renaming a doc fails", function() { - beforeEach(function() { - this.error = new Error('error'); - this.DocumentManager.renameDocWithLock = sinon.stub().yields(this.error); - return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); - }); + describe('when renaming a doc fails', function () { + beforeEach(function () { + this.error = new Error('error') + this.DocumentManager.renameDocWithLock = sinon + .stub() + .yields(this.error) + return this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + this.callback + ) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) - describe("when renaming a file fails", function() { - beforeEach(function() { - this.error = new Error('error'); - this.ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(this.error); - return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); - }); + describe('when renaming a file fails', function () { + beforeEach(function () { + this.error = new Error('error') + this.ProjectHistoryRedisManager.queueRenameEntity = sinon + .stub() + .yields(this.error) + return this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + this.callback + ) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) - return describe("with enough ops to flush", function() { - beforeEach(function() { - this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true); - return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); - }); + return describe('with enough ops to flush', function () { + beforeEach(function () { + this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) + return this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + this.callback + ) + }) - return it("should flush the history", function() { - return this.HistoryManager.flushProjectChangesAsync - .calledWith(this.project_id) - .should.equal(true); - }); - }); - }); + return it('should flush the history', function () { + return this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(true) + }) + }) + }) - return describe("add 
operations", function() { - beforeEach(function() { - this.firstDocUpdate = { - id: 1, - docLines: "a\nb" - }; - this.secondDocUpdate = { - id: 2, - docLines: "a\nb" - }; - this.docUpdates = [ this.firstDocUpdate, this.secondDocUpdate ]; - this.firstFileUpdate = { - id: 3, - url: 'filestore.example.com/2' - }; - this.secondFileUpdate = { - id: 4, - url: 'filestore.example.com/3' - }; - this.fileUpdates = [ this.firstFileUpdate, this.secondFileUpdate ]; - return this.ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(); - }); + return describe('add operations', function () { + beforeEach(function () { + this.firstDocUpdate = { + id: 1, + docLines: 'a\nb' + } + this.secondDocUpdate = { + id: 2, + docLines: 'a\nb' + } + this.docUpdates = [this.firstDocUpdate, this.secondDocUpdate] + this.firstFileUpdate = { + id: 3, + url: 'filestore.example.com/2' + } + this.secondFileUpdate = { + id: 4, + url: 'filestore.example.com/3' + } + this.fileUpdates = [this.firstFileUpdate, this.secondFileUpdate] + return (this.ProjectHistoryRedisManager.queueAddEntity = sinon + .stub() + .yields()) + }) - describe("successfully", function() { - beforeEach(function() { - return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); - }); + describe('successfully', function () { + beforeEach(function () { + return this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + this.callback + ) + }) - it("should add the docs in the updates", function() { - const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {version: `${this.version}.0`}); - const secondDocUpdateWithVersion = _.extend({}, this.secondDocUpdate, {version: `${this.version}.1`}); - this.ProjectHistoryRedisManager.queueAddEntity.getCall(0) - .calledWith(this.project_id, this.projectHistoryId, 'doc', this.firstDocUpdate.id, this.user_id, firstDocUpdateWithVersion) - .should.equal(true); - return this.ProjectHistoryRedisManager.queueAddEntity.getCall(1) - .calledWith(this.project_id, this.projectHistoryId, 'doc', this.secondDocUpdate.id, this.user_id, secondDocUpdateWithVersion) - .should.equal(true); - }); + it('should add the docs in the updates', function () { + const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, { + version: `${this.version}.0` + }) + const secondDocUpdateWithVersion = _.extend( + {}, + this.secondDocUpdate, + { version: `${this.version}.1` } + ) + this.ProjectHistoryRedisManager.queueAddEntity + .getCall(0) + .calledWith( + this.project_id, + this.projectHistoryId, + 'doc', + this.firstDocUpdate.id, + this.user_id, + firstDocUpdateWithVersion + ) + .should.equal(true) + return this.ProjectHistoryRedisManager.queueAddEntity + .getCall(1) + .calledWith( + this.project_id, + this.projectHistoryId, + 'doc', + this.secondDocUpdate.id, + this.user_id, + secondDocUpdateWithVersion + ) + .should.equal(true) + }) - it("should add the files in the updates", function() { - const firstFileUpdateWithVersion = _.extend({}, this.firstFileUpdate, {version: `${this.version}.2`}); - const secondFileUpdateWithVersion = _.extend({}, this.secondFileUpdate, {version: `${this.version}.3`}); - this.ProjectHistoryRedisManager.queueAddEntity.getCall(2) - .calledWith(this.project_id, this.projectHistoryId, 'file', this.firstFileUpdate.id, this.user_id, firstFileUpdateWithVersion) - .should.equal(true); - return 
this.ProjectHistoryRedisManager.queueAddEntity.getCall(3) - .calledWith(this.project_id, this.projectHistoryId, 'file', this.secondFileUpdate.id, this.user_id, secondFileUpdateWithVersion) - .should.equal(true); - }); + it('should add the files in the updates', function () { + const firstFileUpdateWithVersion = _.extend( + {}, + this.firstFileUpdate, + { version: `${this.version}.2` } + ) + const secondFileUpdateWithVersion = _.extend( + {}, + this.secondFileUpdate, + { version: `${this.version}.3` } + ) + this.ProjectHistoryRedisManager.queueAddEntity + .getCall(2) + .calledWith( + this.project_id, + this.projectHistoryId, + 'file', + this.firstFileUpdate.id, + this.user_id, + firstFileUpdateWithVersion + ) + .should.equal(true) + return this.ProjectHistoryRedisManager.queueAddEntity + .getCall(3) + .calledWith( + this.project_id, + this.projectHistoryId, + 'file', + this.secondFileUpdate.id, + this.user_id, + secondFileUpdateWithVersion + ) + .should.equal(true) + }) - it("should not flush the history", function() { - return this.HistoryManager.flushProjectChangesAsync - .calledWith(this.project_id) - .should.equal(false); - }); + it('should not flush the history', function () { + return this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(false) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) - describe("when adding a doc fails", function() { - beforeEach(function() { - this.error = new Error('error'); - this.ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(this.error); - return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); - }); + describe('when adding a doc fails', function () { + beforeEach(function () { + this.error = new Error('error') + this.ProjectHistoryRedisManager.queueAddEntity = sinon + .stub() + .yields(this.error) + return this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + this.callback + ) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) - describe("when adding a file fails", function() { - beforeEach(function() { - this.error = new Error('error'); - this.ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(this.error); - return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); - }); + describe('when adding a file fails', function () { + beforeEach(function () { + this.error = new Error('error') + this.ProjectHistoryRedisManager.queueAddEntity = sinon + .stub() + .yields(this.error) + return this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + this.callback + ) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + return it('should call the callback 
with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) - return describe("with enough ops to flush", function() { - beforeEach(function() { - this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true); - return this.ProjectManager.updateProjectWithLocks(this.project_id, this.projectHistoryId, this.user_id, this.docUpdates, this.fileUpdates, this.version, this.callback); - }); + return describe('with enough ops to flush', function () { + beforeEach(function () { + this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) + return this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + this.callback + ) + }) - return it("should flush the history", function() { - return this.HistoryManager.flushProjectChangesAsync - .calledWith(this.project_id) - .should.equal(true); - }); - }); - }); - }); -}); + return it('should flush the history', function () { + return this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(true) + }) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js index 5634840221..a336125a1c 100644 --- a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js +++ b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js @@ -12,392 +12,524 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../../app/js/RangesManager.js"; -const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../../app/js/RangesManager.js' +const SandboxedModule = require('sandboxed-module') -describe("RangesManager", function() { - beforeEach(function() { - this.RangesManager = SandboxedModule.require(modulePath, { - requires: { - "logger-sharelatex": (this.logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }) - } - }); +describe('RangesManager', function () { + beforeEach(function () { + this.RangesManager = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': (this.logger = { + error: sinon.stub(), + log: sinon.stub(), + warn: sinon.stub() + }) + } + }) - this.doc_id = "doc-id-123"; - this.project_id = "project-id-123"; - this.user_id = "user-id-123"; - return this.callback = sinon.stub(); - }); + this.doc_id = 'doc-id-123' + this.project_id = 'project-id-123' + this.user_id = 'user-id-123' + return (this.callback = sinon.stub()) + }) - describe("applyUpdate", function() { - beforeEach(function() { - this.updates = [{ - meta: { - user_id: this.user_id - }, - op: [{ - i: "two ", - p: 4 - }] - }]; - this.entries = { - comments: [{ - op: { - c: "three ", - p: 4 - }, - metadata: { - user_id: this.user_id - } - }], - changes: [{ - op: { - i: "five", - p: 15 - }, - metadata: { - user_id: this.user_id - } - }] - }; - return this.newDocLines = ["one two three four five"];}); // old is "one three four five" - - describe("successfully", function() { - beforeEach(function() { - return 
this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); - }); - - return it("should return the modified the comments and changes", function() { - this.callback.called.should.equal(true); - const [error, entries, ranges_were_collapsed] = Array.from(this.callback.args[0]); - expect(error).to.be.null; - expect(ranges_were_collapsed).to.equal(false); - entries.comments[0].op.should.deep.equal({ - c: "three ", - p: 8 - }); - return entries.changes[0].op.should.deep.equal({ - i: "five", - p: 19 - }); - }); - }); - - describe("with empty comments", function() { - beforeEach(function() { - this.entries.comments = []; - return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); - }); - - return it("should return an object with no comments", function() { - // Save space in redis and don't store just {} - this.callback.called.should.equal(true); - const [error, entries] = Array.from(this.callback.args[0]); - expect(error).to.be.null; - return expect(entries.comments).to.be.undefined; - }); - }); - - describe("with empty changes", function() { - beforeEach(function() { - this.entries.changes = []; - return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); - }); - - return it("should return an object with no changes", function() { - // Save space in redis and don't store just {} - this.callback.called.should.equal(true); - const [error, entries] = Array.from(this.callback.args[0]); - expect(error).to.be.null; - return expect(entries.changes).to.be.undefined; - }); - }); - - describe("with too many comments", function() { - beforeEach(function() { - this.RangesManager.MAX_COMMENTS = 2; - this.updates = [{ - meta: { - user_id: this.user_id - }, - op: [{ - c: "one", - p: 0, - t: "thread-id-1" - }] - }]; - this.entries = { - comments: [{ - op: { - c: "three ", - p: 4, - t: "thread-id-2" - }, - metadata: { - user_id: this.user_id - } - }, { - op: { - c: "four ", - p: 10, - t: "thread-id-3" - }, - metadata: { - user_id: this.user_id - } - }], - changes: [] - }; - return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); - }); - - return it("should return an error", function() { - this.callback.called.should.equal(true); - const [error, entries] = Array.from(this.callback.args[0]); - expect(error).to.not.be.null; - return expect(error.message).to.equal("too many comments or tracked changes"); - }); - }); - - describe("with too many changes", function() { - beforeEach(function() { - this.RangesManager.MAX_CHANGES = 2; - this.updates = [{ - meta: { - user_id: this.user_id, - tc: "track-changes-id-yes" - }, - op: [{ - i: "one ", - p: 0 - }] - }]; - this.entries = { - changes: [{ - op: { - i: "three", - p: 4 - }, - metadata: { - user_id: this.user_id - } - }, { - op: { - i: "four", - p: 10 - }, - metadata: { - user_id: this.user_id - } - }], - comments: [] - }; - this.newDocLines = ["one two three four"]; - return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); - }); - - return it("should return an error", function() { - // Save space in redis and don't store just {} - this.callback.called.should.equal(true); - const [error, entries] = Array.from(this.callback.args[0]); - expect(error).to.not.be.null; - return expect(error.message).to.equal("too many comments or 
tracked changes"); - }); - }); - - describe("inconsistent changes", function() { - beforeEach(function() { - this.updates = [{ - meta: { - user_id: this.user_id - }, - op: [{ - c: "doesn't match", - p: 0 - }] - }]; - return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); - }); - - return it("should return an error", function() { - // Save space in redis and don't store just {} - this.callback.called.should.equal(true); - const [error, entries] = Array.from(this.callback.args[0]); - expect(error).to.not.be.null; - return expect(error.message).to.equal("Change ({\"op\":{\"i\":\"five\",\"p\":15},\"metadata\":{\"user_id\":\"user-id-123\"}}) doesn't match text (\"our \")"); - }); - }); + describe('applyUpdate', function () { + beforeEach(function () { + this.updates = [ + { + meta: { + user_id: this.user_id + }, + op: [ + { + i: 'two ', + p: 4 + } + ] + } + ] + this.entries = { + comments: [ + { + op: { + c: 'three ', + p: 4 + }, + metadata: { + user_id: this.user_id + } + } + ], + changes: [ + { + op: { + i: 'five', + p: 15 + }, + metadata: { + user_id: this.user_id + } + } + ] + } + return (this.newDocLines = ['one two three four five']) + }) // old is "one three four five" + describe('successfully', function () { + beforeEach(function () { + return this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.entries, + this.updates, + this.newDocLines, + this.callback + ) + }) - return describe("with an update that collapses a range", function() { - beforeEach(function() { - this.updates = [{ - meta: { - user_id: this.user_id - }, - op: [{ - d: "one", - p: 0, - t: "thread-id-1" - }] - }]; - this.entries = { - comments: [{ - op: { - c: "n", - p: 1, - t: "thread-id-2" - }, - metadata: { - user_id: this.user_id - } - }], - changes: [] - }; - return this.RangesManager.applyUpdate(this.project_id, this.doc_id, this.entries, this.updates, this.newDocLines, this.callback); - }); + return it('should return the modified the comments and changes', function () { + this.callback.called.should.equal(true) + const [error, entries, ranges_were_collapsed] = Array.from( + this.callback.args[0] + ) + expect(error).to.be.null + expect(ranges_were_collapsed).to.equal(false) + entries.comments[0].op.should.deep.equal({ + c: 'three ', + p: 8 + }) + return entries.changes[0].op.should.deep.equal({ + i: 'five', + p: 19 + }) + }) + }) - return it("should return ranges_were_collapsed == true", function() { - this.callback.called.should.equal(true); - const [error, entries, ranges_were_collapsed] = Array.from(this.callback.args[0]); - return expect(ranges_were_collapsed).to.equal(true); - }); - }); - }); + describe('with empty comments', function () { + beforeEach(function () { + this.entries.comments = [] + return this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.entries, + this.updates, + this.newDocLines, + this.callback + ) + }) - return describe("acceptChanges", function() { - beforeEach(function() { - this.RangesManager = SandboxedModule.require(modulePath, { - requires: { - "logger-sharelatex": (this.logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }), - "./RangesTracker":(this.RangesTracker = SandboxedModule.require("../../../../app/js/RangesTracker.js")) - } - } - ); + return it('should return an object with no comments', function () { + // Save space in redis and don't store just {} + this.callback.called.should.equal(true) + const [error, entries] = 
Array.from(this.callback.args[0]) + expect(error).to.be.null + return expect(entries.comments).to.be.undefined + }) + }) - this.ranges = { - comments: [], - changes: [{ - id: "a1", - op: { - i: "lorem", - p: 0 - } - }, { - id: "a2", - op: { - i: "ipsum", - p: 10 - } - }, { - id: "a3", - op: { - i: "dolor", - p: 20 - } - }, { - id: "a4", - op: { - i: "sit", - p: 30 - } - }, { - id: "a5", - op: { - i: "amet", - p: 40 - } - }] - }; - return this.removeChangeIdsSpy = sinon.spy(this.RangesTracker.prototype, "removeChangeIds"); - }); + describe('with empty changes', function () { + beforeEach(function () { + this.entries.changes = [] + return this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.entries, + this.updates, + this.newDocLines, + this.callback + ) + }) - describe("successfully with a single change", function() { - beforeEach(function(done) { - this.change_ids = [ this.ranges.changes[1].id ]; - return this.RangesManager.acceptChanges(this.change_ids, this.ranges, (err, ranges) => { - this.rangesResponse = ranges; - return done(); - }); - }); + return it('should return an object with no changes', function () { + // Save space in redis and don't store just {} + this.callback.called.should.equal(true) + const [error, entries] = Array.from(this.callback.args[0]) + expect(error).to.be.null + return expect(entries.changes).to.be.undefined + }) + }) - it("should log the call with the correct number of changes", function() { - return this.logger.log - .calledWith("accepting 1 changes in ranges") - .should.equal(true); - }); + describe('with too many comments', function () { + beforeEach(function () { + this.RangesManager.MAX_COMMENTS = 2 + this.updates = [ + { + meta: { + user_id: this.user_id + }, + op: [ + { + c: 'one', + p: 0, + t: 'thread-id-1' + } + ] + } + ] + this.entries = { + comments: [ + { + op: { + c: 'three ', + p: 4, + t: 'thread-id-2' + }, + metadata: { + user_id: this.user_id + } + }, + { + op: { + c: 'four ', + p: 10, + t: 'thread-id-3' + }, + metadata: { + user_id: this.user_id + } + } + ], + changes: [] + } + return this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.entries, + this.updates, + this.newDocLines, + this.callback + ) + }) - it("should delegate the change removal to the ranges tracker", function() { - return this.removeChangeIdsSpy - .calledWith(this.change_ids) - .should.equal(true); - }); + return it('should return an error', function () { + this.callback.called.should.equal(true) + const [error, entries] = Array.from(this.callback.args[0]) + expect(error).to.not.be.null + return expect(error.message).to.equal( + 'too many comments or tracked changes' + ) + }) + }) - it("should remove the change", function() { - return expect(this.rangesResponse.changes - .find(change => change.id === this.ranges.changes[1].id)) - .to.be.undefined; - }); + describe('with too many changes', function () { + beforeEach(function () { + this.RangesManager.MAX_CHANGES = 2 + this.updates = [ + { + meta: { + user_id: this.user_id, + tc: 'track-changes-id-yes' + }, + op: [ + { + i: 'one ', + p: 0 + } + ] + } + ] + this.entries = { + changes: [ + { + op: { + i: 'three', + p: 4 + }, + metadata: { + user_id: this.user_id + } + }, + { + op: { + i: 'four', + p: 10 + }, + metadata: { + user_id: this.user_id + } + } + ], + comments: [] + } + this.newDocLines = ['one two three four'] + return this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.entries, + this.updates, + this.newDocLines, + this.callback + ) + }) - it("should return 
the original number of changes minus 1", function() { - return this.rangesResponse.changes.length - .should.equal(this.ranges.changes.length - 1); - }); - - return it("should not touch other changes", function() { - return [ 0, 2, 3, 4].map((i) => - expect(this.rangesResponse.changes - .find(change => change.id === this.ranges.changes[i].id)) - .to.deep.equal(this.ranges.changes[i])); - }); - }); + return it('should return an error', function () { + // Save space in redis and don't store just {} + this.callback.called.should.equal(true) + const [error, entries] = Array.from(this.callback.args[0]) + expect(error).to.not.be.null + return expect(error.message).to.equal( + 'too many comments or tracked changes' + ) + }) + }) - return describe("successfully with multiple changes", function() { - beforeEach(function(done) { - this.change_ids = [ this.ranges.changes[1].id, this.ranges.changes[3].id, this.ranges.changes[4].id ]; - return this.RangesManager.acceptChanges(this.change_ids, this.ranges, (err, ranges) => { - this.rangesResponse = ranges; - return done(); - }); - }); + describe('inconsistent changes', function () { + beforeEach(function () { + this.updates = [ + { + meta: { + user_id: this.user_id + }, + op: [ + { + c: "doesn't match", + p: 0 + } + ] + } + ] + return this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.entries, + this.updates, + this.newDocLines, + this.callback + ) + }) - it("should log the call with the correct number of changes", function() { - return this.logger.log - .calledWith(`accepting ${ this.change_ids.length } changes in ranges`) - .should.equal(true); - }); + return it('should return an error', function () { + // Save space in redis and don't store just {} + this.callback.called.should.equal(true) + const [error, entries] = Array.from(this.callback.args[0]) + expect(error).to.not.be.null + return expect(error.message).to.equal( + 'Change ({"op":{"i":"five","p":15},"metadata":{"user_id":"user-id-123"}}) doesn\'t match text ("our ")' + ) + }) + }) - it("should delegate the change removal to the ranges tracker", function() { - return this.removeChangeIdsSpy - .calledWith(this.change_ids) - .should.equal(true); - }); + return describe('with an update that collapses a range', function () { + beforeEach(function () { + this.updates = [ + { + meta: { + user_id: this.user_id + }, + op: [ + { + d: 'one', + p: 0, + t: 'thread-id-1' + } + ] + } + ] + this.entries = { + comments: [ + { + op: { + c: 'n', + p: 1, + t: 'thread-id-2' + }, + metadata: { + user_id: this.user_id + } + } + ], + changes: [] + } + return this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.entries, + this.updates, + this.newDocLines, + this.callback + ) + }) - it("should remove the changes", function() { - return [ 1, 3, 4].map((i) => - expect(this.rangesResponse.changes - .find(change => change.id === this.ranges.changes[1].id)) - .to.be.undefined); - }); - - it("should return the original number of changes minus the number of accepted changes", function() { - return this.rangesResponse.changes.length - .should.equal(this.ranges.changes.length - 3); - }); + return it('should return ranges_were_collapsed == true', function () { + this.callback.called.should.equal(true) + const [error, entries, ranges_were_collapsed] = Array.from( + this.callback.args[0] + ) + return expect(ranges_were_collapsed).to.equal(true) + }) + }) + }) - return it("should not touch other changes", function() { - return [ 0, 2 ].map((i) => - expect(this.rangesResponse.changes - 
.find(change => change.id === this.ranges.changes[i].id)) - .to.deep.equal(this.ranges.changes[i])); - }); - }); -}); -}); - + return describe('acceptChanges', function () { + beforeEach(function () { + this.RangesManager = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': (this.logger = { + error: sinon.stub(), + log: sinon.stub(), + warn: sinon.stub() + }), + './RangesTracker': (this.RangesTracker = SandboxedModule.require( + '../../../../app/js/RangesTracker.js' + )) + } + }) + + this.ranges = { + comments: [], + changes: [ + { + id: 'a1', + op: { + i: 'lorem', + p: 0 + } + }, + { + id: 'a2', + op: { + i: 'ipsum', + p: 10 + } + }, + { + id: 'a3', + op: { + i: 'dolor', + p: 20 + } + }, + { + id: 'a4', + op: { + i: 'sit', + p: 30 + } + }, + { + id: 'a5', + op: { + i: 'amet', + p: 40 + } + } + ] + } + return (this.removeChangeIdsSpy = sinon.spy( + this.RangesTracker.prototype, + 'removeChangeIds' + )) + }) + + describe('successfully with a single change', function () { + beforeEach(function (done) { + this.change_ids = [this.ranges.changes[1].id] + return this.RangesManager.acceptChanges( + this.change_ids, + this.ranges, + (err, ranges) => { + this.rangesResponse = ranges + return done() + } + ) + }) + + it('should log the call with the correct number of changes', function () { + return this.logger.log + .calledWith('accepting 1 changes in ranges') + .should.equal(true) + }) + + it('should delegate the change removal to the ranges tracker', function () { + return this.removeChangeIdsSpy + .calledWith(this.change_ids) + .should.equal(true) + }) + + it('should remove the change', function () { + return expect( + this.rangesResponse.changes.find( + (change) => change.id === this.ranges.changes[1].id + ) + ).to.be.undefined + }) + + it('should return the original number of changes minus 1', function () { + return this.rangesResponse.changes.length.should.equal( + this.ranges.changes.length - 1 + ) + }) + + return it('should not touch other changes', function () { + return [0, 2, 3, 4].map((i) => + expect( + this.rangesResponse.changes.find( + (change) => change.id === this.ranges.changes[i].id + ) + ).to.deep.equal(this.ranges.changes[i]) + ) + }) + }) + + return describe('successfully with multiple changes', function () { + beforeEach(function (done) { + this.change_ids = [ + this.ranges.changes[1].id, + this.ranges.changes[3].id, + this.ranges.changes[4].id + ] + return this.RangesManager.acceptChanges( + this.change_ids, + this.ranges, + (err, ranges) => { + this.rangesResponse = ranges + return done() + } + ) + }) + + it('should log the call with the correct number of changes', function () { + return this.logger.log + .calledWith(`accepting ${this.change_ids.length} changes in ranges`) + .should.equal(true) + }) + + it('should delegate the change removal to the ranges tracker', function () { + return this.removeChangeIdsSpy + .calledWith(this.change_ids) + .should.equal(true) + }) + + it('should remove the changes', function () { + return [1, 3, 4].map( + (i) => + expect( + this.rangesResponse.changes.find( + (change) => change.id === this.ranges.changes[i].id + ) + ).to.be.undefined + ) + }) + + it('should return the original number of changes minus the number of accepted changes', function () { + return this.rangesResponse.changes.length.should.equal( + this.ranges.changes.length - 3 + ) + }) + + return it('should not touch other changes', function () { + return [0, 2].map((i) => + expect( + this.rangesResponse.changes.find( + (change) => change.id === 
this.ranges.changes[i].id + ) + ).to.deep.equal(this.ranges.changes[i]) + ) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js index ed41a4834a..e84d557501 100644 --- a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js +++ b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js @@ -10,129 +10,127 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../../app/js/RateLimitManager.js"; -const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../../app/js/RateLimitManager.js' +const SandboxedModule = require('sandboxed-module') -describe("RateLimitManager", function() { - beforeEach(function() { - let Timer; - this.RateLimitManager = SandboxedModule.require(modulePath, { requires: { - "logger-sharelatex": (this.logger = { log: sinon.stub() }), - "settings-sharelatex": (this.settings = {}), - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()), - gauge: sinon.stub() - }) - } - } - ); - this.callback = sinon.stub(); - return this.RateLimiter = new this.RateLimitManager(1); - }); +describe('RateLimitManager', function () { + beforeEach(function () { + let Timer + this.RateLimitManager = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), + 'settings-sharelatex': (this.settings = {}), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + gauge: sinon.stub() + }) + } + }) + this.callback = sinon.stub() + return (this.RateLimiter = new this.RateLimitManager(1)) + }) - describe("for a single task", function() { - beforeEach(function() { - this.task = sinon.stub(); - return this.RateLimiter.run(this.task, this.callback); - }); + describe('for a single task', function () { + beforeEach(function () { + this.task = sinon.stub() + return this.RateLimiter.run(this.task, this.callback) + }) - it("should execute the task in the background", function() { - return this.task.called.should.equal(true); - }); + it('should execute the task in the background', function () { + return this.task.called.should.equal(true) + }) - it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) - return it("should finish with a worker count of one", function() { - // because it's in the background - return expect(this.RateLimiter.ActiveWorkerCount).to.equal(1); - }); - }); + return it('should finish with a worker count of one', function () { + // because it's in the background + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(1) + }) + }) - describe("for multiple tasks", function() { - beforeEach(function(done) { - this.task = sinon.stub(); - this.finalTask = 
sinon.stub(); - const task = cb => { - this.task(); - return setTimeout(cb, 100); - }; - const finalTask = cb => { - this.finalTask(); - return setTimeout(cb, 100); - }; - this.RateLimiter.run(task, this.callback); - this.RateLimiter.run(task, this.callback); - this.RateLimiter.run(task, this.callback); - return this.RateLimiter.run(finalTask, err => { - this.callback(err); - return done(); - }); - }); + describe('for multiple tasks', function () { + beforeEach(function (done) { + this.task = sinon.stub() + this.finalTask = sinon.stub() + const task = (cb) => { + this.task() + return setTimeout(cb, 100) + } + const finalTask = (cb) => { + this.finalTask() + return setTimeout(cb, 100) + } + this.RateLimiter.run(task, this.callback) + this.RateLimiter.run(task, this.callback) + this.RateLimiter.run(task, this.callback) + return this.RateLimiter.run(finalTask, (err) => { + this.callback(err) + return done() + }) + }) - it("should execute the first three tasks", function() { - return this.task.calledThrice.should.equal(true); - }); + it('should execute the first three tasks', function () { + return this.task.calledThrice.should.equal(true) + }) - it("should execute the final task", function() { - return this.finalTask.called.should.equal(true); - }); + it('should execute the final task', function () { + return this.finalTask.called.should.equal(true) + }) - it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) - return it("should finish with worker count of zero", function() { - return expect(this.RateLimiter.ActiveWorkerCount).to.equal(0); - }); - }); + return it('should finish with worker count of zero', function () { + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(0) + }) + }) - return describe("for a mixture of long-running tasks", function() { - beforeEach(function(done) { - this.task = sinon.stub(); - this.finalTask = sinon.stub(); - const finalTask = cb => { - this.finalTask(); - return setTimeout(cb, 100); - }; - this.RateLimiter.run(this.task, this.callback); - this.RateLimiter.run(this.task, this.callback); - this.RateLimiter.run(this.task, this.callback); - return this.RateLimiter.run(finalTask, err => { - this.callback(err); - return done(); - }); - }); + return describe('for a mixture of long-running tasks', function () { + beforeEach(function (done) { + this.task = sinon.stub() + this.finalTask = sinon.stub() + const finalTask = (cb) => { + this.finalTask() + return setTimeout(cb, 100) + } + this.RateLimiter.run(this.task, this.callback) + this.RateLimiter.run(this.task, this.callback) + this.RateLimiter.run(this.task, this.callback) + return this.RateLimiter.run(finalTask, (err) => { + this.callback(err) + return done() + }) + }) - it("should execute the first three tasks", function() { - return this.task.calledThrice.should.equal(true); - }); + it('should execute the first three tasks', function () { + return this.task.calledThrice.should.equal(true) + }) - it("should execute the final task", function() { - return this.finalTask.called.should.equal(true); - }); + it('should execute the final task', function () { + return this.finalTask.called.should.equal(true) + }) - it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) - return it("should finish with worker count of three", 
function() { - return expect(this.RateLimiter.ActiveWorkerCount).to.equal(3); - }); - }); -}); + return it('should finish with worker count of three', function () { + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(3) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js index ae6cd4bba2..9e2f2e270f 100644 --- a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -10,127 +10,162 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/RealTimeRedisManager.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors"); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/RealTimeRedisManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') -describe("RealTimeRedisManager", function() { - beforeEach(function() { - this.rclient = { - auth() {}, - exec: sinon.stub() - }; - this.rclient.multi = () => this.rclient; - this.pubsubClient = - {publish: sinon.stub()}; - this.RealTimeRedisManager = SandboxedModule.require(modulePath, { requires: { - "redis-sharelatex": { createClient: config => (config.name === 'pubsub') ? this.pubsubClient : this.rclient - }, - "settings-sharelatex": { - redis: { - documentupdater: (this.settings = { - key_schema: { - pendingUpdates({doc_id}) { return `PendingUpdates:${doc_id}`; } - } - }), - pubsub: { - name: "pubsub" - } - } - }, - "logger-sharelatex": { log() {} }, - "crypto": (this.crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) }), - "os": (this.os = {hostname: sinon.stub().returns("somehost")}), - "./Metrics": (this.metrics = { summary: sinon.stub()}) - } - }); +describe('RealTimeRedisManager', function () { + beforeEach(function () { + this.rclient = { + auth() {}, + exec: sinon.stub() + } + this.rclient.multi = () => this.rclient + this.pubsubClient = { publish: sinon.stub() } + this.RealTimeRedisManager = SandboxedModule.require(modulePath, { + requires: { + 'redis-sharelatex': { + createClient: (config) => + config.name === 'pubsub' ? 
this.pubsubClient : this.rclient + }, + 'settings-sharelatex': { + redis: { + documentupdater: (this.settings = { + key_schema: { + pendingUpdates({ doc_id }) { + return `PendingUpdates:${doc_id}` + } + } + }), + pubsub: { + name: 'pubsub' + } + } + }, + 'logger-sharelatex': { log() {} }, + crypto: (this.crypto = { + randomBytes: sinon + .stub() + .withArgs(4) + .returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) + }), + os: (this.os = { hostname: sinon.stub().returns('somehost') }), + './Metrics': (this.metrics = { summary: sinon.stub() }) + } + }) - this.doc_id = "doc-id-123"; - this.project_id = "project-id-123"; - return this.callback = sinon.stub(); - }); + this.doc_id = 'doc-id-123' + this.project_id = 'project-id-123' + return (this.callback = sinon.stub()) + }) - describe("getPendingUpdatesForDoc", function() { - beforeEach(function() { - this.rclient.lrange = sinon.stub(); - return this.rclient.ltrim = sinon.stub(); - }); + describe('getPendingUpdatesForDoc', function () { + beforeEach(function () { + this.rclient.lrange = sinon.stub() + return (this.rclient.ltrim = sinon.stub()) + }) - describe("successfully", function() { - beforeEach(function() { - this.updates = [ - { op: [{ i: "foo", p: 4 }] }, - { op: [{ i: "foo", p: 4 }] } - ]; - this.jsonUpdates = this.updates.map(update => JSON.stringify(update)); - this.rclient.exec = sinon.stub().callsArgWith(0, null, [this.jsonUpdates]); - return this.RealTimeRedisManager.getPendingUpdatesForDoc(this.doc_id, this.callback); - }); + describe('successfully', function () { + beforeEach(function () { + this.updates = [ + { op: [{ i: 'foo', p: 4 }] }, + { op: [{ i: 'foo', p: 4 }] } + ] + this.jsonUpdates = this.updates.map((update) => JSON.stringify(update)) + this.rclient.exec = sinon + .stub() + .callsArgWith(0, null, [this.jsonUpdates]) + return this.RealTimeRedisManager.getPendingUpdatesForDoc( + this.doc_id, + this.callback + ) + }) - it("should get the pending updates", function() { - return this.rclient.lrange - .calledWith(`PendingUpdates:${this.doc_id}`, 0, 7) - .should.equal(true); - }); + it('should get the pending updates', function () { + return this.rclient.lrange + .calledWith(`PendingUpdates:${this.doc_id}`, 0, 7) + .should.equal(true) + }) - it("should delete the pending updates", function() { - return this.rclient.ltrim - .calledWith(`PendingUpdates:${this.doc_id}`, 8, -1) - .should.equal(true); - }); + it('should delete the pending updates', function () { + return this.rclient.ltrim + .calledWith(`PendingUpdates:${this.doc_id}`, 8, -1) + .should.equal(true) + }) - return it("should call the callback with the updates", function() { - return this.callback.calledWith(null, this.updates).should.equal(true); - }); - }); + return it('should call the callback with the updates', function () { + return this.callback.calledWith(null, this.updates).should.equal(true) + }) + }) - return describe("when the JSON doesn't parse", function() { - beforeEach(function() { - this.jsonUpdates = [ - JSON.stringify({ op: [{ i: "foo", p: 4 }] }), - "broken json" - ]; - this.rclient.exec = sinon.stub().callsArgWith(0, null, [this.jsonUpdates]); - return this.RealTimeRedisManager.getPendingUpdatesForDoc(this.doc_id, this.callback); - }); + return describe("when the JSON doesn't parse", function () { + beforeEach(function () { + this.jsonUpdates = [ + JSON.stringify({ op: [{ i: 'foo', p: 4 }] }), + 'broken json' + ] + this.rclient.exec = sinon + .stub() + .callsArgWith(0, null, [this.jsonUpdates]) + return this.RealTimeRedisManager.getPendingUpdatesForDoc( 
+ this.doc_id, + this.callback + ) + }) - return it("should return an error to the callback", function() { - return this.callback.calledWith(new Error("JSON parse error")).should.equal(true); - }); - }); - }); + return it('should return an error to the callback', function () { + return this.callback + .calledWith(new Error('JSON parse error')) + .should.equal(true) + }) + }) + }) + describe('getUpdatesLength', function () { + beforeEach(function () { + this.rclient.llen = sinon.stub().yields(null, (this.length = 3)) + return this.RealTimeRedisManager.getUpdatesLength( + this.doc_id, + this.callback + ) + }) - describe("getUpdatesLength", function() { - beforeEach(function() { - this.rclient.llen = sinon.stub().yields(null, (this.length = 3)); - return this.RealTimeRedisManager.getUpdatesLength(this.doc_id, this.callback); - }); + it('should look up the length', function () { + return this.rclient.llen + .calledWith(`PendingUpdates:${this.doc_id}`) + .should.equal(true) + }) - it("should look up the length", function() { - return this.rclient.llen.calledWith(`PendingUpdates:${this.doc_id}`).should.equal(true); - }); + return it('should return the length', function () { + return this.callback.calledWith(null, this.length).should.equal(true) + }) + }) - return it("should return the length", function() { - return this.callback.calledWith(null, this.length).should.equal(true); - }); - }); + return describe('sendData', function () { + beforeEach(function () { + this.message_id = 'doc:somehost:01020304-0' + return this.RealTimeRedisManager.sendData({ op: 'thisop' }) + }) - return describe("sendData", function() { - beforeEach(function() { - this.message_id = "doc:somehost:01020304-0"; - return this.RealTimeRedisManager.sendData({op: "thisop"}); - }); + it('should send the op with a message id', function () { + return this.pubsubClient.publish + .calledWith( + 'applied-ops', + JSON.stringify({ op: 'thisop', _id: this.message_id }) + ) + .should.equal(true) + }) - it("should send the op with a message id", function() { - return this.pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:this.message_id})).should.equal(true); - }); - - return it("should track the payload size", function() { - return this.metrics.summary.calledWith("redis.publish.applied-ops", JSON.stringify({op:"thisop",_id:this.message_id}).length).should.equal(true); - }); - }); -}); + return it('should track the payload size', function () { + return this.metrics.summary + .calledWith( + 'redis.publish.applied-ops', + JSON.stringify({ op: 'thisop', _id: this.message_id }).length + ) + .should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index 7e8cf40d0e..e36df094f1 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -11,997 +11,1408 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/RedisManager.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors"); -const crypto = require("crypto"); -const tk = require("timekeeper"); - -describe("RedisManager", 
function() { - beforeEach(function() { - let Timer; - this.multi = {exec: sinon.stub()}; - this.rclient = {multi: () => this.multi}; - tk.freeze(new Date()); - this.RedisManager = SandboxedModule.require(modulePath, { - requires: { - "logger-sharelatex": (this.logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }), - "./ProjectHistoryRedisManager": (this.ProjectHistoryRedisManager = {}), - "settings-sharelatex": (this.settings = { - documentupdater: {logHashErrors: {write:true, read:true}}, - apis: { - project_history: {enabled: true} - }, - redis: { - documentupdater: { - key_schema: { - blockingKey({doc_id}) { return `Blocking:${doc_id}`; }, - docLines({doc_id}) { return `doclines:${doc_id}`; }, - docOps({doc_id}) { return `DocOps:${doc_id}`; }, - docVersion({doc_id}) { return `DocVersion:${doc_id}`; }, - docHash({doc_id}) { return `DocHash:${doc_id}`; }, - projectKey({doc_id}) { return `ProjectId:${doc_id}`; }, - pendingUpdates({doc_id}) { return `PendingUpdates:${doc_id}`; }, - docsInProject({project_id}) { return `DocsIn:${project_id}`; }, - ranges({doc_id}) { return `Ranges:${doc_id}`; }, - pathname({doc_id}) { return `Pathname:${doc_id}`; }, - projectHistoryId({doc_id}) { return `ProjectHistoryId:${doc_id}`; }, - projectHistoryType({doc_id}) { return `ProjectHistoryType:${doc_id}`; }, - projectState({project_id}) { return `ProjectState:${project_id}`; }, - unflushedTime({doc_id}) { return `UnflushedTime:${doc_id}`; }, - lastUpdatedBy({doc_id}) { return `lastUpdatedBy:${doc_id}`; }, - lastUpdatedAt({doc_id}) { return `lastUpdatedAt:${doc_id}`; } - } - }, - history: { - key_schema: { - uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:${doc_id}`; }, - docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:${project_id}`; } - } - } - } - }), - "redis-sharelatex": { - createClient: () => this.rclient - }, - "./Metrics": (this.metrics = { - inc: sinon.stub(), - summary: sinon.stub(), - Timer: (Timer = class Timer { - constructor() { - this.start = new Date(); - } - - done() { - const timeSpan = new Date - this.start; - return timeSpan; - } - }) - }), - "./Errors": Errors - }, - globals: { - JSON: (this.JSON = JSON) - } - } - ); - - this.doc_id = "doc-id-123"; - this.project_id = "project-id-123"; - this.projectHistoryId = 123; - return this.callback = sinon.stub(); - }); - - afterEach(function() { return tk.reset(); }); - - describe("getDoc", function() { - beforeEach(function() { - this.lines = ["one", "two", "three", "これは"]; // include some utf8 - this.jsonlines = JSON.stringify(this.lines); - this.version = 42; - this.hash = crypto.createHash('sha1').update(this.jsonlines,'utf8').digest('hex'); - this.ranges = { comments: "mock", entries: "mock" }; - this.json_ranges = JSON.stringify(this.ranges); - this.unflushed_time = 12345; - this.pathname = '/a/b/c.tex'; - this.multi.get = sinon.stub(); - this.multi.exec = sinon.stub().callsArgWith(0, null, [this.jsonlines, this.version, this.hash, this.project_id, this.json_ranges, this.pathname, this.projectHistoryId.toString(), this.unflushed_time]); - return this.rclient.sadd = sinon.stub().yields(null, 0); - }); - - describe("successfully", function() { - beforeEach(function() { - return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); - }); - - it("should get the lines from redis", function() { - return this.multi.get - .calledWith(`doclines:${this.doc_id}`) - .should.equal(true); - }); - - it("should get the version from", function() { - return this.multi.get - 
.calledWith(`DocVersion:${this.doc_id}`) - .should.equal(true); - }); - - it('should get the hash', function() { - return this.multi.get - .calledWith(`DocHash:${this.doc_id}`) - .should.equal(true); - }); - - it("should get the ranges", function() { - return this.multi.get - .calledWith(`Ranges:${this.doc_id}`) - .should.equal(true); - }); - - it("should get the unflushed time", function() { - return this.multi.get - .calledWith(`UnflushedTime:${this.doc_id}`) - .should.equal(true); - }); - - it("should get the pathname", function() { - return this.multi.get - .calledWith(`Pathname:${this.doc_id}`) - .should.equal(true); - }); - - it("should get the projectHistoryId as an integer", function() { - return this.multi.get - .calledWith(`ProjectHistoryId:${this.doc_id}`) - .should.equal(true); - }); - - it("should get lastUpdatedAt", function() { - return this.multi.get - .calledWith(`lastUpdatedAt:${this.doc_id}`) - .should.equal(true); - }); - - it("should get lastUpdatedBy", function() { - return this.multi.get - .calledWith(`lastUpdatedBy:${this.doc_id}`) - .should.equal(true); - }); - - it("should check if the document is in the DocsIn set", function() { - return this.rclient.sadd - .calledWith(`DocsIn:${this.project_id}`) - .should.equal(true); - }); - - it('should return the document', function() { - return this.callback - .calledWithExactly(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushed_time, this.lastUpdatedAt, this.lastUpdatedBy) - .should.equal(true); - }); - - return it('should not log any errors', function() { - return this.logger.error.calledWith() - .should.equal(false); - }); - }); - - describe("when the document is not present", function() { - beforeEach(function() { - this.multi.exec = sinon.stub().callsArgWith(0, null, [null, null, null, null, null, null, null, null, null, null]); - this.rclient.sadd = sinon.stub().yields(); - return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); - }); - - it("should not check if the document is in the DocsIn set", function() { - return this.rclient.sadd - .calledWith(`DocsIn:${this.project_id}`) - .should.equal(false); - }); - - it('should return an empty result', function() { - return this.callback - .calledWithExactly(null, null, 0, {}, null, null, null, null, null) - .should.equal(true); - }); - - return it('should not log any errors', function() { - return this.logger.error.calledWith() - .should.equal(false); - }); - }); - - describe("when the document is missing from the DocsIn set", function() { - beforeEach(function() { - this.rclient.sadd = sinon.stub().yields(null, 1); - return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); - }); - - it('should log an error', function() { - return this.logger.error.calledWith() - .should.equal(true); - }); - - return it('should return the document', function() { - return this.callback - .calledWithExactly(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.unflushed_time, this.lastUpdatedAt, this.lastUpdatedBy) - .should.equal(true); - }); - }); - - describe("with a corrupted document", function() { - beforeEach(function() { - this.badHash = "INVALID-HASH-VALUE"; - this.multi.exec = sinon.stub().callsArgWith(0, null, [this.jsonlines, this.version, this.badHash, this.project_id, this.json_ranges]); - return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); - }); - - it('should log a hash error', function() { - return 
this.logger.error.calledWith() - .should.equal(true); - }); - - return it('should return the document', function() { - return this.callback - .calledWith(null, this.lines, this.version, this.ranges) - .should.equal(true); - }); - }); - - - describe("with a slow request to redis", function() { - beforeEach(function() { - this.multi.exec = sinon.stub().callsArgWith(0, null, [this.jsonlines, this.version, this.badHash, this.project_id, this.json_ranges, this.pathname, this.unflushed_time]); - this.clock = sinon.useFakeTimers(); - this.multi.exec = cb => { - this.clock.tick(6000); - return cb(null, [this.jsonlines, this.version, this.another_project_id, this.json_ranges, this.pathname, this.unflushed_time]); - }; - - return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); - }); - - afterEach(function() { - return this.clock.restore(); - }); - - return it('should return an error', function() { - return this.callback - .calledWith(new Error("redis getDoc exceeded timeout")) - .should.equal(true); - }); - }); - - return describe("getDoc with an invalid project id", function() { - beforeEach(function() { - this.another_project_id = "project-id-456"; - this.multi.exec = sinon.stub().callsArgWith(0, null, [this.jsonlines, this.version, this.hash, this.another_project_id, this.json_ranges, this.pathname, this.unflushed_time]); - return this.RedisManager.getDoc(this.project_id, this.doc_id, this.callback); - }); - - return it('should return an error', function() { - return this.callback - .calledWith(new Errors.NotFoundError("not found")) - .should.equal(true); - }); - }); - }); - - describe("getPreviousDocOpsTests", function() { - describe("with a start and an end value", function() { - beforeEach(function() { - this.first_version_in_redis = 30; - this.version = 70; - this.length = this.version - this.first_version_in_redis; - this.start = 50; - this.end = 60; - this.ops = [ - { "mock": "op-1" }, - { "mock": "op-2" } - ]; - this.jsonOps = this.ops.map(op => JSON.stringify(op)); - this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length); - this.rclient.get = sinon.stub().callsArgWith(1, null, this.version.toString()); - this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps); - return this.RedisManager.getPreviousDocOps(this.doc_id, this.start, this.end, this.callback); - }); - - it("should get the length of the existing doc ops", function() { - return this.rclient.llen - .calledWith(`DocOps:${this.doc_id}`) - .should.equal(true); - }); - - it("should get the current version of the doc", function() { - return this.rclient.get - .calledWith(`DocVersion:${this.doc_id}`) - .should.equal(true); - }); - - it("should get the appropriate docs ops", function() { - return this.rclient.lrange - .calledWith(`DocOps:${this.doc_id}`, this.start - this.first_version_in_redis, this.end - this.first_version_in_redis) - .should.equal(true); - }); - - return it("should return the docs with the doc ops deserialized", function() { - return this.callback.calledWith(null, this.ops).should.equal(true); - }); - }); - - describe("with an end value of -1", function() { - beforeEach(function() { - this.first_version_in_redis = 30; - this.version = 70; - this.length = this.version - this.first_version_in_redis; - this.start = 50; - this.end = -1; - this.ops = [ - { "mock": "op-1" }, - { "mock": "op-2" } - ]; - this.jsonOps = this.ops.map(op => JSON.stringify(op)); - this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length); - this.rclient.get = 
sinon.stub().callsArgWith(1, null, this.version.toString()); - this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps); - return this.RedisManager.getPreviousDocOps(this.doc_id, this.start, this.end, this.callback); - }); - - it("should get the appropriate docs ops to the end of list", function() { - return this.rclient.lrange - .calledWith(`DocOps:${this.doc_id}`, this.start - this.first_version_in_redis, -1) - .should.equal(true); - }); - - return it("should return the docs with the doc ops deserialized", function() { - return this.callback.calledWith(null, this.ops).should.equal(true); - }); - }); - - describe("when the requested range is not in Redis", function() { - beforeEach(function() { - this.first_version_in_redis = 30; - this.version = 70; - this.length = this.version - this.first_version_in_redis; - this.start = 20; - this.end = -1; - this.ops = [ - { "mock": "op-1" }, - { "mock": "op-2" } - ]; - this.jsonOps = this.ops.map(op => JSON.stringify(op)); - this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length); - this.rclient.get = sinon.stub().callsArgWith(1, null, this.version.toString()); - this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps); - return this.RedisManager.getPreviousDocOps(this.doc_id, this.start, this.end, this.callback); - }); - - it("should return an error", function() { - return this.callback.calledWith(new Errors.OpRangeNotAvailableError("doc ops range is not loaded in redis")).should.equal(true); - }); - - return it("should log out the problem", function() { - return this.logger.warn.called.should.equal(true); - }); - }); - - return describe("with a slow request to redis", function() { - beforeEach(function() { - this.first_version_in_redis = 30; - this.version = 70; - this.length = this.version - this.first_version_in_redis; - this.start = 50; - this.end = 60; - this.ops = [ - { "mock": "op-1" }, - { "mock": "op-2" } - ]; - this.jsonOps = this.ops.map(op => JSON.stringify(op)); - this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length); - this.rclient.get = sinon.stub().callsArgWith(1, null, this.version.toString()); - this.clock = sinon.useFakeTimers(); - this.rclient.lrange = (key, start, end, cb) => { - this.clock.tick(6000); - return cb(null, this.jsonOps); - }; - return this.RedisManager.getPreviousDocOps(this.doc_id, this.start, this.end, this.callback); - }); - - afterEach(function() { - return this.clock.restore(); - }); - - return it('should return an error', function() { - return this.callback - .calledWith(new Error("redis getPreviousDocOps exceeded timeout")) - .should.equal(true); - }); - }); - }); - - - describe("updateDocument", function() { - beforeEach(function() { - this.lines = ["one", "two", "three", "これは"]; - this.ops = [{ op: [{ i: "foo", p: 4 }] },{ op: [{ i: "bar", p: 8 }] }]; - this.version = 42; - this.hash = crypto.createHash('sha1').update(JSON.stringify(this.lines),'utf8').digest('hex'); - this.ranges = { comments: "mock", entries: "mock" }; - this.updateMeta = { user_id: 'last-author-fake-id' }; - this.doc_update_list_length = sinon.stub(); - this.project_update_list_length = sinon.stub(); - - this.RedisManager.getDocVersion = sinon.stub(); - this.multi.set = sinon.stub(); - this.multi.rpush = sinon.stub(); - this.multi.expire = sinon.stub(); - this.multi.ltrim = sinon.stub(); - this.multi.del = sinon.stub(); - this.multi.exec = sinon.stub().callsArgWith(0, null, - [this.hash, null, null, null, null, null, null, this.doc_update_list_length, null, null] - ); - return 
this.ProjectHistoryRedisManager.queueOps = sinon.stub().callsArgWith( - this.ops.length + 1, null, this.project_update_list_length - ); - }); - - describe("with a consistent version", function() { - beforeEach(function() {}); - - - describe("with project history enabled", function() { - beforeEach(function() { - this.settings.apis.project_history.enabled = true; - this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length); - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); - }); - - it("should get the current doc version to check for consistency", function() { - return this.RedisManager.getDocVersion - .calledWith(this.doc_id) - .should.equal(true); - }); - - it("should set the doclines", function() { - return this.multi.set - .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) - .should.equal(true); - }); - - it("should set the version", function() { - return this.multi.set - .calledWith(`DocVersion:${this.doc_id}`, this.version) - .should.equal(true); - }); - - it("should set the hash", function() { - return this.multi.set - .calledWith(`DocHash:${this.doc_id}`, this.hash) - .should.equal(true); - }); - - it("should set the ranges", function() { - return this.multi.set - .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) - .should.equal(true); - }); - - it("should set the unflushed time", function() { - return this.multi.set - .calledWith(`UnflushedTime:${this.doc_id}`, Date.now(), "NX") - .should.equal(true); - }); - - it("should set the last updated time", function() { - return this.multi.set - .calledWith(`lastUpdatedAt:${this.doc_id}`, Date.now()) - .should.equal(true); - }); - - it("should set the last updater", function() { - return this.multi.set - .calledWith(`lastUpdatedBy:${this.doc_id}`, 'last-author-fake-id') - .should.equal(true); - }); - - it("should push the doc op into the doc ops list", function() { - return this.multi.rpush - .calledWith(`DocOps:${this.doc_id}`, JSON.stringify(this.ops[0]), JSON.stringify(this.ops[1])) - .should.equal(true); - }); - - it("should renew the expiry ttl on the doc ops array", function() { - return this.multi.expire - .calledWith(`DocOps:${this.doc_id}`, this.RedisManager.DOC_OPS_TTL) - .should.equal(true); - }); - - it("should truncate the list to 100 members", function() { - return this.multi.ltrim - .calledWith(`DocOps:${this.doc_id}`, -this.RedisManager.DOC_OPS_MAX_LENGTH, -1) - .should.equal(true); - }); - - it("should push the updates into the history ops list", function() { - return this.multi.rpush - .calledWith(`UncompressedHistoryOps:${this.doc_id}`, JSON.stringify(this.ops[0]), JSON.stringify(this.ops[1])) - .should.equal(true); - }); - - it("should push the updates into the project history ops list", function() { - return this.ProjectHistoryRedisManager.queueOps - .calledWith(this.project_id, JSON.stringify(this.ops[0])) - .should.equal(true); - }); - - it("should call the callback", function() { - return this.callback - .calledWith(null, this.doc_update_list_length, this.project_update_list_length) - .should.equal(true); - }); - - return it('should not log any errors', function() { - return this.logger.error.calledWith() - .should.equal(false); - }); - }); - - describe("with project history disabled", function() { - beforeEach(function() { - this.settings.apis.project_history.enabled = false; - 
this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length); - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); - }); - - it("should not push the updates into the project history ops list", function() { - return this.ProjectHistoryRedisManager.queueOps.called.should.equal(false); - }); - - return it("should call the callback", function() { - return this.callback - .calledWith(null, this.doc_update_list_length) - .should.equal(true); - }); - }); - - return describe("with a doc using project history only", function() { - beforeEach(function() { - this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length, 'project-history'); - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); - }); - - it("should not push the updates to the track-changes ops list", function() { - return this.multi.rpush - .calledWith(`UncompressedHistoryOps:${this.doc_id}`) - .should.equal(false); - }); - - it("should push the updates into the project history ops list", function() { - return this.ProjectHistoryRedisManager.queueOps - .calledWith(this.project_id, JSON.stringify(this.ops[0])) - .should.equal(true); - }); - - return it("should call the callback with the project update count only", function() { - return this.callback - .calledWith(null, undefined, this.project_update_list_length) - .should.equal(true); - }); - }); - }); - - describe("with an inconsistent version", function() { - beforeEach(function() { - this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length - 1); - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); - }); - - it("should not call multi.exec", function() { - return this.multi.exec.called.should.equal(false); - }); - - return it("should call the callback with an error", function() { - return this.callback - .calledWith(new Error(`Version mismatch. 
'${this.doc_id}' is corrupted.`)) - .should.equal(true); - }); - }); - - describe("with no updates", function() { - beforeEach(function() { - this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version); - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, [], this.ranges, this.updateMeta, this.callback); - }); - - it("should not try to enqueue doc updates", function() { - return this.multi.rpush - .called - .should.equal(false); - }); - - it("should not try to enqueue project updates", function() { - return this.ProjectHistoryRedisManager.queueOps - .called - .should.equal(false); - }); - - return it("should still set the doclines", function() { - return this.multi.set - .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) - .should.equal(true); - }); - }); - - describe("with empty ranges", function() { - beforeEach(function() { - this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length); - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, {}, this.updateMeta, this.callback); - }); - - it("should not set the ranges", function() { - return this.multi.set - .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) - .should.equal(false); - }); - - return it("should delete the ranges key", function() { - return this.multi.del - .calledWith(`Ranges:${this.doc_id}`) - .should.equal(true); - }); - }); - - describe("with null bytes in the serialized doc lines", function() { - beforeEach(function() { - this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length); - this._stringify = JSON.stringify; - this.JSON.stringify = () => '["bad bytes! 
\u0000 <- here"]'; - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); - }); - - afterEach(function() { - return this.JSON.stringify = this._stringify; - }); - - it("should log an error", function() { - return this.logger.error.called.should.equal(true); - }); - - return it("should call the callback with an error", function() { - return this.callback.calledWith(new Error("null bytes found in doc lines")).should.equal(true); - }); - }); - - describe("with ranges that are too big", function() { - beforeEach(function() { - this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length); - this.RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")); - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, this.updateMeta, this.callback); - }); - - it('should log an error', function() { - return this.logger.error.called.should.equal(true); - }); - - return it("should call the callback with the error", function() { - return this.callback.calledWith(new Error("ranges are too large")).should.equal(true); - }); - }); - - return describe("without user id from meta", function() { - beforeEach(function() { - this.RedisManager.getDocVersion.withArgs(this.doc_id).yields(null, this.version - this.ops.length); - return this.RedisManager.updateDocument(this.project_id, this.doc_id, this.lines, this.version, this.ops, this.ranges, {}, this.callback); - }); - - it("should set the last updater to null", function() { - return this.multi.del - .calledWith(`lastUpdatedBy:${this.doc_id}`) - .should.equal(true); - }); - - return it("should still set the last updated time", function() { - return this.multi.set - .calledWith(`lastUpdatedAt:${this.doc_id}`, Date.now()) - .should.equal(true); - }); - }); - }); - - describe("putDocInMemory", function() { - beforeEach(function() { - this.multi.set = sinon.stub(); - this.rclient.sadd = sinon.stub().yields(); - this.multi.del = sinon.stub(); - this.lines = ["one", "two", "three", "これは"]; - this.version = 42; - this.hash = crypto.createHash('sha1').update(JSON.stringify(this.lines),'utf8').digest('hex'); - this.multi.exec = sinon.stub().callsArgWith(0, null, [this.hash]); - this.ranges = { comments: "mock", entries: "mock" }; - return this.pathname = '/a/b/c.tex'; - }); - - describe("with non-empty ranges", function() { - beforeEach(function(done) { - return this.RedisManager.putDocInMemory(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, done); - }); - - it("should set the lines", function() { - return this.multi.set - .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) - .should.equal(true); - }); - - it("should set the version", function() { - return this.multi.set - .calledWith(`DocVersion:${this.doc_id}`, this.version) - .should.equal(true); - }); - - it("should set the hash", function() { - return this.multi.set - .calledWith(`DocHash:${this.doc_id}`, this.hash) - .should.equal(true); - }); - - it("should set the ranges", function() { - return this.multi.set - .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) - .should.equal(true); - }); - - it("should set the project_id for the doc", function() { - return this.multi.set - .calledWith(`ProjectId:${this.doc_id}`, this.project_id) - .should.equal(true); - }); - - it("should set the pathname for the 
doc", function() { - return this.multi.set - .calledWith(`Pathname:${this.doc_id}`, this.pathname) - .should.equal(true); - }); - - it("should set the projectHistoryId for the doc", function() { - return this.multi.set - .calledWith(`ProjectHistoryId:${this.doc_id}`, this.projectHistoryId) - .should.equal(true); - }); - - it("should add the doc_id to the project set", function() { - return this.rclient.sadd - .calledWith(`DocsIn:${this.project_id}`, this.doc_id) - .should.equal(true); - }); - - return it('should not log any errors', function() { - return this.logger.error.calledWith() - .should.equal(false); - }); - }); - - describe("with empty ranges", function() { - beforeEach(function(done) { - return this.RedisManager.putDocInMemory(this.project_id, this.doc_id, this.lines, this.version, {}, this.pathname, this.projectHistoryId, done); - }); - - it("should delete the ranges key", function() { - return this.multi.del - .calledWith(`Ranges:${this.doc_id}`) - .should.equal(true); - }); - - return it("should not set the ranges", function() { - return this.multi.set - .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) - .should.equal(false); - }); - }); - - describe("with null bytes in the serialized doc lines", function() { - beforeEach(function() { - this._stringify = JSON.stringify; - this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]'; - return this.RedisManager.putDocInMemory(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.callback); - }); - - afterEach(function() { - return this.JSON.stringify = this._stringify; - }); - - it("should log an error", function() { - return this.logger.error.called.should.equal(true); - }); - - return it("should call the callback with an error", function() { - return this.callback.calledWith(new Error("null bytes found in doc lines")).should.equal(true); - }); - }); - - return describe("with ranges that are too big", function() { - beforeEach(function() { - this.RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large")); - return this.RedisManager.putDocInMemory(this.project_id, this.doc_id, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId, this.callback); - }); - - it('should log an error', function() { - return this.logger.error.called.should.equal(true); - }); - - return it("should call the callback with the error", function() { - return this.callback.calledWith(new Error("ranges are too large")).should.equal(true); - }); - }); - }); - - describe("removeDocFromMemory", function() { - beforeEach(function(done) { - this.multi.strlen = sinon.stub(); - this.multi.del = sinon.stub(); - this.multi.srem = sinon.stub(); - this.multi.exec.yields(); - return this.RedisManager.removeDocFromMemory(this.project_id, this.doc_id, done); - }); - - it("should check the length of the current doclines", function() { - return this.multi.strlen - .calledWith(`doclines:${this.doc_id}`) - .should.equal(true); - }); - - it("should delete the lines", function() { - return this.multi.del - .calledWith(`doclines:${this.doc_id}`) - .should.equal(true); - }); - - it("should delete the version", function() { - return this.multi.del - .calledWith(`DocVersion:${this.doc_id}`) - .should.equal(true); - }); - - it("should delete the hash", function() { - return this.multi.del - .calledWith(`DocHash:${this.doc_id}`) - .should.equal(true); - }); - - it("should delete the unflushed time", function() { - return this.multi.del - 
.calledWith(`UnflushedTime:${this.doc_id}`) - .should.equal(true); - }); - - it("should delete the project_id for the doc", function() { - return this.multi.del - .calledWith(`ProjectId:${this.doc_id}`) - .should.equal(true); - }); - - it("should remove the doc_id from the project set", function() { - return this.multi.srem - .calledWith(`DocsIn:${this.project_id}`, this.doc_id) - .should.equal(true); - }); - - it("should delete the pathname for the doc", function() { - return this.multi.del - .calledWith(`Pathname:${this.doc_id}`) - .should.equal(true); - }); - - it("should delete the pathname for the doc", function() { - return this.multi.del - .calledWith(`ProjectHistoryId:${this.doc_id}`) - .should.equal(true); - }); - - it("should delete lastUpdatedAt", function() { - return this.multi.del - .calledWith(`lastUpdatedAt:${this.doc_id}`) - .should.equal(true); - }); - - return it("should delete lastUpdatedBy", function() { - return this.multi.del - .calledWith(`lastUpdatedBy:${this.doc_id}`) - .should.equal(true); - }); - }); - - - describe("clearProjectState", function() { - beforeEach(function(done) { - this.rclient.del = sinon.stub().callsArg(1); - return this.RedisManager.clearProjectState(this.project_id, done); - }); - - return it("should delete the project state", function() { - return this.rclient.del - .calledWith(`ProjectState:${this.project_id}`) - .should.equal(true); - }); - }); - - return describe("renameDoc", function() { - beforeEach(function() { - this.rclient.rpush = sinon.stub().yields(); - this.rclient.set = sinon.stub().yields(); - return this.update = { - id: this.doc_id, - pathname: (this.pathname = 'pathname'), - newPathname: (this.newPathname = 'new-pathname') - }; - }); - - describe("the document is cached in redis", function() { - beforeEach(function() { - this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version'); - this.ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(); - return this.RedisManager.renameDoc(this.project_id, this.doc_id, this.userId, this.update, this.projectHistoryId, this.callback); - }); - - it("update the cached pathname", function() { - return this.rclient.set - .calledWith(`Pathname:${this.doc_id}`, this.newPathname) - .should.equal(true); - }); - - return it("should queue an update", function() { - return this.ProjectHistoryRedisManager.queueRenameEntity - .calledWithExactly(this.project_id, this.projectHistoryId, 'doc', this.doc_id, this.userId, this.update, this.callback) - .should.equal(true); - }); - }); - - describe("the document is not cached in redis", function() { - beforeEach(function() { - this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null); - this.ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(); - return this.RedisManager.renameDoc(this.project_id, this.doc_id, this.userId, this.update, this.projectHistoryId, this.callback); - }); - - it("does not update the cached pathname", function() { - return this.rclient.set.called.should.equal(false); - }); - - return it("should queue an update", function() { - return this.ProjectHistoryRedisManager.queueRenameEntity - .calledWithExactly(this.project_id, this.projectHistoryId, 'doc', this.doc_id, this.userId, this.update, this.callback) - .should.equal(true); - }); - }); - - return describe("getDocVersion", function() { - beforeEach(function() { - return this.version = 12345; - }); - - describe("when the document does not have a project history type set", function() { - beforeEach(function() { - 
this.rclient.mget = sinon.stub().withArgs(`DocVersion:${this.doc_id}`, `ProjectHistoryType:${this.doc_id}`).callsArgWith(2, null, [`${this.version}`]); - return this.RedisManager.getDocVersion(this.doc_id, this.callback); - }); - - return it("should return the document version and an undefined history type", function() { - return this.callback.calledWithExactly(null, this.version, undefined).should.equal(true); - }); - }); - - return describe("when the document has a project history type set", function() { - beforeEach(function() { - this.rclient.mget = sinon.stub().withArgs(`DocVersion:${this.doc_id}`, `ProjectHistoryType:${this.doc_id}`).callsArgWith(2, null, [`${this.version}`, 'project-history']); - return this.RedisManager.getDocVersion(this.doc_id, this.callback); - }); - - return it("should return the document version and history type", function() { - return this.callback.calledWithExactly(null, this.version, 'project-history').should.equal(true); - }); - }); - }); - }); -}); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/RedisManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') +const crypto = require('crypto') +const tk = require('timekeeper') + +describe('RedisManager', function () { + beforeEach(function () { + let Timer + this.multi = { exec: sinon.stub() } + this.rclient = { multi: () => this.multi } + tk.freeze(new Date()) + this.RedisManager = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': (this.logger = { + error: sinon.stub(), + log: sinon.stub(), + warn: sinon.stub() + }), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + 'settings-sharelatex': (this.settings = { + documentupdater: { logHashErrors: { write: true, read: true } }, + apis: { + project_history: { enabled: true } + }, + redis: { + documentupdater: { + key_schema: { + blockingKey({ doc_id }) { + return `Blocking:${doc_id}` + }, + docLines({ doc_id }) { + return `doclines:${doc_id}` + }, + docOps({ doc_id }) { + return `DocOps:${doc_id}` + }, + docVersion({ doc_id }) { + return `DocVersion:${doc_id}` + }, + docHash({ doc_id }) { + return `DocHash:${doc_id}` + }, + projectKey({ doc_id }) { + return `ProjectId:${doc_id}` + }, + pendingUpdates({ doc_id }) { + return `PendingUpdates:${doc_id}` + }, + docsInProject({ project_id }) { + return `DocsIn:${project_id}` + }, + ranges({ doc_id }) { + return `Ranges:${doc_id}` + }, + pathname({ doc_id }) { + return `Pathname:${doc_id}` + }, + projectHistoryId({ doc_id }) { + return `ProjectHistoryId:${doc_id}` + }, + projectHistoryType({ doc_id }) { + return `ProjectHistoryType:${doc_id}` + }, + projectState({ project_id }) { + return `ProjectState:${project_id}` + }, + unflushedTime({ doc_id }) { + return `UnflushedTime:${doc_id}` + }, + lastUpdatedBy({ doc_id }) { + return `lastUpdatedBy:${doc_id}` + }, + lastUpdatedAt({ doc_id }) { + return `lastUpdatedAt:${doc_id}` + } + } + }, + history: { + key_schema: { + uncompressedHistoryOps({ doc_id }) { + return `UncompressedHistoryOps:${doc_id}` + }, + docsWithHistoryOps({ project_id }) { + return `DocsWithHistoryOps:${project_id}` + } + } + } + } + }), + 'redis-sharelatex': { + createClient: () => this.rclient + }, + './Metrics': (this.metrics = { + inc: sinon.stub(), + summary: sinon.stub(), + Timer: (Timer = class Timer { + constructor() { + this.start = new Date() + } + + done() { + const timeSpan = new Date() - this.start + 
return timeSpan + } + }) + }), + './Errors': Errors + }, + globals: { + JSON: (this.JSON = JSON) + } + }) + + this.doc_id = 'doc-id-123' + this.project_id = 'project-id-123' + this.projectHistoryId = 123 + return (this.callback = sinon.stub()) + }) + + afterEach(function () { + return tk.reset() + }) + + describe('getDoc', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three', 'これは'] // include some utf8 + this.jsonlines = JSON.stringify(this.lines) + this.version = 42 + this.hash = crypto + .createHash('sha1') + .update(this.jsonlines, 'utf8') + .digest('hex') + this.ranges = { comments: 'mock', entries: 'mock' } + this.json_ranges = JSON.stringify(this.ranges) + this.unflushed_time = 12345 + this.pathname = '/a/b/c.tex' + this.multi.get = sinon.stub() + this.multi.exec = sinon + .stub() + .callsArgWith(0, null, [ + this.jsonlines, + this.version, + this.hash, + this.project_id, + this.json_ranges, + this.pathname, + this.projectHistoryId.toString(), + this.unflushed_time + ]) + return (this.rclient.sadd = sinon.stub().yields(null, 0)) + }) + + describe('successfully', function () { + beforeEach(function () { + return this.RedisManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should get the lines from redis', function () { + return this.multi.get + .calledWith(`doclines:${this.doc_id}`) + .should.equal(true) + }) + + it('should get the version from', function () { + return this.multi.get + .calledWith(`DocVersion:${this.doc_id}`) + .should.equal(true) + }) + + it('should get the hash', function () { + return this.multi.get + .calledWith(`DocHash:${this.doc_id}`) + .should.equal(true) + }) + + it('should get the ranges', function () { + return this.multi.get + .calledWith(`Ranges:${this.doc_id}`) + .should.equal(true) + }) + + it('should get the unflushed time', function () { + return this.multi.get + .calledWith(`UnflushedTime:${this.doc_id}`) + .should.equal(true) + }) + + it('should get the pathname', function () { + return this.multi.get + .calledWith(`Pathname:${this.doc_id}`) + .should.equal(true) + }) + + it('should get the projectHistoryId as an integer', function () { + return this.multi.get + .calledWith(`ProjectHistoryId:${this.doc_id}`) + .should.equal(true) + }) + + it('should get lastUpdatedAt', function () { + return this.multi.get + .calledWith(`lastUpdatedAt:${this.doc_id}`) + .should.equal(true) + }) + + it('should get lastUpdatedBy', function () { + return this.multi.get + .calledWith(`lastUpdatedBy:${this.doc_id}`) + .should.equal(true) + }) + + it('should check if the document is in the DocsIn set', function () { + return this.rclient.sadd + .calledWith(`DocsIn:${this.project_id}`) + .should.equal(true) + }) + + it('should return the document', function () { + return this.callback + .calledWithExactly( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.unflushed_time, + this.lastUpdatedAt, + this.lastUpdatedBy + ) + .should.equal(true) + }) + + return it('should not log any errors', function () { + return this.logger.error.calledWith().should.equal(false) + }) + }) + + describe('when the document is not present', function () { + beforeEach(function () { + this.multi.exec = sinon + .stub() + .callsArgWith(0, null, [ + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ]) + this.rclient.sadd = sinon.stub().yields() + return this.RedisManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should not check if 
the document is in the DocsIn set', function () { + return this.rclient.sadd + .calledWith(`DocsIn:${this.project_id}`) + .should.equal(false) + }) + + it('should return an empty result', function () { + return this.callback + .calledWithExactly(null, null, 0, {}, null, null, null, null, null) + .should.equal(true) + }) + + return it('should not log any errors', function () { + return this.logger.error.calledWith().should.equal(false) + }) + }) + + describe('when the document is missing from the DocsIn set', function () { + beforeEach(function () { + this.rclient.sadd = sinon.stub().yields(null, 1) + return this.RedisManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should log an error', function () { + return this.logger.error.calledWith().should.equal(true) + }) + + return it('should return the document', function () { + return this.callback + .calledWithExactly( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.unflushed_time, + this.lastUpdatedAt, + this.lastUpdatedBy + ) + .should.equal(true) + }) + }) + + describe('with a corrupted document', function () { + beforeEach(function () { + this.badHash = 'INVALID-HASH-VALUE' + this.multi.exec = sinon + .stub() + .callsArgWith(0, null, [ + this.jsonlines, + this.version, + this.badHash, + this.project_id, + this.json_ranges + ]) + return this.RedisManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should log a hash error', function () { + return this.logger.error.calledWith().should.equal(true) + }) + + return it('should return the document', function () { + return this.callback + .calledWith(null, this.lines, this.version, this.ranges) + .should.equal(true) + }) + }) + + describe('with a slow request to redis', function () { + beforeEach(function () { + this.multi.exec = sinon + .stub() + .callsArgWith(0, null, [ + this.jsonlines, + this.version, + this.badHash, + this.project_id, + this.json_ranges, + this.pathname, + this.unflushed_time + ]) + this.clock = sinon.useFakeTimers() + this.multi.exec = (cb) => { + this.clock.tick(6000) + return cb(null, [ + this.jsonlines, + this.version, + this.another_project_id, + this.json_ranges, + this.pathname, + this.unflushed_time + ]) + } + + return this.RedisManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + afterEach(function () { + return this.clock.restore() + }) + + return it('should return an error', function () { + return this.callback + .calledWith(new Error('redis getDoc exceeded timeout')) + .should.equal(true) + }) + }) + + return describe('getDoc with an invalid project id', function () { + beforeEach(function () { + this.another_project_id = 'project-id-456' + this.multi.exec = sinon + .stub() + .callsArgWith(0, null, [ + this.jsonlines, + this.version, + this.hash, + this.another_project_id, + this.json_ranges, + this.pathname, + this.unflushed_time + ]) + return this.RedisManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + return it('should return an error', function () { + return this.callback + .calledWith(new Errors.NotFoundError('not found')) + .should.equal(true) + }) + }) + }) + + describe('getPreviousDocOpsTests', function () { + describe('with a start and an end value', function () { + beforeEach(function () { + this.first_version_in_redis = 30 + this.version = 70 + this.length = this.version - this.first_version_in_redis + this.start = 50 + this.end = 60 + this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] + 
this.jsonOps = this.ops.map((op) => JSON.stringify(op)) + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) + this.rclient.get = sinon + .stub() + .callsArgWith(1, null, this.version.toString()) + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps) + return this.RedisManager.getPreviousDocOps( + this.doc_id, + this.start, + this.end, + this.callback + ) + }) + + it('should get the length of the existing doc ops', function () { + return this.rclient.llen + .calledWith(`DocOps:${this.doc_id}`) + .should.equal(true) + }) + + it('should get the current version of the doc', function () { + return this.rclient.get + .calledWith(`DocVersion:${this.doc_id}`) + .should.equal(true) + }) + + it('should get the appropriate docs ops', function () { + return this.rclient.lrange + .calledWith( + `DocOps:${this.doc_id}`, + this.start - this.first_version_in_redis, + this.end - this.first_version_in_redis + ) + .should.equal(true) + }) + + return it('should return the docs with the doc ops deserialized', function () { + return this.callback.calledWith(null, this.ops).should.equal(true) + }) + }) + + describe('with an end value of -1', function () { + beforeEach(function () { + this.first_version_in_redis = 30 + this.version = 70 + this.length = this.version - this.first_version_in_redis + this.start = 50 + this.end = -1 + this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] + this.jsonOps = this.ops.map((op) => JSON.stringify(op)) + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) + this.rclient.get = sinon + .stub() + .callsArgWith(1, null, this.version.toString()) + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps) + return this.RedisManager.getPreviousDocOps( + this.doc_id, + this.start, + this.end, + this.callback + ) + }) + + it('should get the appropriate docs ops to the end of list', function () { + return this.rclient.lrange + .calledWith( + `DocOps:${this.doc_id}`, + this.start - this.first_version_in_redis, + -1 + ) + .should.equal(true) + }) + + return it('should return the docs with the doc ops deserialized', function () { + return this.callback.calledWith(null, this.ops).should.equal(true) + }) + }) + + describe('when the requested range is not in Redis', function () { + beforeEach(function () { + this.first_version_in_redis = 30 + this.version = 70 + this.length = this.version - this.first_version_in_redis + this.start = 20 + this.end = -1 + this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] + this.jsonOps = this.ops.map((op) => JSON.stringify(op)) + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) + this.rclient.get = sinon + .stub() + .callsArgWith(1, null, this.version.toString()) + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps) + return this.RedisManager.getPreviousDocOps( + this.doc_id, + this.start, + this.end, + this.callback + ) + }) + + it('should return an error', function () { + return this.callback + .calledWith( + new Errors.OpRangeNotAvailableError( + 'doc ops range is not loaded in redis' + ) + ) + .should.equal(true) + }) + + return it('should log out the problem', function () { + return this.logger.warn.called.should.equal(true) + }) + }) + + return describe('with a slow request to redis', function () { + beforeEach(function () { + this.first_version_in_redis = 30 + this.version = 70 + this.length = this.version - this.first_version_in_redis + this.start = 50 + this.end = 60 + this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] + this.jsonOps = this.ops.map((op) => 
JSON.stringify(op)) + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) + this.rclient.get = sinon + .stub() + .callsArgWith(1, null, this.version.toString()) + this.clock = sinon.useFakeTimers() + this.rclient.lrange = (key, start, end, cb) => { + this.clock.tick(6000) + return cb(null, this.jsonOps) + } + return this.RedisManager.getPreviousDocOps( + this.doc_id, + this.start, + this.end, + this.callback + ) + }) + + afterEach(function () { + return this.clock.restore() + }) + + return it('should return an error', function () { + return this.callback + .calledWith(new Error('redis getPreviousDocOps exceeded timeout')) + .should.equal(true) + }) + }) + }) + + describe('updateDocument', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three', 'これは'] + this.ops = [{ op: [{ i: 'foo', p: 4 }] }, { op: [{ i: 'bar', p: 8 }] }] + this.version = 42 + this.hash = crypto + .createHash('sha1') + .update(JSON.stringify(this.lines), 'utf8') + .digest('hex') + this.ranges = { comments: 'mock', entries: 'mock' } + this.updateMeta = { user_id: 'last-author-fake-id' } + this.doc_update_list_length = sinon.stub() + this.project_update_list_length = sinon.stub() + + this.RedisManager.getDocVersion = sinon.stub() + this.multi.set = sinon.stub() + this.multi.rpush = sinon.stub() + this.multi.expire = sinon.stub() + this.multi.ltrim = sinon.stub() + this.multi.del = sinon.stub() + this.multi.exec = sinon + .stub() + .callsArgWith(0, null, [ + this.hash, + null, + null, + null, + null, + null, + null, + this.doc_update_list_length, + null, + null + ]) + return (this.ProjectHistoryRedisManager.queueOps = sinon + .stub() + .callsArgWith( + this.ops.length + 1, + null, + this.project_update_list_length + )) + }) + + describe('with a consistent version', function () { + beforeEach(function () {}) + + describe('with project history enabled', function () { + beforeEach(function () { + this.settings.apis.project_history.enabled = true + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version - this.ops.length) + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should get the current doc version to check for consistency', function () { + return this.RedisManager.getDocVersion + .calledWith(this.doc_id) + .should.equal(true) + }) + + it('should set the doclines', function () { + return this.multi.set + .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) + .should.equal(true) + }) + + it('should set the version', function () { + return this.multi.set + .calledWith(`DocVersion:${this.doc_id}`, this.version) + .should.equal(true) + }) + + it('should set the hash', function () { + return this.multi.set + .calledWith(`DocHash:${this.doc_id}`, this.hash) + .should.equal(true) + }) + + it('should set the ranges', function () { + return this.multi.set + .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) + .should.equal(true) + }) + + it('should set the unflushed time', function () { + return this.multi.set + .calledWith(`UnflushedTime:${this.doc_id}`, Date.now(), 'NX') + .should.equal(true) + }) + + it('should set the last updated time', function () { + return this.multi.set + .calledWith(`lastUpdatedAt:${this.doc_id}`, Date.now()) + .should.equal(true) + }) + + it('should set the last updater', function () { + return this.multi.set + .calledWith(`lastUpdatedBy:${this.doc_id}`, 
'last-author-fake-id') + .should.equal(true) + }) + + it('should push the doc op into the doc ops list', function () { + return this.multi.rpush + .calledWith( + `DocOps:${this.doc_id}`, + JSON.stringify(this.ops[0]), + JSON.stringify(this.ops[1]) + ) + .should.equal(true) + }) + + it('should renew the expiry ttl on the doc ops array', function () { + return this.multi.expire + .calledWith(`DocOps:${this.doc_id}`, this.RedisManager.DOC_OPS_TTL) + .should.equal(true) + }) + + it('should truncate the list to 100 members', function () { + return this.multi.ltrim + .calledWith( + `DocOps:${this.doc_id}`, + -this.RedisManager.DOC_OPS_MAX_LENGTH, + -1 + ) + .should.equal(true) + }) + + it('should push the updates into the history ops list', function () { + return this.multi.rpush + .calledWith( + `UncompressedHistoryOps:${this.doc_id}`, + JSON.stringify(this.ops[0]), + JSON.stringify(this.ops[1]) + ) + .should.equal(true) + }) + + it('should push the updates into the project history ops list', function () { + return this.ProjectHistoryRedisManager.queueOps + .calledWith(this.project_id, JSON.stringify(this.ops[0])) + .should.equal(true) + }) + + it('should call the callback', function () { + return this.callback + .calledWith( + null, + this.doc_update_list_length, + this.project_update_list_length + ) + .should.equal(true) + }) + + return it('should not log any errors', function () { + return this.logger.error.calledWith().should.equal(false) + }) + }) + + describe('with project history disabled', function () { + beforeEach(function () { + this.settings.apis.project_history.enabled = false + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version - this.ops.length) + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should not push the updates into the project history ops list', function () { + return this.ProjectHistoryRedisManager.queueOps.called.should.equal( + false + ) + }) + + return it('should call the callback', function () { + return this.callback + .calledWith(null, this.doc_update_list_length) + .should.equal(true) + }) + }) + + return describe('with a doc using project history only', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version - this.ops.length, 'project-history') + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should not push the updates to the track-changes ops list', function () { + return this.multi.rpush + .calledWith(`UncompressedHistoryOps:${this.doc_id}`) + .should.equal(false) + }) + + it('should push the updates into the project history ops list', function () { + return this.ProjectHistoryRedisManager.queueOps + .calledWith(this.project_id, JSON.stringify(this.ops[0])) + .should.equal(true) + }) + + return it('should call the callback with the project update count only', function () { + return this.callback + .calledWith(null, undefined, this.project_update_list_length) + .should.equal(true) + }) + }) + }) + + describe('with an inconsistent version', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version - this.ops.length - 1) + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + 
this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should not call multi.exec', function () { + return this.multi.exec.called.should.equal(false) + }) + + return it('should call the callback with an error', function () { + return this.callback + .calledWith( + new Error(`Version mismatch. '${this.doc_id}' is corrupted.`) + ) + .should.equal(true) + }) + }) + + describe('with no updates', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version) + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + this.version, + [], + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should not try to enqueue doc updates', function () { + return this.multi.rpush.called.should.equal(false) + }) + + it('should not try to enqueue project updates', function () { + return this.ProjectHistoryRedisManager.queueOps.called.should.equal( + false + ) + }) + + return it('should still set the doclines', function () { + return this.multi.set + .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) + .should.equal(true) + }) + }) + + describe('with empty ranges', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version - this.ops.length) + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ops, + {}, + this.updateMeta, + this.callback + ) + }) + + it('should not set the ranges', function () { + return this.multi.set + .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) + .should.equal(false) + }) + + return it('should delete the ranges key', function () { + return this.multi.del + .calledWith(`Ranges:${this.doc_id}`) + .should.equal(true) + }) + }) + + describe('with null bytes in the serialized doc lines', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version - this.ops.length) + this._stringify = JSON.stringify + this.JSON.stringify = () => '["bad bytes! 
\u0000 <- here"]' + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + afterEach(function () { + return (this.JSON.stringify = this._stringify) + }) + + it('should log an error', function () { + return this.logger.error.called.should.equal(true) + }) + + return it('should call the callback with an error', function () { + return this.callback + .calledWith(new Error('null bytes found in doc lines')) + .should.equal(true) + }) + }) + + describe('with ranges that are too big', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version - this.ops.length) + this.RedisManager._serializeRanges = sinon + .stub() + .yields(new Error('ranges are too large')) + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should log an error', function () { + return this.logger.error.called.should.equal(true) + }) + + return it('should call the callback with the error', function () { + return this.callback + .calledWith(new Error('ranges are too large')) + .should.equal(true) + }) + }) + + return describe('without user id from meta', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.doc_id) + .yields(null, this.version - this.ops.length) + return this.RedisManager.updateDocument( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ops, + this.ranges, + {}, + this.callback + ) + }) + + it('should set the last updater to null', function () { + return this.multi.del + .calledWith(`lastUpdatedBy:${this.doc_id}`) + .should.equal(true) + }) + + return it('should still set the last updated time', function () { + return this.multi.set + .calledWith(`lastUpdatedAt:${this.doc_id}`, Date.now()) + .should.equal(true) + }) + }) + }) + + describe('putDocInMemory', function () { + beforeEach(function () { + this.multi.set = sinon.stub() + this.rclient.sadd = sinon.stub().yields() + this.multi.del = sinon.stub() + this.lines = ['one', 'two', 'three', 'これは'] + this.version = 42 + this.hash = crypto + .createHash('sha1') + .update(JSON.stringify(this.lines), 'utf8') + .digest('hex') + this.multi.exec = sinon.stub().callsArgWith(0, null, [this.hash]) + this.ranges = { comments: 'mock', entries: 'mock' } + return (this.pathname = '/a/b/c.tex') + }) + + describe('with non-empty ranges', function () { + beforeEach(function (done) { + return this.RedisManager.putDocInMemory( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + done + ) + }) + + it('should set the lines', function () { + return this.multi.set + .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) + .should.equal(true) + }) + + it('should set the version', function () { + return this.multi.set + .calledWith(`DocVersion:${this.doc_id}`, this.version) + .should.equal(true) + }) + + it('should set the hash', function () { + return this.multi.set + .calledWith(`DocHash:${this.doc_id}`, this.hash) + .should.equal(true) + }) + + it('should set the ranges', function () { + return this.multi.set + .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) + .should.equal(true) + }) + + it('should set the project_id for the doc', function () { + return this.multi.set + 
.calledWith(`ProjectId:${this.doc_id}`, this.project_id) + .should.equal(true) + }) + + it('should set the pathname for the doc', function () { + return this.multi.set + .calledWith(`Pathname:${this.doc_id}`, this.pathname) + .should.equal(true) + }) + + it('should set the projectHistoryId for the doc', function () { + return this.multi.set + .calledWith(`ProjectHistoryId:${this.doc_id}`, this.projectHistoryId) + .should.equal(true) + }) + + it('should add the doc_id to the project set', function () { + return this.rclient.sadd + .calledWith(`DocsIn:${this.project_id}`, this.doc_id) + .should.equal(true) + }) + + return it('should not log any errors', function () { + return this.logger.error.calledWith().should.equal(false) + }) + }) + + describe('with empty ranges', function () { + beforeEach(function (done) { + return this.RedisManager.putDocInMemory( + this.project_id, + this.doc_id, + this.lines, + this.version, + {}, + this.pathname, + this.projectHistoryId, + done + ) + }) + + it('should delete the ranges key', function () { + return this.multi.del + .calledWith(`Ranges:${this.doc_id}`) + .should.equal(true) + }) + + return it('should not set the ranges', function () { + return this.multi.set + .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) + .should.equal(false) + }) + }) + + describe('with null bytes in the serialized doc lines', function () { + beforeEach(function () { + this._stringify = JSON.stringify + this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]' + return this.RedisManager.putDocInMemory( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.callback + ) + }) + + afterEach(function () { + return (this.JSON.stringify = this._stringify) + }) + + it('should log an error', function () { + return this.logger.error.called.should.equal(true) + }) + + return it('should call the callback with an error', function () { + return this.callback + .calledWith(new Error('null bytes found in doc lines')) + .should.equal(true) + }) + }) + + return describe('with ranges that are too big', function () { + beforeEach(function () { + this.RedisManager._serializeRanges = sinon + .stub() + .yields(new Error('ranges are too large')) + return this.RedisManager.putDocInMemory( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.callback + ) + }) + + it('should log an error', function () { + return this.logger.error.called.should.equal(true) + }) + + return it('should call the callback with the error', function () { + return this.callback + .calledWith(new Error('ranges are too large')) + .should.equal(true) + }) + }) + }) + + describe('removeDocFromMemory', function () { + beforeEach(function (done) { + this.multi.strlen = sinon.stub() + this.multi.del = sinon.stub() + this.multi.srem = sinon.stub() + this.multi.exec.yields() + return this.RedisManager.removeDocFromMemory( + this.project_id, + this.doc_id, + done + ) + }) + + it('should check the length of the current doclines', function () { + return this.multi.strlen + .calledWith(`doclines:${this.doc_id}`) + .should.equal(true) + }) + + it('should delete the lines', function () { + return this.multi.del + .calledWith(`doclines:${this.doc_id}`) + .should.equal(true) + }) + + it('should delete the version', function () { + return this.multi.del + .calledWith(`DocVersion:${this.doc_id}`) + .should.equal(true) + }) + + it('should delete the hash', function () { + return 
this.multi.del +        .calledWith(`DocHash:${this.doc_id}`) +        .should.equal(true) +    }) + +    it('should delete the unflushed time', function () { +      return this.multi.del +        .calledWith(`UnflushedTime:${this.doc_id}`) +        .should.equal(true) +    }) + +    it('should delete the project_id for the doc', function () { +      return this.multi.del +        .calledWith(`ProjectId:${this.doc_id}`) +        .should.equal(true) +    }) + +    it('should remove the doc_id from the project set', function () { +      return this.multi.srem +        .calledWith(`DocsIn:${this.project_id}`, this.doc_id) +        .should.equal(true) +    }) + +    it('should delete the pathname for the doc', function () { +      return this.multi.del +        .calledWith(`Pathname:${this.doc_id}`) +        .should.equal(true) +    }) + +    it('should delete the projectHistoryId for the doc', function () { +      return this.multi.del +        .calledWith(`ProjectHistoryId:${this.doc_id}`) +        .should.equal(true) +    }) + +    it('should delete lastUpdatedAt', function () { +      return this.multi.del +        .calledWith(`lastUpdatedAt:${this.doc_id}`) +        .should.equal(true) +    }) + +    return it('should delete lastUpdatedBy', function () { +      return this.multi.del +        .calledWith(`lastUpdatedBy:${this.doc_id}`) +        .should.equal(true) +    }) +  }) + +  describe('clearProjectState', function () { +    beforeEach(function (done) { +      this.rclient.del = sinon.stub().callsArg(1) +      return this.RedisManager.clearProjectState(this.project_id, done) +    }) + +    return it('should delete the project state', function () { +      return this.rclient.del +        .calledWith(`ProjectState:${this.project_id}`) +        .should.equal(true) +    }) +  }) + +  return describe('renameDoc', function () { +    beforeEach(function () { +      this.rclient.rpush = sinon.stub().yields() +      this.rclient.set = sinon.stub().yields() +      return (this.update = { +        id: this.doc_id, +        pathname: (this.pathname = 'pathname'), +        newPathname: (this.newPathname = 'new-pathname') +      }) +    }) + +    describe('the document is cached in redis', function () { +      beforeEach(function () { +        this.RedisManager.getDoc = sinon +          .stub() +          .callsArgWith(2, null, 'lines', 'version') +        this.ProjectHistoryRedisManager.queueRenameEntity = sinon +          .stub() +          .yields() +        return this.RedisManager.renameDoc( +          this.project_id, +          this.doc_id, +          this.userId, +          this.update, +          this.projectHistoryId, +          this.callback +        ) +      }) + +      it('should update the cached pathname', function () { +        return this.rclient.set +          .calledWith(`Pathname:${this.doc_id}`, this.newPathname) +          .should.equal(true) +      }) + +      return it('should queue an update', function () { +        return this.ProjectHistoryRedisManager.queueRenameEntity +          .calledWithExactly( +            this.project_id, +            this.projectHistoryId, +            'doc', +            this.doc_id, +            this.userId, +            this.update, +            this.callback +          ) +          .should.equal(true) +      }) +    }) + +    describe('the document is not cached in redis', function () { +      beforeEach(function () { +        this.RedisManager.getDoc = sinon +          .stub() +          .callsArgWith(2, null, null, null) +        this.ProjectHistoryRedisManager.queueRenameEntity = sinon +          .stub() +          .yields() +        return this.RedisManager.renameDoc( +          this.project_id, +          this.doc_id, +          this.userId, +          this.update, +          this.projectHistoryId, +          this.callback +        ) +      }) + +      it('does not update the cached pathname', function () { +        return this.rclient.set.called.should.equal(false) +      }) + +      return it('should queue an update', function () { +        return this.ProjectHistoryRedisManager.queueRenameEntity +          .calledWithExactly( +            this.project_id, +            this.projectHistoryId, +            'doc', +            this.doc_id, +            this.userId, +            this.update, +            this.callback +          ) +          .should.equal(true) +      }) +    }) + + 
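// The getDocVersion cases below stub rclient.mget with the DocVersion and
// ProjectHistoryType keys and expect (null, version, projectHistoryType) in
// the callback. A minimal sketch of the shape of code they exercise,
// inferred from those stubs rather than copied from the actual RedisManager
// source (getDocVersionSketch is an illustrative name, not the real API):
const getDocVersionSketch = (rclient, doc_id, callback) =>
  rclient.mget(
    `DocVersion:${doc_id}`,
    `ProjectHistoryType:${doc_id}`,
    (error, result) => {
      if (error != null) return callback(error)
      const [version, projectHistoryType] = result || []
      // redis stores the version as a string; parse it back to a number,
      // and pass the history type through (undefined when the key is unset)
      return callback(null, parseInt(version, 10), projectHistoryType)
    }
  )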
return describe('getDocVersion', function () { + beforeEach(function () { + return (this.version = 12345) + }) + + describe('when the document does not have a project history type set', function () { + beforeEach(function () { + this.rclient.mget = sinon + .stub() + .withArgs( + `DocVersion:${this.doc_id}`, + `ProjectHistoryType:${this.doc_id}` + ) + .callsArgWith(2, null, [`${this.version}`]) + return this.RedisManager.getDocVersion(this.doc_id, this.callback) + }) + + return it('should return the document version and an undefined history type', function () { + return this.callback + .calledWithExactly(null, this.version, undefined) + .should.equal(true) + }) + }) + + return describe('when the document has a project history type set', function () { + beforeEach(function () { + this.rclient.mget = sinon + .stub() + .withArgs( + `DocVersion:${this.doc_id}`, + `ProjectHistoryType:${this.doc_id}` + ) + .callsArgWith(2, null, [`${this.version}`, 'project-history']) + return this.RedisManager.getDocVersion(this.doc_id, this.callback) + }) + + return it('should return the document version and history type', function () { + return this.callback + .calledWithExactly(null, this.version, 'project-history') + .should.equal(true) + }) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js index 4989b77034..8ea99aee5c 100644 --- a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js +++ b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js @@ -13,353 +13,427 @@ * DS205: Consider reworking code to avoid use of IIFEs * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const text = require("../../../../app/js/sharejs/types/text"); -require("chai").should(); -const RangesTracker = require("../../../../app/js/RangesTracker"); +const text = require('../../../../app/js/sharejs/types/text') +require('chai').should() +const RangesTracker = require('../../../../app/js/RangesTracker') -describe("ShareJS text type", function() { - beforeEach(function() { - return this.t = "mock-thread-id"; - }); - - describe("transform", function() { - describe("insert / insert", function() { - it("with an insert before", function() { - const dest = []; - text._tc(dest, { i: "foo", p: 9 }, { i: "bar", p: 3 }); - return dest.should.deep.equal([{ i: "foo", p: 12 }]); - }); +describe('ShareJS text type', function () { + beforeEach(function () { + return (this.t = 'mock-thread-id') + }) - it("with an insert after", function() { - const dest = []; - text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 9 }); - return dest.should.deep.equal([{ i: "foo", p: 3 }]); - }); + describe('transform', function () { + describe('insert / insert', function () { + it('with an insert before', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 9 }, { i: 'bar', p: 3 }) + return dest.should.deep.equal([{ i: 'foo', p: 12 }]) + }) - it("with an insert at the same place with side == 'right'", function() { - const dest = []; - text._tc(dest, { i: "foo", p: 3 }, { i: "bar", p: 3 }, 'right'); - return dest.should.deep.equal([{ i: "foo", p: 6 }]); - }); + it('with an insert after', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 9 }) + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) - return it("with an insert at the same place with side == 'left'", function() { - const dest = []; - text._tc(dest, { i: "foo", p: 3 }, { 
i: "bar", p: 3 }, 'left'); - return dest.should.deep.equal([{ i: "foo", p: 3 }]); - }); - }); + it("with an insert at the same place with side == 'right'", function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'right') + return dest.should.deep.equal([{ i: 'foo', p: 6 }]) + }) - describe("insert / delete", function() { - it("with a delete before", function() { - const dest = []; - text._tc(dest, { i: "foo", p: 9 }, { d: "bar", p: 3 }); - return dest.should.deep.equal([{ i: "foo", p: 6 }]); - }); + return it("with an insert at the same place with side == 'left'", function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'left') + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) + }) - it("with a delete after", function() { - const dest = []; - text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 9 }); - return dest.should.deep.equal([{ i: "foo", p: 3 }]); - }); + describe('insert / delete', function () { + it('with a delete before', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 9 }, { d: 'bar', p: 3 }) + return dest.should.deep.equal([{ i: 'foo', p: 6 }]) + }) - it("with a delete at the same place with side == 'right'", function() { - const dest = []; - text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'right'); - return dest.should.deep.equal([{ i: "foo", p: 3 }]); - }); + it('with a delete after', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 9 }) + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) - return it("with a delete at the same place with side == 'left'", function() { - const dest = []; - - text._tc(dest, { i: "foo", p: 3 }, { d: "bar", p: 3 }, 'left'); - return dest.should.deep.equal([{ i: "foo", p: 3 }]); - }); - }); + it("with a delete at the same place with side == 'right'", function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 3 }, 'right') + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) - describe("delete / insert", function() { - it("with an insert before", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 9 }, { i: "bar", p: 3 }); - return dest.should.deep.equal([{ d: "foo", p: 12 }]); - }); + return it("with a delete at the same place with side == 'left'", function () { + const dest = [] - it("with an insert after", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 9 }); - return dest.should.deep.equal([{ d: "foo", p: 3 }]); - }); + text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 3 }, 'left') + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) + }) - it("with an insert at the same place with side == 'right'", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 3 }, 'right'); - return dest.should.deep.equal([{ d: "foo", p: 6 }]); - }); + describe('delete / insert', function () { + it('with an insert before', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 9 }, { i: 'bar', p: 3 }) + return dest.should.deep.equal([{ d: 'foo', p: 12 }]) + }) - it("with an insert at the same place with side == 'left'", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 3 }, 'left'); - return dest.should.deep.equal([{ d: "foo", p: 6 }]); - }); - - return it("with a delete that overlaps the insert location", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 3 }, { i: "bar", p: 4 }); - return dest.should.deep.equal([{ d: "f", p: 3 }, { d: "oo", p: 6 }]); - }); - }); - + 
it('with an insert after', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 9 }) + return dest.should.deep.equal([{ d: 'foo', p: 3 }]) + }) - describe("delete / delete", function() { - it("with a delete before", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 9 }, { d: "bar", p: 3 }); - return dest.should.deep.equal([{ d: "foo", p: 6 }]); - }); + it("with an insert at the same place with side == 'right'", function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'right') + return dest.should.deep.equal([{ d: 'foo', p: 6 }]) + }) - it("with a delete after", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 3 }, { d: "bar", p: 9 }); - return dest.should.deep.equal([{ d: "foo", p: 3 }]); - }); + it("with an insert at the same place with side == 'left'", function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'left') + return dest.should.deep.equal([{ d: 'foo', p: 6 }]) + }) - it("with deleting the same content", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 3 }, { d: "foo", p: 3 }, 'right'); - return dest.should.deep.equal([]); - }); + return it('with a delete that overlaps the insert location', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 4 }) + return dest.should.deep.equal([ + { d: 'f', p: 3 }, + { d: 'oo', p: 6 } + ]) + }) + }) - it("with the delete overlapping before", function() { - const dest = []; - text._tc(dest, { d: "foobar", p: 3 }, { d: "abcfoo", p: 0 }, 'right'); - return dest.should.deep.equal([{ d: "bar", p: 0 }]); - }); + describe('delete / delete', function () { + it('with a delete before', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 9 }, { d: 'bar', p: 3 }) + return dest.should.deep.equal([{ d: 'foo', p: 6 }]) + }) - it("with the delete overlapping after", function() { - const dest = []; - text._tc(dest, { d: "abcfoo", p: 3 }, { d: "foobar", p: 6 }); - return dest.should.deep.equal([{ d: "abc", p: 3 }]); - }); + it('with a delete after', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { d: 'bar', p: 9 }) + return dest.should.deep.equal([{ d: 'foo', p: 3 }]) + }) - it("with the delete overlapping the whole delete", function() { - const dest = []; - text._tc(dest, { d: "abcfoo123", p: 3 }, { d: "foo", p: 6 }); - return dest.should.deep.equal([{ d: "abc123", p: 3 }]); - }); + it('with deleting the same content', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { d: 'foo', p: 3 }, 'right') + return dest.should.deep.equal([]) + }) - return it("with the delete inside the whole delete", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 6 }, { d: "abcfoo123", p: 3 }); - return dest.should.deep.equal([]); - }); - }); - - describe("comment / insert", function() { - it("with an insert before", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 9, t: this.t }, { i: "bar", p: 3 }); - return dest.should.deep.equal([{ c: "foo", p: 12, t: this.t }]); - }); + it('with the delete overlapping before', function () { + const dest = [] + text._tc(dest, { d: 'foobar', p: 3 }, { d: 'abcfoo', p: 0 }, 'right') + return dest.should.deep.equal([{ d: 'bar', p: 0 }]) + }) - it("with an insert after", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 9 }); - return dest.should.deep.equal([{ c: "foo", p: 3, t: this.t }]); - }); + it('with the delete overlapping after', function () { + const 
dest = [] + text._tc(dest, { d: 'abcfoo', p: 3 }, { d: 'foobar', p: 6 }) + return dest.should.deep.equal([{ d: 'abc', p: 3 }]) + }) - it("with an insert at the left edge", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 3 }); - // RangesTracker doesn't inject inserts into comments on edges, so neither should we - return dest.should.deep.equal([{ c: "foo", p: 6, t: this.t }]); - }); + it('with the delete overlapping the whole delete', function () { + const dest = [] + text._tc(dest, { d: 'abcfoo123', p: 3 }, { d: 'foo', p: 6 }) + return dest.should.deep.equal([{ d: 'abc123', p: 3 }]) + }) - it("with an insert at the right edge", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 6 }); - // RangesTracker doesn't inject inserts into comments on edges, so neither should we - return dest.should.deep.equal([{ c: "foo", p: 3, t: this.t }]); - }); + return it('with the delete inside the whole delete', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 6 }, { d: 'abcfoo123', p: 3 }) + return dest.should.deep.equal([]) + }) + }) - return it("with an insert in the middle", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 5 }); - return dest.should.deep.equal([{ c: "fobaro", p: 3, t: this.t }]); - }); - }); - - describe("comment / delete", function() { - it("with a delete before", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 9, t: this.t }, { d: "bar", p: 3 }); - return dest.should.deep.equal([{ c: "foo", p: 6, t: this.t }]); - }); + describe('comment / insert', function () { + it('with an insert before', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 9, t: this.t }, { i: 'bar', p: 3 }) + return dest.should.deep.equal([{ c: 'foo', p: 12, t: this.t }]) + }) - it("with a delete after", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 3, t: this.t }, { i: "bar", p: 9 }); - return dest.should.deep.equal([{ c: "foo", p: 3, t: this.t }]); - }); + it('with an insert after', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 9 }) + return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }]) + }) - it("with a delete overlapping the comment content before", function() { - const dest = []; - text._tc(dest, { c: "foobar", p: 6, t: this.t }, { d: "123foo", p: 3 }); - return dest.should.deep.equal([{ c: "bar", p: 3, t: this.t }]); - }); + it('with an insert at the left edge', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 3 }) + // RangesTracker doesn't inject inserts into comments on edges, so neither should we + return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }]) + }) - it("with a delete overlapping the comment content after", function() { - const dest = []; - text._tc(dest, { c: "foobar", p: 6, t: this.t }, { d: "bar123", p: 9 }); - return dest.should.deep.equal([{ c: "foo", p: 6, t: this.t }]); - }); + it('with an insert at the right edge', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 6 }) + // RangesTracker doesn't inject inserts into comments on edges, so neither should we + return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }]) + }) - it("with a delete overlapping the comment content in the middle", function() { - const dest = []; - text._tc(dest, { c: "foo123bar", p: 6, t: this.t }, { d: "123", p: 9 }); - return dest.should.deep.equal([{ c: 
"foobar", p: 6, t: this.t }]); - }); + return it('with an insert in the middle', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 5 }) + return dest.should.deep.equal([{ c: 'fobaro', p: 3, t: this.t }]) + }) + }) - return it("with a delete overlapping the whole comment", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 6, t: this.t }, { d: "123foo456", p: 3 }); - return dest.should.deep.equal([{ c: "", p: 3, t: this.t }]); - }); - }); - - describe("comment / insert", function() { return it("should not do anything", function() { - const dest = []; - text._tc(dest, { i: "foo", p: 6 }, { c: "bar", p: 3 }); - return dest.should.deep.equal([{ i: "foo", p: 6 }]); - }); }); - - describe("comment / delete", function() { return it("should not do anything", function() { - const dest = []; - text._tc(dest, { d: "foo", p: 6 }, { c: "bar", p: 3 }); - return dest.should.deep.equal([{ d: "foo", p: 6 }]); - }); }); - - return describe("comment / comment", function() { return it("should not do anything", function() { - const dest = []; - text._tc(dest, { c: "foo", p: 6 }, { c: "bar", p: 3 }); - return dest.should.deep.equal([{ c: "foo", p: 6 }]); - }); }); -}); + describe('comment / delete', function () { + it('with a delete before', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 9, t: this.t }, { d: 'bar', p: 3 }) + return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }]) + }) - describe("apply", function() { - it("should apply an insert", function() { return text.apply("foo", [{ i: "bar", p: 2 }]).should.equal("fobaro"); }); + it('with a delete after', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 9 }) + return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }]) + }) - it("should apply a delete", function() { return text.apply("foo123bar", [{ d: "123", p: 3 }]).should.equal("foobar"); }); + it('with a delete overlapping the comment content before', function () { + const dest = [] + text._tc(dest, { c: 'foobar', p: 6, t: this.t }, { d: '123foo', p: 3 }) + return dest.should.deep.equal([{ c: 'bar', p: 3, t: this.t }]) + }) - it("should do nothing with a comment", function() { return text.apply("foo123bar", [{ c: "123", p: 3 }]).should.equal("foo123bar"); }); - - it("should throw an error when deleted content does not match", function() { return ((() => text.apply("foo123bar", [{ d: "456", p: 3 }]))).should.throw(Error); }); - - return it("should throw an error when comment content does not match", function() { return ((() => text.apply("foo123bar", [{ c: "456", p: 3 }]))).should.throw(Error); }); - }); - - return describe("applying ops and comments in different orders", function() { return it("should not matter which op or comment is applied first", function() { - let length, p; - let asc, end; - let asc1, end1; - let asc3, end3; - const transform = function(op1, op2, side) { - const d = []; - text._tc(d, op1, op2, side); - return d; - }; - - const applySnapshot = (snapshot, op) => text.apply(snapshot, op); - - const applyRanges = function(rangesTracker, ops) { - for (const op of Array.from(ops)) { - rangesTracker.applyOp(op, {}); - } - return rangesTracker; - }; - - const commentsEqual = function(comments1, comments2) { - if (comments1.length !== comments2.length) { return false; } - comments1.sort((a,b) => { - if ((a.offset - b.offset) === 0) { - return a.length - b.length; + it('with a delete overlapping the comment content after', function () { + const dest = [] + 
text._tc(dest, { c: 'foobar', p: 6, t: this.t }, { d: 'bar123', p: 9 }) + return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }]) + }) + + it('with a delete overlapping the comment content in the middle', function () { + const dest = [] + text._tc(dest, { c: 'foo123bar', p: 6, t: this.t }, { d: '123', p: 9 }) + return dest.should.deep.equal([{ c: 'foobar', p: 6, t: this.t }]) + }) + + return it('with a delete overlapping the whole comment', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 6, t: this.t }, { d: '123foo456', p: 3 }) + return dest.should.deep.equal([{ c: '', p: 3, t: this.t }]) + }) + }) + + describe('comment / insert', function () { + return it('should not do anything', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 6 }, { c: 'bar', p: 3 }) + return dest.should.deep.equal([{ i: 'foo', p: 6 }]) + }) + }) + + describe('comment / delete', function () { + return it('should not do anything', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 6 }, { c: 'bar', p: 3 }) + return dest.should.deep.equal([{ d: 'foo', p: 6 }]) + }) + }) + + return describe('comment / comment', function () { + return it('should not do anything', function () { + const dest = [] + text._tc(dest, { c: 'foo', p: 6 }, { c: 'bar', p: 3 }) + return dest.should.deep.equal([{ c: 'foo', p: 6 }]) + }) + }) + }) + + describe('apply', function () { + it('should apply an insert', function () { + return text.apply('foo', [{ i: 'bar', p: 2 }]).should.equal('fobaro') + }) + + it('should apply a delete', function () { + return text + .apply('foo123bar', [{ d: '123', p: 3 }]) + .should.equal('foobar') + }) + + it('should do nothing with a comment', function () { + return text + .apply('foo123bar', [{ c: '123', p: 3 }]) + .should.equal('foo123bar') + }) + + it('should throw an error when deleted content does not match', function () { + return (() => text.apply('foo123bar', [{ d: '456', p: 3 }])).should.throw( + Error + ) + }) + + return it('should throw an error when comment content does not match', function () { + return (() => text.apply('foo123bar', [{ c: '456', p: 3 }])).should.throw( + Error + ) + }) + }) + + return describe('applying ops and comments in different orders', function () { + return it('should not matter which op or comment is applied first', function () { + let length, p + let asc, end + let asc1, end1 + let asc3, end3 + const transform = function (op1, op2, side) { + const d = [] + text._tc(d, op1, op2, side) + return d + } + + const applySnapshot = (snapshot, op) => text.apply(snapshot, op) + + const applyRanges = function (rangesTracker, ops) { + for (const op of Array.from(ops)) { + rangesTracker.applyOp(op, {}) + } + return rangesTracker + } + + const commentsEqual = function (comments1, comments2) { + if (comments1.length !== comments2.length) { + return false + } + comments1.sort((a, b) => { + if (a.offset - b.offset === 0) { + return a.length - b.length + } else { + return a.offset - b.offset + } + }) + comments2.sort((a, b) => { + if (a.offset - b.offset === 0) { + return a.length - b.length + } else { + return a.offset - b.offset + } + }) + for (let i = 0; i < comments1.length; i++) { + const comment1 = comments1[i] + const comment2 = comments2[i] + if ( + comment1.offset !== comment2.offset || + comment1.length !== comment2.length + ) { + return false + } + } + return true + } + + const SNAPSHOT = '123' + + const OPS = [] + // Insert ops + for ( + p = 0, end = SNAPSHOT.length, asc = end >= 0; + asc ? p <= end : p >= end; + asc ? 
p++ : p-- + ) { + OPS.push({ i: 'a', p }) + OPS.push({ i: 'bc', p }) + } + for ( + p = 0, end1 = SNAPSHOT.length - 1, asc1 = end1 >= 0; + asc1 ? p <= end1 : p >= end1; + asc1 ? p++ : p-- + ) { + var asc2, end2 + for ( + length = 1, end2 = SNAPSHOT.length - p, asc2 = end2 >= 1; + asc2 ? length <= end2 : length >= end2; + asc2 ? length++ : length-- + ) { + OPS.push({ d: SNAPSHOT.slice(p, p + length), p }) + } + } + for ( + p = 0, end3 = SNAPSHOT.length - 1, asc3 = end3 >= 0; + asc3 ? p <= end3 : p >= end3; + asc3 ? p++ : p-- + ) { + var asc4, end4 + for ( + length = 1, end4 = SNAPSHOT.length - p, asc4 = end4 >= 1; + asc4 ? length <= end4 : length >= end4; + asc4 ? length++ : length-- + ) { + OPS.push({ c: SNAPSHOT.slice(p, p + length), p, t: this.t }) + } + } + + return (() => { + const result = [] + for (var op1 of Array.from(OPS)) { + result.push( + (() => { + const result1 = [] + for (const op2 of Array.from(OPS)) { + const op1_t = transform(op1, op2, 'left') + const op2_t = transform(op2, op1, 'right') + + const rt12 = new RangesTracker() + const snapshot12 = applySnapshot( + applySnapshot(SNAPSHOT, [op1]), + op2_t + ) + applyRanges(rt12, [op1]) + applyRanges(rt12, op2_t) + + const rt21 = new RangesTracker() + const snapshot21 = applySnapshot( + applySnapshot(SNAPSHOT, [op2]), + op1_t + ) + applyRanges(rt21, [op2]) + applyRanges(rt21, op1_t) + + if (snapshot12 !== snapshot21) { + console.error( + { op1, op2, op1_t, op2_t, snapshot12, snapshot21 }, + 'Ops are not consistent' + ) + throw new Error('OT is inconsistent') + } + + if (!commentsEqual(rt12.comments, rt21.comments)) { + console.log(rt12.comments) + console.log(rt21.comments) + console.error( + { + op1, + op2, + op1_t, + op2_t, + rt12_comments: rt12.comments, + rt21_comments: rt21.comments + }, + 'Comments are not consistent' + ) + throw new Error('OT is inconsistent') } else { - return a.offset - b.offset; + result1.push(undefined) } - }); - comments2.sort((a,b) => { - if ((a.offset - b.offset) === 0) { - return a.length - b.length; - } else { - return a.offset - b.offset; - } - }); - for (let i = 0; i < comments1.length; i++) { - const comment1 = comments1[i]; - const comment2 = comments2[i]; - if ((comment1.offset !== comment2.offset) || (comment1.length !== comment2.length)) { - return false; - } - } - return true; - }; - - const SNAPSHOT = "123"; - - const OPS = []; - // Insert ops - for (p = 0, end = SNAPSHOT.length, asc = end >= 0; asc ? p <= end : p >= end; asc ? p++ : p--) { - OPS.push({i: "a", p}); - OPS.push({i: "bc", p}); + } + return result1 + })() + ) } - for (p = 0, end1 = SNAPSHOT.length-1, asc1 = end1 >= 0; asc1 ? p <= end1 : p >= end1; asc1 ? p++ : p--) { - var asc2, end2; - for (length = 1, end2 = SNAPSHOT.length - p, asc2 = end2 >= 1; asc2 ? length <= end2 : length >= end2; asc2 ? length++ : length--) { - OPS.push({d: SNAPSHOT.slice(p, p+length), p}); - } - } - for (p = 0, end3 = SNAPSHOT.length-1, asc3 = end3 >= 0; asc3 ? p <= end3 : p >= end3; asc3 ? p++ : p--) { - var asc4, end4; - for (length = 1, end4 = SNAPSHOT.length - p, asc4 = end4 >= 1; asc4 ? length <= end4 : length >= end4; asc4 ? 
length++ : length--) { - OPS.push({c: SNAPSHOT.slice(p, p+length), p, t: this.t}); - } - } - - return (() => { - const result = []; - for (var op1 of Array.from(OPS)) { - result.push((() => { - const result1 = []; - for (const op2 of Array.from(OPS)) { - const op1_t = transform(op1, op2, "left"); - const op2_t = transform(op2, op1, "right"); - - const rt12 = new RangesTracker(); - const snapshot12 = applySnapshot(applySnapshot(SNAPSHOT, [op1]), op2_t); - applyRanges(rt12, [op1]); - applyRanges(rt12, op2_t); - - const rt21 = new RangesTracker(); - const snapshot21 = applySnapshot(applySnapshot(SNAPSHOT, [op2]), op1_t); - applyRanges(rt21, [op2]); - applyRanges(rt21, op1_t); - - if (snapshot12 !== snapshot21) { - console.error({op1, op2, op1_t, op2_t, snapshot12, snapshot21}, "Ops are not consistent"); - throw new Error("OT is inconsistent"); - } - - if (!commentsEqual(rt12.comments, rt21.comments)) { - console.log(rt12.comments); - console.log(rt21.comments); - console.error({op1, op2, op1_t, op2_t, rt12_comments: rt12.comments, rt21_comments: rt21.comments}, "Comments are not consistent"); - throw new Error("OT is inconsistent"); - } else { - result1.push(undefined); - } - } - return result1; - })()); - } - return result; - })(); - }); }); -}); + return result + })() + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js index 165e8fcdf0..7f7d377c1d 100644 --- a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js +++ b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js @@ -10,126 +10,142 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../../app/js/ShareJsDB.js"; -const SandboxedModule = require('sandboxed-module'); -const Errors = require("../../../../app/js/Errors"); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../../app/js/ShareJsDB.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') -describe("ShareJsDB", function() { - beforeEach(function() { - this.doc_id = "document-id"; - this.project_id = "project-id"; - this.doc_key = `${this.project_id}:${this.doc_id}`; - this.callback = sinon.stub(); - this.ShareJsDB = SandboxedModule.require(modulePath, { requires: { - "./RedisManager": (this.RedisManager = {}) - } - }); +describe('ShareJsDB', function () { + beforeEach(function () { + this.doc_id = 'document-id' + this.project_id = 'project-id' + this.doc_key = `${this.project_id}:${this.doc_id}` + this.callback = sinon.stub() + this.ShareJsDB = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = {}) + } + }) - this.version = 42; - this.lines = ["one", "two", "three"]; - return this.db = new this.ShareJsDB(this.project_id, this.doc_id, this.lines, this.version); - }); + this.version = 42 + this.lines = ['one', 'two', 'three'] + return (this.db = new this.ShareJsDB( + this.project_id, + this.doc_id, + this.lines, + this.version + )) + }) - describe("getSnapshot", function() { - describe("successfully", function() { - beforeEach(function() { - return this.db.getSnapshot(this.doc_key, this.callback); - }); + 
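// A minimal sketch of the behaviour pinned down by the getSnapshot cases
// below, inferred from their assertions rather than taken from the actual
// app/js/ShareJsDB source (getSnapshotSketch is an illustrative name): the
// wrapper is constructed with the doc already loaded, so it serves the
// snapshot from memory and rejects any other doc key.
const getSnapshotSketch = function (db, docKey, callback) {
  if (docKey !== `${db.project_id}:${db.doc_id}`) {
    // a key for any other doc means the caller looked up the wrong document
    return callback(new Errors.NotFoundError('not found'))
  }
  return callback(null, {
    snapshot: db.lines.join('\n'), // doc lines flattened to a single string
    v: db.version, // the in-memory document version
    type: 'text' // the ShareJS OT type used throughout document-updater
  })
}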
describe('getSnapshot', function () { + describe('successfully', function () { + beforeEach(function () { + return this.db.getSnapshot(this.doc_key, this.callback) + }) - it("should return the doc lines", function() { - return this.callback.args[0][1].snapshot.should.equal(this.lines.join("\n")); - }); + it('should return the doc lines', function () { + return this.callback.args[0][1].snapshot.should.equal( + this.lines.join('\n') + ) + }) - it("should return the doc version", function() { - return this.callback.args[0][1].v.should.equal(this.version); - }); + it('should return the doc version', function () { + return this.callback.args[0][1].v.should.equal(this.version) + }) - return it("should return the type as text", function() { - return this.callback.args[0][1].type.should.equal("text"); - }); - }); + return it('should return the type as text', function () { + return this.callback.args[0][1].type.should.equal('text') + }) + }) - return describe("when the key does not match", function() { - beforeEach(function() { - return this.db.getSnapshot("bad:key", this.callback); - }); + return describe('when the key does not match', function () { + beforeEach(function () { + return this.db.getSnapshot('bad:key', this.callback) + }) - return it("should return the callback with a NotFoundError", function() { - return this.callback.calledWith(new Errors.NotFoundError("not found")).should.equal(true); - }); - }); - }); + return it('should return the callback with a NotFoundError', function () { + return this.callback + .calledWith(new Errors.NotFoundError('not found')) + .should.equal(true) + }) + }) + }) - describe("getOps", function() { - describe("with start == end", function() { - beforeEach(function() { - this.start = (this.end = 42); - return this.db.getOps(this.doc_key, this.start, this.end, this.callback); - }); + describe('getOps', function () { + describe('with start == end', function () { + beforeEach(function () { + this.start = this.end = 42 + return this.db.getOps(this.doc_key, this.start, this.end, this.callback) + }) - return it("should return an empty array", function() { - return this.callback.calledWith(null, []).should.equal(true); - }); - }); - - describe("with a non empty range", function() { - beforeEach(function() { - this.start = 35; - this.end = 42; - this.RedisManager.getPreviousDocOps = sinon.stub().callsArgWith(3, null, this.ops); - return this.db.getOps(this.doc_key, this.start, this.end, this.callback); - }); + return it('should return an empty array', function () { + return this.callback.calledWith(null, []).should.equal(true) + }) + }) - it("should get the range from redis", function() { - return this.RedisManager.getPreviousDocOps - .calledWith(this.doc_id, this.start, this.end-1) - .should.equal(true); - }); + describe('with a non empty range', function () { + beforeEach(function () { + this.start = 35 + this.end = 42 + this.RedisManager.getPreviousDocOps = sinon + .stub() + .callsArgWith(3, null, this.ops) + return this.db.getOps(this.doc_key, this.start, this.end, this.callback) + }) - return it("should return the ops", function() { - return this.callback.calledWith(null, this.ops).should.equal(true); - }); - }); + it('should get the range from redis', function () { + return this.RedisManager.getPreviousDocOps + .calledWith(this.doc_id, this.start, this.end - 1) + .should.equal(true) + }) - return describe("with no specified end", function() { - beforeEach(function() { - this.start = 35; - this.end = null; - this.RedisManager.getPreviousDocOps = 
sinon.stub().callsArgWith(3, null, this.ops); - return this.db.getOps(this.doc_key, this.start, this.end, this.callback); - }); - - return it("should get until the end of the list", function() { - return this.RedisManager.getPreviousDocOps - .calledWith(this.doc_id, this.start, -1) - .should.equal(true); - }); - }); - }); + return it('should return the ops', function () { + return this.callback.calledWith(null, this.ops).should.equal(true) + }) + }) - return describe("writeOps", function() { return describe("writing an op", function() { - beforeEach(function() { - this.opData = { - op: {p: 20, t: "foo"}, - meta: {source: "bar"}, - v: this.version - }; - return this.db.writeOp(this.doc_key, this.opData, this.callback); - }); + return describe('with no specified end', function () { + beforeEach(function () { + this.start = 35 + this.end = null + this.RedisManager.getPreviousDocOps = sinon + .stub() + .callsArgWith(3, null, this.ops) + return this.db.getOps(this.doc_key, this.start, this.end, this.callback) + }) - it("should write into appliedOps", function() { - return expect(this.db.appliedOps[this.doc_key]).to.deep.equal([this.opData]); - }); + return it('should get until the end of the list', function () { + return this.RedisManager.getPreviousDocOps + .calledWith(this.doc_id, this.start, -1) + .should.equal(true) + }) + }) + }) - return it("should call the callback without an error", function() { - this.callback.called.should.equal(true); - return (this.callback.args[0][0] != null).should.equal(false); - }); - }); }); -}); + return describe('writeOps', function () { + return describe('writing an op', function () { + beforeEach(function () { + this.opData = { + op: { p: 20, t: 'foo' }, + meta: { source: 'bar' }, + v: this.version + } + return this.db.writeOp(this.doc_key, this.opData, this.callback) + }) + + it('should write into appliedOps', function () { + return expect(this.db.appliedOps[this.doc_key]).to.deep.equal([ + this.opData + ]) + }) + + return it('should call the callback without an error', function () { + this.callback.called.should.equal(true) + return (this.callback.args[0][0] != null).should.equal(false) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js index c6a3fbac33..56afe4c584 100644 --- a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js +++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js @@ -9,179 +9,230 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/ShareJsUpdateManager.js"; -const SandboxedModule = require('sandboxed-module'); -const crypto = require('crypto'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/ShareJsUpdateManager.js' +const SandboxedModule = require('sandboxed-module') +const crypto = require('crypto') -describe("ShareJsUpdateManager", function() { - beforeEach(function() { - let Model; - this.project_id = "project-id-123"; - this.doc_id = "document-id-123"; - this.callback = sinon.stub(); - return this.ShareJsUpdateManager = SandboxedModule.require(modulePath, { - requires: { - 
"./sharejs/server/model": - (Model = class Model { - constructor(db) { - this.db = db; - } - }), - "./ShareJsDB" : (this.ShareJsDB = { mockDB: true }), - "redis-sharelatex" : { createClient: () => { return this.rclient = {auth() {}}; } - }, - "logger-sharelatex": (this.logger = { log: sinon.stub() }), - "./RealTimeRedisManager": (this.RealTimeRedisManager = {}), - "./Metrics": (this.metrics = { inc: sinon.stub() }) - }, - globals: { - clearTimeout: (this.clearTimeout = sinon.stub()) - } - } - ); - }); +describe('ShareJsUpdateManager', function () { + beforeEach(function () { + let Model + this.project_id = 'project-id-123' + this.doc_id = 'document-id-123' + this.callback = sinon.stub() + return (this.ShareJsUpdateManager = SandboxedModule.require(modulePath, { + requires: { + './sharejs/server/model': (Model = class Model { + constructor(db) { + this.db = db + } + }), + './ShareJsDB': (this.ShareJsDB = { mockDB: true }), + 'redis-sharelatex': { + createClient: () => { + return (this.rclient = { auth() {} }) + } + }, + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), + './RealTimeRedisManager': (this.RealTimeRedisManager = {}), + './Metrics': (this.metrics = { inc: sinon.stub() }) + }, + globals: { + clearTimeout: (this.clearTimeout = sinon.stub()) + } + })) + }) - describe("applyUpdate", function() { - beforeEach(function() { - this.lines = ["one", "two"]; - this.version = 34; - this.updatedDocLines = ["onefoo", "two"]; - const content = this.updatedDocLines.join("\n"); - this.hash = crypto.createHash('sha1').update("blob " + content.length + "\x00").update(content, 'utf8').digest('hex'); - this.update = {p: 4, t: "foo", v:this.version, hash:this.hash}; - this.model = { - applyOp: sinon.stub().callsArg(2), - getSnapshot: sinon.stub(), - db: { - appliedOps: {} - } - }; - this.ShareJsUpdateManager.getNewShareJsModel = sinon.stub().returns(this.model); - this.ShareJsUpdateManager._listenForOps = sinon.stub(); - return this.ShareJsUpdateManager.removeDocFromCache = sinon.stub().callsArg(1); - }); + describe('applyUpdate', function () { + beforeEach(function () { + this.lines = ['one', 'two'] + this.version = 34 + this.updatedDocLines = ['onefoo', 'two'] + const content = this.updatedDocLines.join('\n') + this.hash = crypto + .createHash('sha1') + .update('blob ' + content.length + '\x00') + .update(content, 'utf8') + .digest('hex') + this.update = { p: 4, t: 'foo', v: this.version, hash: this.hash } + this.model = { + applyOp: sinon.stub().callsArg(2), + getSnapshot: sinon.stub(), + db: { + appliedOps: {} + } + } + this.ShareJsUpdateManager.getNewShareJsModel = sinon + .stub() + .returns(this.model) + this.ShareJsUpdateManager._listenForOps = sinon.stub() + return (this.ShareJsUpdateManager.removeDocFromCache = sinon + .stub() + .callsArg(1)) + }) - describe("successfully", function() { - beforeEach(function(done) { - this.model.getSnapshot.callsArgWith(1, null, {snapshot: this.updatedDocLines.join("\n"), v: this.version}); - this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] = (this.appliedOps = ["mock-ops"]); - return this.ShareJsUpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.lines, this.version, (err, docLines, version, appliedOps) => { - this.callback(err, docLines, version, appliedOps); - return done(); - }); - }); + describe('successfully', function () { + beforeEach(function (done) { + this.model.getSnapshot.callsArgWith(1, null, { + snapshot: this.updatedDocLines.join('\n'), + v: this.version + }) + this.model.db.appliedOps[ + 
`${this.project_id}:${this.doc_id}` + ] = this.appliedOps = ['mock-ops'] + return this.ShareJsUpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version, + (err, docLines, version, appliedOps) => { + this.callback(err, docLines, version, appliedOps) + return done() + } + ) + }) - it("should create a new ShareJs model", function() { - return this.ShareJsUpdateManager.getNewShareJsModel - .calledWith(this.project_id, this.doc_id, this.lines, this.version) - .should.equal(true); - }); + it('should create a new ShareJs model', function () { + return this.ShareJsUpdateManager.getNewShareJsModel + .calledWith(this.project_id, this.doc_id, this.lines, this.version) + .should.equal(true) + }) - it("should listen for ops on the model", function() { - return this.ShareJsUpdateManager._listenForOps - .calledWith(this.model) - .should.equal(true); - }); + it('should listen for ops on the model', function () { + return this.ShareJsUpdateManager._listenForOps + .calledWith(this.model) + .should.equal(true) + }) - it("should send the update to ShareJs", function() { - return this.model.applyOp - .calledWith(`${this.project_id}:${this.doc_id}`, this.update) - .should.equal(true); - }); + it('should send the update to ShareJs', function () { + return this.model.applyOp + .calledWith(`${this.project_id}:${this.doc_id}`, this.update) + .should.equal(true) + }) - it("should get the updated doc lines", function() { - return this.model.getSnapshot - .calledWith(`${this.project_id}:${this.doc_id}`) - .should.equal(true); - }); + it('should get the updated doc lines', function () { + return this.model.getSnapshot + .calledWith(`${this.project_id}:${this.doc_id}`) + .should.equal(true) + }) - return it("should return the updated doc lines, version and ops", function() { - return this.callback.calledWith(null, this.updatedDocLines, this.version, this.appliedOps).should.equal(true); - }); - }); + return it('should return the updated doc lines, version and ops', function () { + return this.callback + .calledWith(null, this.updatedDocLines, this.version, this.appliedOps) + .should.equal(true) + }) + }) - describe("when applyOp fails", function() { - beforeEach(function(done) { - this.error = new Error("Something went wrong"); - this.model.applyOp = sinon.stub().callsArgWith(2, this.error); - return this.ShareJsUpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.lines, this.version, (err, docLines, version) => { - this.callback(err, docLines, version); - return done(); - }); - }); + describe('when applyOp fails', function () { + beforeEach(function (done) { + this.error = new Error('Something went wrong') + this.model.applyOp = sinon.stub().callsArgWith(2, this.error) + return this.ShareJsUpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version, + (err, docLines, version) => { + this.callback(err, docLines, version) + return done() + } + ) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) - describe("when getSnapshot fails", function() { - beforeEach(function(done) { - this.error = new Error("Something went wrong"); - this.model.getSnapshot.callsArgWith(1, this.error); - return this.ShareJsUpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, 
this.lines, this.version, (err, docLines, version) => { - this.callback(err, docLines, version); - return done(); - }); - }); + describe('when getSnapshot fails', function () { + beforeEach(function (done) { + this.error = new Error('Something went wrong') + this.model.getSnapshot.callsArgWith(1, this.error) + return this.ShareJsUpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version, + (err, docLines, version) => { + this.callback(err, docLines, version) + return done() + } + ) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) - return describe("with an invalid hash", function() { - beforeEach(function(done) { - this.error = new Error("invalid hash"); - this.model.getSnapshot.callsArgWith(1, null, {snapshot: "unexpected content", v: this.version}); - this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] = (this.appliedOps = ["mock-ops"]); - return this.ShareJsUpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.lines, this.version, (err, docLines, version, appliedOps) => { - this.callback(err, docLines, version, appliedOps); - return done(); - }); - }); + return describe('with an invalid hash', function () { + beforeEach(function (done) { + this.error = new Error('invalid hash') + this.model.getSnapshot.callsArgWith(1, null, { + snapshot: 'unexpected content', + v: this.version + }) + this.model.db.appliedOps[ + `${this.project_id}:${this.doc_id}` + ] = this.appliedOps = ['mock-ops'] + return this.ShareJsUpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version, + (err, docLines, version, appliedOps) => { + this.callback(err, docLines, version, appliedOps) + return done() + } + ) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); - }); + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + }) - return describe("_listenForOps", function() { - beforeEach(function() { - this.model = { on: (event, callback) => { - return this.callback = callback; - } - }; - sinon.spy(this.model, "on"); - return this.ShareJsUpdateManager._listenForOps(this.model); - }); + return describe('_listenForOps', function () { + beforeEach(function () { + this.model = { + on: (event, callback) => { + return (this.callback = callback) + } + } + sinon.spy(this.model, 'on') + return this.ShareJsUpdateManager._listenForOps(this.model) + }) - it("should listen to the model for updates", function() { - return this.model.on.calledWith("applyOp") - .should.equal(true); - }); + it('should listen to the model for updates', function () { + return this.model.on.calledWith('applyOp').should.equal(true) + }) - return describe("the callback", function() { - beforeEach(function() { - this.opData = { - op: {t: "foo", p: 1}, - meta: { source: "bar" - } - }; - this.RealTimeRedisManager.sendData = sinon.stub(); - return this.callback(`${this.project_id}:${this.doc_id}`, this.opData); - }); - - return it("should publish the op to redis", function() { - return this.RealTimeRedisManager.sendData - .calledWith({project_id: this.project_id, doc_id: this.doc_id, op: this.opData}) - 
.should.equal(true); - }); - }); - }); -}); + return describe('the callback', function () { + beforeEach(function () { + this.opData = { + op: { t: 'foo', p: 1 }, + meta: { source: 'bar' } + } + this.RealTimeRedisManager.sendData = sinon.stub() + return this.callback(`${this.project_id}:${this.doc_id}`, this.opData) + }) + return it('should publish the op to redis', function () { + return this.RealTimeRedisManager.sendData + .calledWith({ + project_id: this.project_id, + doc_id: this.doc_id, + op: this.opData + }) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js index c5f89ab81c..4e39089490 100644 --- a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js +++ b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js @@ -11,477 +11,682 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const modulePath = "../../../../app/js/UpdateManager.js"; -const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const modulePath = '../../../../app/js/UpdateManager.js' +const SandboxedModule = require('sandboxed-module') -describe("UpdateManager", function() { - beforeEach(function() { - let Profiler, Timer; - this.project_id = "project-id-123"; - this.projectHistoryId = "history-id-123"; - this.doc_id = "document-id-123"; - this.callback = sinon.stub(); - return this.UpdateManager = SandboxedModule.require(modulePath, { requires: { - "./LockManager" : (this.LockManager = {}), - "./RedisManager" : (this.RedisManager = {}), - "./RealTimeRedisManager" : (this.RealTimeRedisManager = {}), - "./ShareJsUpdateManager" : (this.ShareJsUpdateManager = {}), - "./HistoryManager" : (this.HistoryManager = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub() }), - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()) - }), - "settings-sharelatex": (this.Settings = {}), - "./DocumentManager": (this.DocumentManager = {}), - "./RangesManager": (this.RangesManager = {}), - "./SnapshotManager": (this.SnapshotManager = {}), - "./Profiler": (Profiler = (function() { - Profiler = class Profiler { - static initClass() { - this.prototype.log = sinon.stub().returns({ end: sinon.stub() }); - this.prototype.end = sinon.stub(); - } - }; - Profiler.initClass(); - return Profiler; - })()) - } - } - ); - }); - - describe("processOutstandingUpdates", function() { - beforeEach(function() { - this.UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2); - return this.UpdateManager.processOutstandingUpdates(this.project_id, this.doc_id, this.callback); - }); - - it("should apply the updates", function() { - return this.UpdateManager.fetchAndApplyUpdates.calledWith(this.project_id, this.doc_id).should.equal(true); - }); - - it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - - return it("should time the execution", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); - - describe("processOutstandingUpdatesWithLock", 
function() { - describe("when the lock is free", function() { - beforeEach(function() { - this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, true, (this.lockValue = "mock-lock-value")); - this.LockManager.releaseLock = sinon.stub().callsArg(2); - this.UpdateManager.continueProcessingUpdatesWithLock = sinon.stub().callsArg(2); - return this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2); - }); - - describe("successfully", function() { - beforeEach(function() { - return this.UpdateManager.processOutstandingUpdatesWithLock(this.project_id, this.doc_id, this.callback); - }); - - it("should acquire the lock", function() { - return this.LockManager.tryLock.calledWith(this.doc_id).should.equal(true); - }); - - it("should free the lock", function() { - return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); - }); - - it("should process the outstanding updates", function() { - return this.UpdateManager.processOutstandingUpdates.calledWith(this.project_id, this.doc_id).should.equal(true); - }); - - it("should do everything with the lock acquired", function() { - this.UpdateManager.processOutstandingUpdates.calledAfter(this.LockManager.tryLock).should.equal(true); - return this.UpdateManager.processOutstandingUpdates.calledBefore(this.LockManager.releaseLock).should.equal(true); - }); - - it("should continue processing new updates that may have come in", function() { - return this.UpdateManager.continueProcessingUpdatesWithLock.calledWith(this.project_id, this.doc_id).should.equal(true); - }); - - return it("should return the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - - return describe("when processOutstandingUpdates returns an error", function() { - beforeEach(function() { - this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, (this.error = new Error("Something went wrong"))); - return this.UpdateManager.processOutstandingUpdatesWithLock(this.project_id, this.doc_id, this.callback); - }); - - it("should free the lock", function() { - return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); - }); - - return it("should return the error in the callback", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); - }); - - return describe("when the lock is taken", function() { - beforeEach(function() { - this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false); - this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2); - return this.UpdateManager.processOutstandingUpdatesWithLock(this.project_id, this.doc_id, this.callback); - }); - - it("should return the callback", function() { - return this.callback.called.should.equal(true); - }); - - return it("should not process the updates", function() { - return this.UpdateManager.processOutstandingUpdates.called.should.equal(false); - }); - }); - }); - - describe("continueProcessingUpdatesWithLock", function() { - describe("when there are outstanding updates", function() { - beforeEach(function() { - this.RealTimeRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 3); - this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2); - return this.UpdateManager.continueProcessingUpdatesWithLock(this.project_id, this.doc_id, this.callback); - }); - - it("should process the outstanding updates", function() { - return this.UpdateManager.processOutstandingUpdatesWithLock.calledWith(this.project_id, 
this.doc_id).should.equal(true); - }); - - return it("should return the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - - return describe("when there are no outstanding updates", function() { - beforeEach(function() { - this.RealTimeRedisManager.getUpdatesLength = sinon.stub().callsArgWith(1, null, 0); - this.UpdateManager.processOutstandingUpdatesWithLock = sinon.stub().callsArg(2); - return this.UpdateManager.continueProcessingUpdatesWithLock(this.project_id, this.doc_id, this.callback); - }); - - it("should not try to process the outstanding updates", function() { - return this.UpdateManager.processOutstandingUpdatesWithLock.called.should.equal(false); - }); - - return it("should return the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - }); - - describe("fetchAndApplyUpdates", function() { - describe("with updates", function() { - beforeEach(function() { - this.updates = [{p: 1, t: "foo"}]; - this.updatedDocLines = ["updated", "lines"]; - this.version = 34; - this.RealTimeRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, this.updates); - this.UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null, this.updatedDocLines, this.version); - return this.UpdateManager.fetchAndApplyUpdates(this.project_id, this.doc_id, this.callback); - }); - - it("should get the pending updates", function() { - return this.RealTimeRedisManager.getPendingUpdatesForDoc.calledWith(this.doc_id).should.equal(true); - }); - - it("should apply the updates", function() { - return Array.from(this.updates).map((update) => - this.UpdateManager.applyUpdate - .calledWith(this.project_id, this.doc_id, update) - .should.equal(true)); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - - return describe("when there are no updates", function() { - beforeEach(function() { - this.updates = []; - this.RealTimeRedisManager.getPendingUpdatesForDoc = sinon.stub().callsArgWith(1, null, this.updates); - this.UpdateManager.applyUpdate = sinon.stub(); - this.RedisManager.setDocument = sinon.stub(); - return this.UpdateManager.fetchAndApplyUpdates(this.project_id, this.doc_id, this.callback); - }); - - it("should not call applyUpdate", function() { - return this.UpdateManager.applyUpdate.called.should.equal(false); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - }); - - describe("applyUpdate", function() { - beforeEach(function() { - this.updateMeta = { user_id: 'last-author-fake-id' }; - this.update = {op: [{p: 42, i: "foo"}], meta: this.updateMeta}; - this.updatedDocLines = ["updated", "lines"]; - this.version = 34; - this.lines = ["original", "lines"]; - this.ranges = { entries: "mock", comments: "mock" }; - this.updated_ranges = { entries: "updated", comments: "updated" }; - this.appliedOps = [ {v: 42, op: "mock-op-42"}, { v: 45, op: "mock-op-45" }]; - this.doc_ops_length = sinon.stub(); - this.project_ops_length = sinon.stub(); - this.pathname = '/a/b/c.tex'; - this.DocumentManager.getDoc = sinon.stub().yields(null, this.lines, this.version, this.ranges, this.pathname, this.projectHistoryId); - this.RangesManager.applyUpdate = sinon.stub().yields(null, this.updated_ranges, false); - this.ShareJsUpdateManager.applyUpdate = sinon.stub().yields(null, this.updatedDocLines, this.version, this.appliedOps); - this.RedisManager.updateDocument = sinon.stub().yields(null, 
this.doc_ops_length, this.project_ops_length); - this.RealTimeRedisManager.sendData = sinon.stub(); - this.UpdateManager._addProjectHistoryMetadataToOps = sinon.stub(); - return this.HistoryManager.recordAndFlushHistoryOps = sinon.stub().callsArg(5); - }); - - describe("normally", function() { - beforeEach(function() { - return this.UpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.callback); - }); - - it("should apply the updates via ShareJS", function() { - return this.ShareJsUpdateManager.applyUpdate - .calledWith(this.project_id, this.doc_id, this.update, this.lines, this.version) - .should.equal(true); - }); - - it("should update the ranges", function() { - return this.RangesManager.applyUpdate - .calledWith(this.project_id, this.doc_id, this.ranges, this.appliedOps, this.updatedDocLines) - .should.equal(true); - }); - - it("should save the document", function() { - return this.RedisManager.updateDocument - .calledWith(this.project_id, this.doc_id, this.updatedDocLines, this.version, this.appliedOps, this.updated_ranges, this.updateMeta) - .should.equal(true); - }); - - it("should add metadata to the ops" , function() { - return this.UpdateManager._addProjectHistoryMetadataToOps - .calledWith(this.appliedOps, this.pathname, this.projectHistoryId, this.lines) - .should.equal(true); - }); - - it("should push the applied ops into the history queue", function() { - return this.HistoryManager.recordAndFlushHistoryOps - .calledWith(this.project_id, this.doc_id, this.appliedOps, this.doc_ops_length, this.project_ops_length) - .should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - - describe("with UTF-16 surrogate pairs in the update", function() { - beforeEach(function() { - this.update = {op: [{p: 42, i: "\uD835\uDC00"}]}; - return this.UpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.callback); - }); - - return it("should apply the update but with surrogate pairs removed", function() { - this.ShareJsUpdateManager.applyUpdate - .calledWith(this.project_id, this.doc_id, this.update) - .should.equal(true); - - // \uFFFD is 'replacement character' - return this.update.op[0].i.should.equal("\uFFFD\uFFFD"); - }); - }); - - describe("with an error", function() { - beforeEach(function() { - this.error = new Error("something went wrong"); - this.ShareJsUpdateManager.applyUpdate = sinon.stub().yields(this.error); - return this.UpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.callback); - }); - - it("should call RealTimeRedisManager.sendData with the error", function() { - return this.RealTimeRedisManager.sendData - .calledWith({ - project_id: this.project_id, - doc_id: this.doc_id, - error: this.error.message - }) - .should.equal(true); - }); - - return it("should call the callback with the error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); - - return describe("when ranges get collapsed", function() { - beforeEach(function() { - this.RangesManager.applyUpdate = sinon.stub().yields(null, this.updated_ranges, true); - this.SnapshotManager.recordSnapshot = sinon.stub().yields(); - return this.UpdateManager.applyUpdate(this.project_id, this.doc_id, this.update, this.callback); - }); - - return it("should call SnapshotManager.recordSnapshot", function() { - return this.SnapshotManager.recordSnapshot - .calledWith( - this.project_id, - this.doc_id, - this.version, - this.pathname, - this.lines, - 
this.ranges - ) - .should.equal(true); - }); - }); - }); - - - describe("_addProjectHistoryMetadataToOps", function() { return it("should add projectHistoryId, pathname and doc_length metadata to the ops", function() { - const lines = [ - 'some', - 'test', - 'data' - ]; - const appliedOps = [ - { v: 42, op: [{i: "foo", p: 4}, { i: "bar", p: 6 }] }, - { v: 45, op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }] }, - { v: 49, op: [{i: "penguin", p: 18}] } - ]; - this.UpdateManager._addProjectHistoryMetadataToOps(appliedOps, this.pathname, this.projectHistoryId, lines); - return appliedOps.should.deep.equal([{ - projectHistoryId: this.projectHistoryId, - v: 42, - op: [{i: "foo", p: 4}, { i: "bar", p: 6 }], - meta: { - pathname: this.pathname, - doc_length: 14 +describe('UpdateManager', function () { + beforeEach(function () { + let Profiler, Timer + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.doc_id = 'document-id-123' + this.callback = sinon.stub() + return (this.UpdateManager = SandboxedModule.require(modulePath, { + requires: { + './LockManager': (this.LockManager = {}), + './RedisManager': (this.RedisManager = {}), + './RealTimeRedisManager': (this.RealTimeRedisManager = {}), + './ShareJsUpdateManager': (this.ShareJsUpdateManager = {}), + './HistoryManager': (this.HistoryManager = {}), + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } } - }, { - projectHistoryId: this.projectHistoryId, - v: 45, - op: [{d: "qux", p: 4}, { i: "bazbaz", p: 14 }], - meta: { - pathname: this.pathname, - doc_length: 20 - } // 14 + 'foo' + 'bar' - }, { - projectHistoryId: this.projectHistoryId, - v: 49, - op: [{i: "penguin", p: 18}], - meta: { - pathname: this.pathname, - doc_length: 23 - } // 14 - 'qux' + 'bazbaz' - }]); -}); }); + Timer.initClass() + return Timer + })()) + }), + 'settings-sharelatex': (this.Settings = {}), + './DocumentManager': (this.DocumentManager = {}), + './RangesManager': (this.RangesManager = {}), + './SnapshotManager': (this.SnapshotManager = {}), + './Profiler': (Profiler = (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() + } + } + Profiler.initClass() + return Profiler + })()) + } + })) + }) - return describe("lockUpdatesAndDo", function() { - beforeEach(function() { - this.method = sinon.stub().callsArgWith(3, null, this.response_arg1); - this.callback = sinon.stub(); - this.arg1 = "argument 1"; - this.response_arg1 = "response argument 1"; - this.lockValue = "mock-lock-value"; - this.LockManager.getLock = sinon.stub().callsArgWith(1, null, this.lockValue); - return this.LockManager.releaseLock = sinon.stub().callsArg(2); - }); + describe('processOutstandingUpdates', function () { + beforeEach(function () { + this.UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2) + return this.UpdateManager.processOutstandingUpdates( + this.project_id, + this.doc_id, + this.callback + ) + }) - describe("successfully", function() { - beforeEach(function() { - this.UpdateManager.continueProcessingUpdatesWithLock = sinon.stub(); - this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2); - return this.UpdateManager.lockUpdatesAndDo(this.method, this.project_id, this.doc_id, this.arg1, this.callback); - }); + it('should apply the updates', function () { + 
return this.UpdateManager.fetchAndApplyUpdates + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) - it("should lock the doc", function() { - return this.LockManager.getLock - .calledWith(this.doc_id) - .should.equal(true); - }); + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) - it("should process any outstanding updates", function() { - return this.UpdateManager.processOutstandingUpdates - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) - it("should call the method", function() { - return this.method - .calledWith(this.project_id, this.doc_id, this.arg1) - .should.equal(true); - }); + describe('processOutstandingUpdatesWithLock', function () { + describe('when the lock is free', function () { + beforeEach(function () { + this.LockManager.tryLock = sinon + .stub() + .callsArgWith(1, null, true, (this.lockValue = 'mock-lock-value')) + this.LockManager.releaseLock = sinon.stub().callsArg(2) + this.UpdateManager.continueProcessingUpdatesWithLock = sinon + .stub() + .callsArg(2) + return (this.UpdateManager.processOutstandingUpdates = sinon + .stub() + .callsArg(2)) + }) - it("should return the method response to the callback", function() { - return this.callback - .calledWith(null, this.response_arg1) - .should.equal(true); - }); + describe('successfully', function () { + beforeEach(function () { + return this.UpdateManager.processOutstandingUpdatesWithLock( + this.project_id, + this.doc_id, + this.callback + ) + }) - it("should release the lock", function() { - return this.LockManager.releaseLock - .calledWith(this.doc_id, this.lockValue) - .should.equal(true); - }); + it('should acquire the lock', function () { + return this.LockManager.tryLock + .calledWith(this.doc_id) + .should.equal(true) + }) - return it("should continue processing updates", function() { - return this.UpdateManager.continueProcessingUpdatesWithLock - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); - }); + it('should free the lock', function () { + return this.LockManager.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) - describe("when processOutstandingUpdates returns an error", function() { - beforeEach(function() { - this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArgWith(2, (this.error = new Error("Something went wrong"))); - return this.UpdateManager.lockUpdatesAndDo(this.method, this.project_id, this.doc_id, this.arg1, this.callback); - }); + it('should process the outstanding updates', function () { + return this.UpdateManager.processOutstandingUpdates + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) - it("should free the lock", function() { - return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); - }); + it('should do everything with the lock acquired', function () { + this.UpdateManager.processOutstandingUpdates + .calledAfter(this.LockManager.tryLock) + .should.equal(true) + return this.UpdateManager.processOutstandingUpdates + .calledBefore(this.LockManager.releaseLock) + .should.equal(true) + }) - return it("should return the error in the callback", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + it('should continue processing new updates that may have come in', function () { + return 
this.UpdateManager.continueProcessingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) - return describe("when the method returns an error", function() { - beforeEach(function() { - this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2); - this.method = sinon.stub().callsArgWith(3, (this.error = new Error("something went wrong")), this.response_arg1); - return this.UpdateManager.lockUpdatesAndDo(this.method, this.project_id, this.doc_id, this.arg1, this.callback); - }); + return it('should return the callback', function () { + return this.callback.called.should.equal(true) + }) + }) - it("should free the lock", function() { - return this.LockManager.releaseLock.calledWith(this.doc_id, this.lockValue).should.equal(true); - }); + return describe('when processOutstandingUpdates returns an error', function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdates = sinon + .stub() + .callsArgWith(2, (this.error = new Error('Something went wrong'))) + return this.UpdateManager.processOutstandingUpdatesWithLock( + this.project_id, + this.doc_id, + this.callback + ) + }) - return it("should return the error in the callback", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); - }); -}); + it('should free the lock', function () { + return this.LockManager.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + return it('should return the error in the callback', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + }) + return describe('when the lock is taken', function () { + beforeEach(function () { + this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false) + this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + return this.UpdateManager.processOutstandingUpdatesWithLock( + this.project_id, + this.doc_id, + this.callback + ) + }) + it('should return the callback', function () { + return this.callback.called.should.equal(true) + }) + + return it('should not process the updates', function () { + return this.UpdateManager.processOutstandingUpdates.called.should.equal( + false + ) + }) + }) + }) + + describe('continueProcessingUpdatesWithLock', function () { + describe('when there are outstanding updates', function () { + beforeEach(function () { + this.RealTimeRedisManager.getUpdatesLength = sinon + .stub() + .callsArgWith(1, null, 3) + this.UpdateManager.processOutstandingUpdatesWithLock = sinon + .stub() + .callsArg(2) + return this.UpdateManager.continueProcessingUpdatesWithLock( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should process the outstanding updates', function () { + return this.UpdateManager.processOutstandingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + return it('should return the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + return describe('when there are no outstanding updates', function () { + beforeEach(function () { + this.RealTimeRedisManager.getUpdatesLength = sinon + .stub() + .callsArgWith(1, null, 0) + this.UpdateManager.processOutstandingUpdatesWithLock = sinon + .stub() + .callsArg(2) + return this.UpdateManager.continueProcessingUpdatesWithLock( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should not try to process the outstanding updates', function () { + return 
this.UpdateManager.processOutstandingUpdatesWithLock.called.should.equal( + false + ) + }) + + return it('should return the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + }) + + describe('fetchAndApplyUpdates', function () { + describe('with updates', function () { + beforeEach(function () { + this.updates = [{ p: 1, t: 'foo' }] + this.updatedDocLines = ['updated', 'lines'] + this.version = 34 + this.RealTimeRedisManager.getPendingUpdatesForDoc = sinon + .stub() + .callsArgWith(1, null, this.updates) + this.UpdateManager.applyUpdate = sinon + .stub() + .callsArgWith(3, null, this.updatedDocLines, this.version) + return this.UpdateManager.fetchAndApplyUpdates( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should get the pending updates', function () { + return this.RealTimeRedisManager.getPendingUpdatesForDoc + .calledWith(this.doc_id) + .should.equal(true) + }) + + it('should apply the updates', function () { + return Array.from(this.updates).map((update) => + this.UpdateManager.applyUpdate + .calledWith(this.project_id, this.doc_id, update) + .should.equal(true) + ) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + return describe('when there are no updates', function () { + beforeEach(function () { + this.updates = [] + this.RealTimeRedisManager.getPendingUpdatesForDoc = sinon + .stub() + .callsArgWith(1, null, this.updates) + this.UpdateManager.applyUpdate = sinon.stub() + this.RedisManager.setDocument = sinon.stub() + return this.UpdateManager.fetchAndApplyUpdates( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should not call applyUpdate', function () { + return this.UpdateManager.applyUpdate.called.should.equal(false) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + }) + + describe('applyUpdate', function () { + beforeEach(function () { + this.updateMeta = { user_id: 'last-author-fake-id' } + this.update = { op: [{ p: 42, i: 'foo' }], meta: this.updateMeta } + this.updatedDocLines = ['updated', 'lines'] + this.version = 34 + this.lines = ['original', 'lines'] + this.ranges = { entries: 'mock', comments: 'mock' } + this.updated_ranges = { entries: 'updated', comments: 'updated' } + this.appliedOps = [ + { v: 42, op: 'mock-op-42' }, + { v: 45, op: 'mock-op-45' } + ] + this.doc_ops_length = sinon.stub() + this.project_ops_length = sinon.stub() + this.pathname = '/a/b/c.tex' + this.DocumentManager.getDoc = sinon + .stub() + .yields( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId + ) + this.RangesManager.applyUpdate = sinon + .stub() + .yields(null, this.updated_ranges, false) + this.ShareJsUpdateManager.applyUpdate = sinon + .stub() + .yields(null, this.updatedDocLines, this.version, this.appliedOps) + this.RedisManager.updateDocument = sinon + .stub() + .yields(null, this.doc_ops_length, this.project_ops_length) + this.RealTimeRedisManager.sendData = sinon.stub() + this.UpdateManager._addProjectHistoryMetadataToOps = sinon.stub() + return (this.HistoryManager.recordAndFlushHistoryOps = sinon + .stub() + .callsArg(5)) + }) + + describe('normally', function () { + beforeEach(function () { + return this.UpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.callback + ) + }) + + it('should apply the updates via ShareJS', function () { + return this.ShareJsUpdateManager.applyUpdate + 
.calledWith( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version + ) + .should.equal(true) + }) + + it('should update the ranges', function () { + return this.RangesManager.applyUpdate + .calledWith( + this.project_id, + this.doc_id, + this.ranges, + this.appliedOps, + this.updatedDocLines + ) + .should.equal(true) + }) + + it('should save the document', function () { + return this.RedisManager.updateDocument + .calledWith( + this.project_id, + this.doc_id, + this.updatedDocLines, + this.version, + this.appliedOps, + this.updated_ranges, + this.updateMeta + ) + .should.equal(true) + }) + + it('should add metadata to the ops', function () { + return this.UpdateManager._addProjectHistoryMetadataToOps + .calledWith( + this.appliedOps, + this.pathname, + this.projectHistoryId, + this.lines + ) + .should.equal(true) + }) + + it('should push the applied ops into the history queue', function () { + return this.HistoryManager.recordAndFlushHistoryOps + .calledWith( + this.project_id, + this.doc_id, + this.appliedOps, + this.doc_ops_length, + this.project_ops_length + ) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + describe('with UTF-16 surrogate pairs in the update', function () { + beforeEach(function () { + this.update = { op: [{ p: 42, i: '\uD835\uDC00' }] } + return this.UpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.callback + ) + }) + + return it('should apply the update but with surrogate pairs removed', function () { + this.ShareJsUpdateManager.applyUpdate + .calledWith(this.project_id, this.doc_id, this.update) + .should.equal(true) + + // \uFFFD is 'replacement character' + return this.update.op[0].i.should.equal('\uFFFD\uFFFD') + }) + }) + + describe('with an error', function () { + beforeEach(function () { + this.error = new Error('something went wrong') + this.ShareJsUpdateManager.applyUpdate = sinon.stub().yields(this.error) + return this.UpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.callback + ) + }) + + it('should call RealTimeRedisManager.sendData with the error', function () { + return this.RealTimeRedisManager.sendData + .calledWith({ + project_id: this.project_id, + doc_id: this.doc_id, + error: this.error.message + }) + .should.equal(true) + }) + + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + + return describe('when ranges get collapsed', function () { + beforeEach(function () { + this.RangesManager.applyUpdate = sinon + .stub() + .yields(null, this.updated_ranges, true) + this.SnapshotManager.recordSnapshot = sinon.stub().yields() + return this.UpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.callback + ) + }) + + return it('should call SnapshotManager.recordSnapshot', function () { + return this.SnapshotManager.recordSnapshot + .calledWith( + this.project_id, + this.doc_id, + this.version, + this.pathname, + this.lines, + this.ranges + ) + .should.equal(true) + }) + }) + }) + + describe('_addProjectHistoryMetadataToOps', function () { + return it('should add projectHistoryId, pathname and doc_length metadata to the ops', function () { + const lines = ['some', 'test', 'data'] + const appliedOps = [ + { + v: 42, + op: [ + { i: 'foo', p: 4 }, + { i: 'bar', p: 6 } + ] + }, + { + v: 45, + op: [ + { d: 'qux', p: 4 }, + { i: 'bazbaz', p: 14 } + ] + }, + { 
v: 49, op: [{ i: 'penguin', p: 18 }] } + ] + this.UpdateManager._addProjectHistoryMetadataToOps( + appliedOps, + this.pathname, + this.projectHistoryId, + lines + ) + return appliedOps.should.deep.equal([ + { + projectHistoryId: this.projectHistoryId, + v: 42, + op: [ + { i: 'foo', p: 4 }, + { i: 'bar', p: 6 } + ], + meta: { + pathname: this.pathname, + doc_length: 14 + } + }, + { + projectHistoryId: this.projectHistoryId, + v: 45, + op: [ + { d: 'qux', p: 4 }, + { i: 'bazbaz', p: 14 } + ], + meta: { + pathname: this.pathname, + doc_length: 20 + } // 14 + 'foo' + 'bar' + }, + { + projectHistoryId: this.projectHistoryId, + v: 49, + op: [{ i: 'penguin', p: 18 }], + meta: { + pathname: this.pathname, + doc_length: 23 + } // 14 - 'qux' + 'bazbaz' + } + ]) + }) + }) + + return describe('lockUpdatesAndDo', function () { + beforeEach(function () { + this.method = sinon.stub().callsArgWith(3, null, this.response_arg1) + this.callback = sinon.stub() + this.arg1 = 'argument 1' + this.response_arg1 = 'response argument 1' + this.lockValue = 'mock-lock-value' + this.LockManager.getLock = sinon + .stub() + .callsArgWith(1, null, this.lockValue) + return (this.LockManager.releaseLock = sinon.stub().callsArg(2)) + }) + + describe('successfully', function () { + beforeEach(function () { + this.UpdateManager.continueProcessingUpdatesWithLock = sinon.stub() + this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + return this.UpdateManager.lockUpdatesAndDo( + this.method, + this.project_id, + this.doc_id, + this.arg1, + this.callback + ) + }) + + it('should lock the doc', function () { + return this.LockManager.getLock + .calledWith(this.doc_id) + .should.equal(true) + }) + + it('should process any outstanding updates', function () { + return this.UpdateManager.processOutstandingUpdates + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should call the method', function () { + return this.method + .calledWith(this.project_id, this.doc_id, this.arg1) + .should.equal(true) + }) + + it('should return the method response to the callback', function () { + return this.callback + .calledWith(null, this.response_arg1) + .should.equal(true) + }) + + it('should release the lock', function () { + return this.LockManager.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + + return it('should continue processing updates', function () { + return this.UpdateManager.continueProcessingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + }) + + describe('when processOutstandingUpdates returns an error', function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdates = sinon + .stub() + .callsArgWith(2, (this.error = new Error('Something went wrong'))) + return this.UpdateManager.lockUpdatesAndDo( + this.method, + this.project_id, + this.doc_id, + this.arg1, + this.callback + ) + }) + + it('should free the lock', function () { + return this.LockManager.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + + return it('should return the error in the callback', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + + return describe('when the method returns an error', function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2) + this.method = sinon + .stub() + .callsArgWith( + 3, + (this.error = new Error('something went wrong')), + this.response_arg1 + ) + return 
this.UpdateManager.lockUpdatesAndDo( + this.method, + this.project_id, + this.doc_id, + this.arg1, + this.callback + ) + }) + + it('should free the lock', function () { + return this.LockManager.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + + return it('should return the error in the callback', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + }) +}) From f46fe5be7f6fe32260139414d48e873b807f3553 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:12:02 +0200 Subject: [PATCH 619/769] decaffeinate: Rename ApplyingUpdatesToADocTests.coffee and 14 other files from .coffee to .js --- ...ingUpdatesToADocTests.coffee => ApplyingUpdatesToADocTests.js} | 0 ...tureTests.coffee => ApplyingUpdatesToProjectStructureTests.js} | 0 .../{DeletingADocumentTests.coffee => DeletingADocumentTests.js} | 0 .../{DeletingAProjectTests.coffee => DeletingAProjectTests.js} | 0 .../{FlushingAProjectTests.coffee => FlushingAProjectTests.js} | 0 .../coffee/{FlushingDocsTests.coffee => FlushingDocsTests.js} | 0 .../{GettingADocumentTests.coffee => GettingADocumentTests.js} | 0 ...{GettingProjectDocsTests.coffee => GettingProjectDocsTests.js} | 0 .../test/acceptance/coffee/{RangesTests.coffee => RangesTests.js} | 0 .../{SettingADocumentTests.coffee => SettingADocumentTests.js} | 0 .../coffee/helpers/{DocUpdaterApp.coffee => DocUpdaterApp.js} | 0 .../helpers/{DocUpdaterClient.coffee => DocUpdaterClient.js} | 0 .../{MockProjectHistoryApi.coffee => MockProjectHistoryApi.js} | 0 .../{MockTrackChangesApi.coffee => MockTrackChangesApi.js} | 0 .../coffee/helpers/{MockWebApi.coffee => MockWebApi.js} | 0 15 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/test/acceptance/coffee/{ApplyingUpdatesToADocTests.coffee => ApplyingUpdatesToADocTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{ApplyingUpdatesToProjectStructureTests.coffee => ApplyingUpdatesToProjectStructureTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{DeletingADocumentTests.coffee => DeletingADocumentTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{DeletingAProjectTests.coffee => DeletingAProjectTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{FlushingAProjectTests.coffee => FlushingAProjectTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{FlushingDocsTests.coffee => FlushingDocsTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{GettingADocumentTests.coffee => GettingADocumentTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{GettingProjectDocsTests.coffee => GettingProjectDocsTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{RangesTests.coffee => RangesTests.js} (100%) rename services/document-updater/test/acceptance/coffee/{SettingADocumentTests.coffee => SettingADocumentTests.js} (100%) rename services/document-updater/test/acceptance/coffee/helpers/{DocUpdaterApp.coffee => DocUpdaterApp.js} (100%) rename services/document-updater/test/acceptance/coffee/helpers/{DocUpdaterClient.coffee => DocUpdaterClient.js} (100%) rename services/document-updater/test/acceptance/coffee/helpers/{MockProjectHistoryApi.coffee => MockProjectHistoryApi.js} (100%) rename services/document-updater/test/acceptance/coffee/helpers/{MockTrackChangesApi.coffee => MockTrackChangesApi.js} (100%) rename services/document-updater/test/acceptance/coffee/helpers/{MockWebApi.coffee => 
MockWebApi.js} (100%) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee rename to services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.coffee rename to services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/DeletingADocumentTests.coffee rename to services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/DeletingAProjectTests.coffee rename to services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/FlushingAProjectTests.coffee rename to services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/FlushingDocsTests.coffee rename to services/document-updater/test/acceptance/coffee/FlushingDocsTests.js diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/GettingADocumentTests.coffee rename to services/document-updater/test/acceptance/coffee/GettingADocumentTests.js diff --git a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.coffee rename to services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.coffee b/services/document-updater/test/acceptance/coffee/RangesTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/RangesTests.coffee rename to services/document-updater/test/acceptance/coffee/RangesTests.js diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js similarity index 100% rename from 
services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee rename to services/document-updater/test/acceptance/coffee/SettingADocumentTests.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.coffee rename to services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.coffee rename to services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.coffee rename to services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.coffee rename to services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/MockWebApi.coffee rename to services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js From adffde3059fab913b1e2980c2a9e2fd243e8b840 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:12:17 +0200 Subject: [PATCH 620/769] decaffeinate: Convert ApplyingUpdatesToADocTests.coffee and 14 other files to JS --- .../coffee/ApplyingUpdatesToADocTests.js | 771 ++++++++++-------- .../ApplyingUpdatesToProjectStructureTests.js | 549 +++++++------ .../coffee/DeletingADocumentTests.js | 204 +++-- .../coffee/DeletingAProjectTests.js | 313 ++++--- .../coffee/FlushingAProjectTests.js | 145 ++-- .../acceptance/coffee/FlushingDocsTests.js | 170 ++-- .../coffee/GettingADocumentTests.js | 264 +++--- .../coffee/GettingProjectDocsTests.js | 150 ++-- .../test/acceptance/coffee/RangesTests.js | 727 ++++++++++------- .../coffee/SettingADocumentTests.js | 484 ++++++----- .../coffee/helpers/DocUpdaterApp.js | 56 +- .../coffee/helpers/DocUpdaterClient.js | 240 ++++-- .../coffee/helpers/MockProjectHistoryApi.js | 45 +- .../coffee/helpers/MockTrackChangesApi.js | 51 +- .../acceptance/coffee/helpers/MockWebApi.js | 109 ++- 15 files changed, 2477 insertions(+), 1801 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js index 489f8d98eb..b5259dc7d5 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js @@ -1,394 +1,499 @@ -sinon = require "sinon" 
-chai = require("chai") -chai.should() -expect = chai.expect -async = require "async" -Settings = require('settings-sharelatex') -rclient_history = require("redis-sharelatex").createClient(Settings.redis.history) # note: this is track changes, not project-history -rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history) -rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -Keys = Settings.redis.documentupdater.key_schema -HistoryKeys = Settings.redis.history.key_schema -ProjectHistoryKeys = Settings.redis.project_history.key_schema +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); +const { + expect +} = chai; +const async = require("async"); +const Settings = require('settings-sharelatex'); +const rclient_history = require("redis-sharelatex").createClient(Settings.redis.history); // note: this is track changes, not project-history +const rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history); +const rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater); +const Keys = Settings.redis.documentupdater.key_schema; +const HistoryKeys = Settings.redis.history.key_schema; +const ProjectHistoryKeys = Settings.redis.project_history.key_schema; -MockTrackChangesApi = require "./helpers/MockTrackChangesApi" -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const MockTrackChangesApi = require("./helpers/MockTrackChangesApi"); +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Applying updates to a doc", -> - before (done) -> - @lines = ["one", "two", "three"] - @version = 42 - @update = - doc: @doc_id +describe("Applying updates to a doc", function() { + before(function(done) { + this.lines = ["one", "two", "three"]; + this.version = 42; + this.update = { + doc: this.doc_id, op: [{ - i: "one and a half\n" + i: "one and a half\n", p: 4 - }] - v: @version - @result = ["one", "one and a half", "two", "three"] - DocUpdaterApp.ensureRunning(done) + }], + v: this.version + }; + this.result = ["one", "one and a half", "two", "three"]; + return DocUpdaterApp.ensureRunning(done); + }); - describe "when the document is not loaded", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - sinon.spy MockWebApi, "getDocument" - @startTime = Date.now() - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? 
- setTimeout done, 200 - return null + describe("when the document is not loaded", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + sinon.spy(MockWebApi, "getDocument"); + this.startTime = Date.now(); + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + return null; + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - it "should load the document from the web API", -> - MockWebApi.getDocument - .calledWith(@project_id, @doc_id) - .should.equal true + it("should load the document from the web API", function() { + return MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); - it "should update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - return null + it("should update the doc", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.lines.should.deep.equal(this.result); + return done(); + }); + return null; + }); - it "should push the applied updates to the track changes api", (done) -> - rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => - throw error if error? - JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) => - throw error if error? - result.should.equal 1 - done() - return null + it("should push the applied updates to the track changes api", function(done) { + rclient_history.lrange(HistoryKeys.uncompressedHistoryOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { + if (error != null) { throw error; } + JSON.parse(updates[0]).op.should.deep.equal(this.update.op); + return rclient_history.sismember(HistoryKeys.docsWithHistoryOps({project_id: this.project_id}), this.doc_id, (error, result) => { + if (error != null) { throw error; } + result.should.equal(1); + return done(); + }); + }); + return null; + }); - it "should push the applied updates to the project history changes api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - throw error if error? - JSON.parse(updates[0]).op.should.deep.equal @update.op - done() - return null + it("should push the applied updates to the project history changes api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + if (error != null) { throw error; } + JSON.parse(updates[0]).op.should.deep.equal(this.update.op); + return done(); + }); + return null; + }); - it "should set the first op timestamp", (done) -> - rclient_project_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => - throw error if error? 
- result.should.be.within(@startTime, Date.now()) - @firstOpTimestamp = result - done() - return null + it("should set the first op timestamp", function(done) { + rclient_project_history.get(ProjectHistoryKeys.projectHistoryFirstOpTimestamp({project_id: this.project_id}), (error, result) => { + if (error != null) { throw error; } + result.should.be.within(this.startTime, Date.now()); + this.firstOpTimestamp = result; + return done(); + }); + return null; + }); - describe "when sending another update", -> - before (done) -> - @timeout = 10000 - @second_update = Object.create(@update) - @second_update.v = @version + 1 - DocUpdaterClient.sendUpdate @project_id, @doc_id, @second_update, (error) -> - throw error if error? - setTimeout done, 200 - return null + return describe("when sending another update", function() { + before(function(done) { + this.timeout = 10000; + this.second_update = Object.create(this.update); + this.second_update.v = this.version + 1; + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.second_update, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + return null; + }); - it "should not change the first op timestamp", (done) -> - rclient_project_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) => - throw error if error? - result.should.equal @firstOpTimestamp - done() - return null + return it("should not change the first op timestamp", function(done) { + rclient_project_history.get(ProjectHistoryKeys.projectHistoryFirstOpTimestamp({project_id: this.project_id}), (error, result) => { + if (error != null) { throw error; } + result.should.equal(this.firstOpTimestamp); + return done(); + }); + return null; + }); + }); + }); - describe "when the document is loaded", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + describe("when the document is loaded", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? 
- setTimeout done, 200 - return null + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + sinon.spy(MockWebApi, "getDocument"); + return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + }); + return null; + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - it "should not need to call the web api", -> - MockWebApi.getDocument.called.should.equal false + it("should not need to call the web api", () => MockWebApi.getDocument.called.should.equal(false)); - it "should update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - return null + it("should update the doc", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.lines.should.deep.equal(this.result); + return done(); + }); + return null; + }); - it "should push the applied updates to the track changes api", (done) -> - rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => - JSON.parse(updates[0]).op.should.deep.equal @update.op - rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) => - result.should.equal 1 - done() - return null + it("should push the applied updates to the track changes api", function(done) { + rclient_history.lrange(HistoryKeys.uncompressedHistoryOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { + JSON.parse(updates[0]).op.should.deep.equal(this.update.op); + return rclient_history.sismember(HistoryKeys.docsWithHistoryOps({project_id: this.project_id}), this.doc_id, (error, result) => { + result.should.equal(1); + return done(); + }); + }); + return null; + }); - it "should push the applied updates to the project history changes api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - JSON.parse(updates[0]).op.should.deep.equal @update.op - done() - return null + return it("should push the applied updates to the project history changes api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + JSON.parse(updates[0]).op.should.deep.equal(this.update.op); + return done(); + }); + return null; + }); + }); - describe "when the document is loaded and is using project-history only", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + describe("when the document is loaded and is using project-history only", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version, projectHistoryType: 'project-history'} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) -> - throw error if error? 
- setTimeout done, 200 - return null + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version, projectHistoryType: 'project-history'}); + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + sinon.spy(MockWebApi, "getDocument"); + return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + }); + return null; + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - it "should update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - return null + it("should update the doc", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.lines.should.deep.equal(this.result); + return done(); + }); + return null; + }); - it "should not push any applied updates to the track changes api", (done) -> - rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => - updates.length.should.equal 0 - done() - return null + it("should not push any applied updates to the track changes api", function(done) { + rclient_history.lrange(HistoryKeys.uncompressedHistoryOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { + updates.length.should.equal(0); + return done(); + }); + return null; + }); - it "should push the applied updates to the project history changes api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - JSON.parse(updates[0]).op.should.deep.equal @update.op - done() - return null + return it("should push the applied updates to the project history changes api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + JSON.parse(updates[0]).op.should.deep.equal(this.update.op); + return done(); + }); + return null; + }); + }); - describe "when the document has been deleted", -> - describe "when the ops come in a single linear order", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - lines = ["", "", ""] - MockWebApi.insertDoc @project_id, @doc_id, {lines: lines, version: 0} - @updates = [ - { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } - { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } - { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } - { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } - { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } - { doc_id: @doc_id, v: 5, op: [i: " ", p: 5 ] } - { doc_id: @doc_id, v: 6, op: [i: "w", p: 6 ] } - { doc_id: @doc_id, v: 7, op: [i: "o", p: 7 ] } - { doc_id: @doc_id, v: 8, op: [i: "r", p: 8 ] } - { doc_id: @doc_id, v: 9, op: [i: "l", p: 9 ] } - { doc_id: @doc_id, v: 10, op: [i: "d", p: 10] } - ] - @my_result = ["hello world", "", ""] - done() + describe("when the document has been deleted", function() { + describe("when the ops come in a single linear order", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + const lines = ["", "", ""]; + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines, version: 0}); + this.updates = [ + { doc_id: this.doc_id, v: 0, op: [{i: "h", p: 0} ] }, + { doc_id: this.doc_id, v: 1, op: [{i: "e", p: 1} ] }, 
+ { doc_id: this.doc_id, v: 2, op: [{i: "l", p: 2} ] }, + { doc_id: this.doc_id, v: 3, op: [{i: "l", p: 3} ] }, + { doc_id: this.doc_id, v: 4, op: [{i: "o", p: 4} ] }, + { doc_id: this.doc_id, v: 5, op: [{i: " ", p: 5} ] }, + { doc_id: this.doc_id, v: 6, op: [{i: "w", p: 6} ] }, + { doc_id: this.doc_id, v: 7, op: [{i: "o", p: 7} ] }, + { doc_id: this.doc_id, v: 8, op: [{i: "r", p: 8} ] }, + { doc_id: this.doc_id, v: 9, op: [{i: "l", p: 9} ] }, + { doc_id: this.doc_id, v: 10, op: [{i: "d", p: 10}] } + ]; + this.my_result = ["hello world", "", ""]; + return done(); + }); - it "should be able to continue applying updates when the project has been deleted", (done) -> - actions = [] - for update in @updates.slice(0,6) - do (update) => - actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback - actions.push (callback) => DocUpdaterClient.deleteDoc @project_id, @doc_id, callback - for update in @updates.slice(6) - do (update) => - actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback + it("should be able to continue applying updates when the project has been deleted", function(done) { + let update; + const actions = []; + for (update of Array.from(this.updates.slice(0,6))) { + (update => { + return actions.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); + })(update); + } + actions.push(callback => DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)); + for (update of Array.from(this.updates.slice(6))) { + (update => { + return actions.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); + })(update); + } - async.series actions, (error) => - throw error if error? - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @my_result - done() - return null + async.series(actions, error => { + if (error != null) { throw error; } + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.lines.should.deep.equal(this.my_result); + return done(); + }); + }); + return null; + }); - it "should push the applied updates to the track changes api", (done) -> - rclient_history.lrange HistoryKeys.uncompressedHistoryOps({@doc_id}), 0, -1, (error, updates) => - updates = (JSON.parse(u) for u in updates) - for appliedUpdate, i in @updates - appliedUpdate.op.should.deep.equal updates[i].op + it("should push the applied updates to the track changes api", function(done) { + rclient_history.lrange(HistoryKeys.uncompressedHistoryOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { + updates = (Array.from(updates).map((u) => JSON.parse(u))); + for (let i = 0; i < this.updates.length; i++) { + const appliedUpdate = this.updates[i]; + appliedUpdate.op.should.deep.equal(updates[i].op); + } - rclient_history.sismember HistoryKeys.docsWithHistoryOps({@project_id}), @doc_id, (error, result) => - result.should.equal 1 - done() - return null + return rclient_history.sismember(HistoryKeys.docsWithHistoryOps({project_id: this.project_id}), this.doc_id, (error, result) => { + result.should.equal(1); + return done(); + }); + }); + return null; + }); - it "should store the doc ops in the correct order", (done) -> - rclient_du.lrange Keys.docOps({doc_id: @doc_id}), 0, -1, (error, updates) => - updates = (JSON.parse(u) for u in updates) - for appliedUpdate, i in @updates - appliedUpdate.op.should.deep.equal updates[i].op - done() - return null + return it("should store the doc ops 
in the correct order", function(done) { + rclient_du.lrange(Keys.docOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { + updates = (Array.from(updates).map((u) => JSON.parse(u))); + for (let i = 0; i < this.updates.length; i++) { + const appliedUpdate = this.updates[i]; + appliedUpdate.op.should.deep.equal(updates[i].op); + } + return done(); + }); + return null; + }); + }); - describe "when older ops come in after the delete", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - lines = ["", "", ""] - MockWebApi.insertDoc @project_id, @doc_id, {lines: lines, version: 0} - @updates = [ - { doc_id: @doc_id, v: 0, op: [i: "h", p: 0 ] } - { doc_id: @doc_id, v: 1, op: [i: "e", p: 1 ] } - { doc_id: @doc_id, v: 2, op: [i: "l", p: 2 ] } - { doc_id: @doc_id, v: 3, op: [i: "l", p: 3 ] } - { doc_id: @doc_id, v: 4, op: [i: "o", p: 4 ] } - { doc_id: @doc_id, v: 0, op: [i: "world", p: 1 ] } - ] - @my_result = ["hello", "world", ""] - done() + return describe("when older ops come in after the delete", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + const lines = ["", "", ""]; + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines, version: 0}); + this.updates = [ + { doc_id: this.doc_id, v: 0, op: [{i: "h", p: 0} ] }, + { doc_id: this.doc_id, v: 1, op: [{i: "e", p: 1} ] }, + { doc_id: this.doc_id, v: 2, op: [{i: "l", p: 2} ] }, + { doc_id: this.doc_id, v: 3, op: [{i: "l", p: 3} ] }, + { doc_id: this.doc_id, v: 4, op: [{i: "o", p: 4} ] }, + { doc_id: this.doc_id, v: 0, op: [{i: "world", p: 1} ] } + ]; + this.my_result = ["hello", "world", ""]; + return done(); + }); - it "should be able to continue applying updates when the project has been deleted", (done) -> - actions = [] - for update in @updates.slice(0,5) - do (update) => - actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback - actions.push (callback) => DocUpdaterClient.deleteDoc @project_id, @doc_id, callback - for update in @updates.slice(5) - do (update) => - actions.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback + return it("should be able to continue applying updates when the project has been deleted", function(done) { + let update; + const actions = []; + for (update of Array.from(this.updates.slice(0,5))) { + (update => { + return actions.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); + })(update); + } + actions.push(callback => DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)); + for (update of Array.from(this.updates.slice(5))) { + (update => { + return actions.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); + })(update); + } - async.series actions, (error) => - throw error if error? 
- DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @my_result - done() - return null + async.series(actions, error => { + if (error != null) { throw error; } + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.lines.should.deep.equal(this.my_result); + return done(); + }); + }); + return null; + }); + }); + }); - describe "with a broken update", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - @broken_update = { doc_id: @doc_id, v: @version, op: [d: "not the correct content", p: 0 ] } - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + describe("with a broken update", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + this.broken_update = { doc_id: this.doc_id, v: this.version, op: [{d: "not the correct content", p: 0} ] }; + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - DocUpdaterClient.subscribeToAppliedOps @messageCallback = sinon.stub() + DocUpdaterClient.subscribeToAppliedOps(this.messageCallback = sinon.stub()); - DocUpdaterClient.sendUpdate @project_id, @doc_id, @broken_update, (error) -> - throw error if error? - setTimeout done, 200 - return null + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.broken_update, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + return null; + }); - it "should not update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @lines - done() - return null + it("should not update the doc", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.lines.should.deep.equal(this.lines); + return done(); + }); + return null; + }); - it "should send a message with an error", -> - @messageCallback.called.should.equal true - [channel, message] = @messageCallback.args[0] - channel.should.equal "applied-ops" - JSON.parse(message).should.deep.include { - project_id: @project_id, - doc_id: @doc_id, + return it("should send a message with an error", function() { + this.messageCallback.called.should.equal(true); + const [channel, message] = Array.from(this.messageCallback.args[0]); + channel.should.equal("applied-ops"); + return JSON.parse(message).should.deep.include({ + project_id: this.project_id, + doc_id: this.doc_id, error:'Delete component does not match' + }); + }); +}); + + describe("with enough updates to flush to the track changes api", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + const updates = []; + for (let v = 0; v <= 199; v++) { // Should flush after 100 ops + updates.push({ + doc_id: this.doc_id, + op: [{i: v.toString(), p: 0}], + v + }); } - describe "with enough updates to flush to the track changes api", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - updates = [] - for v in [0..199] # Should flush after 100 ops - updates.push - doc_id: @doc_id, - op: [i: v.toString(), p: 0] - v: v + sinon.spy(MockTrackChangesApi, "flushDoc"); - sinon.spy MockTrackChangesApi, "flushDoc" + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: 0}); - MockWebApi.insertDoc @project_id, 
@doc_id, {lines: @lines, version: 0}
-
-      # Send updates in chunks to causes multiple flushes
-      actions = []
-      for i in [0..19]
-        do (i) =>
-          actions.push (cb) =>
-            DocUpdaterClient.sendUpdates @project_id, @doc_id, updates.slice(i*10, (i+1)*10), cb
-      async.series actions, (error) =>
-        throw error if error?
-        setTimeout done, 2000
-      return null
-
-    after ->
-      MockTrackChangesApi.flushDoc.restore()
-
-    it "should flush the doc twice", ->
-      MockTrackChangesApi.flushDoc.calledTwice.should.equal true
-
-  describe "when there is no version in Mongo", ->
-    before (done) ->
-      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
-      MockWebApi.insertDoc @project_id, @doc_id, {
-        lines: @lines
+      // Send updates in chunks to cause multiple flushes
+      const actions = [];
+      for (let i = 0; i <= 19; i++) {
+        (i => {
+          return actions.push(cb => {
+            return DocUpdaterClient.sendUpdates(this.project_id, this.doc_id, updates.slice(i*10, (i+1)*10), cb);
+          });
+        })(i);
      }
+      async.series(actions, error => {
+        if (error != null) { throw error; }
+        return setTimeout(done, 2000);
+      });
+      return null;
+    });
-      update =
-        doc: @doc_id
-        op: @update.op
+    after(() => MockTrackChangesApi.flushDoc.restore());
+
+    return it("should flush the doc twice", () => MockTrackChangesApi.flushDoc.calledTwice.should.equal(true));
+  });
+
+  describe("when there is no version in Mongo", function() {
+    before(function(done) {
+      [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]);
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines
+      });
+
+      const update = {
+        doc: this.doc_id,
+        op: this.update.op,
        v: 0
-      DocUpdaterClient.sendUpdate @project_id, @doc_id, update, (error) ->
-        throw error if error?
-        setTimeout done, 200
-      return null
+      };
+      DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, function(error) {
+        if (error != null) { throw error; }
+        return setTimeout(done, 200);
+      });
+      return null;
+    });
-    it "should update the doc (using version = 0)", (done) ->
-      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
-        doc.lines.should.deep.equal @result
-        done()
-      return null
+    return it("should update the doc (using version = 0)", function(done) {
+      DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => {
+        doc.lines.should.deep.equal(this.result);
+        return done();
+      });
+      return null;
+    });
+  });
-  describe "when the sending duplicate ops", ->
-    before (done) ->
-      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
-      MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version}
+  return describe("when sending duplicate ops", function() {
+    before(function(done) {
+      [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]);
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version});
-      DocUpdaterClient.subscribeToAppliedOps @messageCallback = sinon.stub()
+      DocUpdaterClient.subscribeToAppliedOps(this.messageCallback = sinon.stub());
-      # One user delete 'one', the next turns it into 'once'. The second becomes a NOP.
+      // One user deletes 'one', the next turns it into 'once'. The second becomes a NOP.
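+      // A sketch of the dedupe behaviour exercised here (inferred from the
+      // assertions at the end of this describe block, not from a spec): both
+      // updates carry the same v and the same meta.source, and the second one
+      // lists that source in dupIfSource. Instead of being applied twice, the
+      // second is acknowledged on the "applied-ops" channel with op.dup set,
+      // roughly:
+      //   { project_id: "...", doc_id: "...", op: { dup: true, ... } }
+      // whereas the first acknowledgement carries no dup flag.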
+ DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, { + doc: this.doc_id, op: [{ - i: "one and a half\n" + i: "one and a half\n", p: 4 - }] - v: @version - meta: + }], + v: this.version, + meta: { source: "ikHceq3yfAdQYzBo4-xZ" - }, (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.sendUpdate @project_id, @doc_id, { - doc: @doc_id + } + }, error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, { + doc: this.doc_id, op: [{ - i: "one and a half\n" + i: "one and a half\n", p: 4 - }] - v: @version - dupIfSource: ["ikHceq3yfAdQYzBo4-xZ"] - meta: + }], + v: this.version, + dupIfSource: ["ikHceq3yfAdQYzBo4-xZ"], + meta: { source: "ikHceq3yfAdQYzBo4-xZ" - }, (error) => - throw error if error? - setTimeout done, 200 - , 200 - return null + } + }, error => { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + } + , 200); + }); + return null; + }); - it "should update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @result - done() - return null + it("should update the doc", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.lines.should.deep.equal(this.result); + return done(); + }); + return null; + }); - it "should return a message about duplicate ops", -> - @messageCallback.calledTwice.should.equal true - @messageCallback.args[0][0].should.equal "applied-ops" - expect(JSON.parse(@messageCallback.args[0][1]).op.dup).to.be.undefined - @messageCallback.args[1][0].should.equal "applied-ops" - expect(JSON.parse(@messageCallback.args[1][1]).op.dup).to.equal true + return it("should return a message about duplicate ops", function() { + this.messageCallback.calledTwice.should.equal(true); + this.messageCallback.args[0][0].should.equal("applied-ops"); + expect(JSON.parse(this.messageCallback.args[0][1]).op.dup).to.be.undefined; + this.messageCallback.args[1][0].should.equal("applied-ops"); + return expect(JSON.parse(this.messageCallback.args[1][1]).op.dup).to.equal(true); + }); + }); +}); diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js index e18aa2e6a1..3875cf28d1 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js @@ -1,300 +1,363 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() -Settings = require('settings-sharelatex') -rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history) -ProjectHistoryKeys = Settings.redis.project_history.key_schema +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); +const Settings = require('settings-sharelatex'); +const rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history); +const ProjectHistoryKeys = Settings.redis.project_history.key_schema; -MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" -MockWebApi = require 
"./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const MockProjectHistoryApi = require("./helpers/MockProjectHistoryApi"); +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Applying updates to a project's structure", -> - before -> - @user_id = 'user-id-123' - @version = 1234 +describe("Applying updates to a project's structure", function() { + before(function() { + this.user_id = 'user-id-123'; + return this.version = 1234; + }); - describe "renaming a file", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @fileUpdate = - id: DocUpdaterClient.randomId() - pathname: '/file-path' + describe("renaming a file", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.fileUpdate = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path', newPathname: '/new-file-path' - @fileUpdates = [ @fileUpdate ] - DocUpdaterApp.ensureRunning (error) => - throw error if error? - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, @version, (error) -> - throw error if error? - setTimeout done, 200 + }; + this.fileUpdates = [ this.fileUpdate ]; + return DocUpdaterApp.ensureRunning(error => { + if (error != null) { throw error; } + return DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, [], this.fileUpdates, this.version, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + }); + }); - it "should push the applied file renames to the project history api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - throw error if error? + return it("should push the applied file renames to the project history api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + if (error != null) { throw error; } - update = JSON.parse(updates[0]) - update.file.should.equal @fileUpdate.id - update.pathname.should.equal '/file-path' - update.new_pathname.should.equal '/new-file-path' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.0" + const update = JSON.parse(updates[0]); + update.file.should.equal(this.fileUpdate.id); + update.pathname.should.equal('/file-path'); + update.new_pathname.should.equal('/new-file-path'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.0`); - done() - return null + return done(); + }); + return null; + }); + }); - describe "renaming a document", -> - before -> - @docUpdate = - id: DocUpdaterClient.randomId() - pathname: '/doc-path' + describe("renaming a document", function() { + before(function() { + this.docUpdate = { + id: DocUpdaterClient.randomId(), + pathname: '/doc-path', newPathname: '/new-doc-path' - @docUpdates = [ @docUpdate ] + }; + return this.docUpdates = [ this.docUpdate ];}); - describe "when the document is not loaded", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> - throw error if error? 
- setTimeout done, 200 - return null + describe("when the document is not loaded", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + return null; + }); - it "should push the applied doc renames to the project history api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - throw error if error? + return it("should push the applied doc renames to the project history api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + if (error != null) { throw error; } - update = JSON.parse(updates[0]) - update.doc.should.equal @docUpdate.id - update.pathname.should.equal '/doc-path' - update.new_pathname.should.equal '/new-doc-path' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.0" + const update = JSON.parse(updates[0]); + update.doc.should.equal(this.docUpdate.id); + update.pathname.should.equal('/doc-path'); + update.new_pathname.should.equal('/new-doc-path'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.0`); - done() - return null + return done(); + }); + return null; + }); + }); - describe "when the document is loaded", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - MockWebApi.insertDoc @project_id, @docUpdate.id, {} - DocUpdaterClient.preloadDoc @project_id, @docUpdate.id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> - throw error if error? - setTimeout done, 200 - return null + return describe("when the document is loaded", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + MockWebApi.insertDoc(this.project_id, this.docUpdate.id, {}); + DocUpdaterClient.preloadDoc(this.project_id, this.docUpdate.id, error => { + if (error != null) { throw error; } + sinon.spy(MockWebApi, "getDocument"); + return DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + }); + return null; + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - it "should update the doc", (done) -> - DocUpdaterClient.getDoc @project_id, @docUpdate.id, (error, res, doc) => - doc.pathname.should.equal @docUpdate.newPathname - done() - return null + it("should update the doc", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.docUpdate.id, (error, res, doc) => { + doc.pathname.should.equal(this.docUpdate.newPathname); + return done(); + }); + return null; + }); - it "should push the applied doc renames to the project history api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - throw error if error? 
+ return it("should push the applied doc renames to the project history api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + if (error != null) { throw error; } - update = JSON.parse(updates[0]) - update.doc.should.equal @docUpdate.id - update.pathname.should.equal '/doc-path' - update.new_pathname.should.equal '/new-doc-path' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.0" + const update = JSON.parse(updates[0]); + update.doc.should.equal(this.docUpdate.id); + update.pathname.should.equal('/doc-path'); + update.new_pathname.should.equal('/new-doc-path'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.0`); - done() - return null + return done(); + }); + return null; + }); + }); + }); - describe "renaming multiple documents and files", -> - before -> - @docUpdate0 = - id: DocUpdaterClient.randomId() - pathname: '/doc-path0' + describe("renaming multiple documents and files", function() { + before(function() { + this.docUpdate0 = { + id: DocUpdaterClient.randomId(), + pathname: '/doc-path0', newPathname: '/new-doc-path0' - @docUpdate1 = - id: DocUpdaterClient.randomId() - pathname: '/doc-path1' + }; + this.docUpdate1 = { + id: DocUpdaterClient.randomId(), + pathname: '/doc-path1', newPathname: '/new-doc-path1' - @docUpdates = [ @docUpdate0, @docUpdate1 ] - @fileUpdate0 = - id: DocUpdaterClient.randomId() - pathname: '/file-path0' + }; + this.docUpdates = [ this.docUpdate0, this.docUpdate1 ]; + this.fileUpdate0 = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path0', newPathname: '/new-file-path0' - @fileUpdate1 = - id: DocUpdaterClient.randomId() - pathname: '/file-path1' + }; + this.fileUpdate1 = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path1', newPathname: '/new-file-path1' - @fileUpdates = [ @fileUpdate0, @fileUpdate1 ] + }; + return this.fileUpdates = [ this.fileUpdate0, this.fileUpdate1 ];}); - describe "when the documents are not loaded", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, @fileUpdates, @version, (error) -> - throw error if error? - setTimeout done, 200 - return null + return describe("when the documents are not loaded", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, this.fileUpdates, this.version, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + return null; + }); - it "should push the applied doc renames to the project history api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - throw error if error? 
+ return it("should push the applied doc renames to the project history api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + if (error != null) { throw error; } - update = JSON.parse(updates[0]) - update.doc.should.equal @docUpdate0.id - update.pathname.should.equal '/doc-path0' - update.new_pathname.should.equal '/new-doc-path0' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.0" + let update = JSON.parse(updates[0]); + update.doc.should.equal(this.docUpdate0.id); + update.pathname.should.equal('/doc-path0'); + update.new_pathname.should.equal('/new-doc-path0'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.0`); - update = JSON.parse(updates[1]) - update.doc.should.equal @docUpdate1.id - update.pathname.should.equal '/doc-path1' - update.new_pathname.should.equal '/new-doc-path1' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.1" + update = JSON.parse(updates[1]); + update.doc.should.equal(this.docUpdate1.id); + update.pathname.should.equal('/doc-path1'); + update.new_pathname.should.equal('/new-doc-path1'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.1`); - update = JSON.parse(updates[2]) - update.file.should.equal @fileUpdate0.id - update.pathname.should.equal '/file-path0' - update.new_pathname.should.equal '/new-file-path0' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.2" + update = JSON.parse(updates[2]); + update.file.should.equal(this.fileUpdate0.id); + update.pathname.should.equal('/file-path0'); + update.new_pathname.should.equal('/new-file-path0'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.2`); - update = JSON.parse(updates[3]) - update.file.should.equal @fileUpdate1.id - update.pathname.should.equal '/file-path1' - update.new_pathname.should.equal '/new-file-path1' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.3" + update = JSON.parse(updates[3]); + update.file.should.equal(this.fileUpdate1.id); + update.pathname.should.equal('/file-path1'); + update.new_pathname.should.equal('/new-file-path1'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.3`); - done() - return null + return done(); + }); + return null; + }); + }); + }); - describe "adding a file", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @fileUpdate = - id: DocUpdaterClient.randomId() - pathname: '/file-path' + describe("adding a file", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.fileUpdate = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path', url: 'filestore.example.com' - @fileUpdates = [ @fileUpdate ] - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, [], @fileUpdates, @version, (error) -> - throw error if error? 
- setTimeout done, 200 - return null + }; + this.fileUpdates = [ this.fileUpdate ]; + DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, [], this.fileUpdates, this.version, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + return null; + }); - it "should push the file addition to the project history api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - throw error if error? + return it("should push the file addition to the project history api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + if (error != null) { throw error; } - update = JSON.parse(updates[0]) - update.file.should.equal @fileUpdate.id - update.pathname.should.equal '/file-path' - update.url.should.equal 'filestore.example.com' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.0" + const update = JSON.parse(updates[0]); + update.file.should.equal(this.fileUpdate.id); + update.pathname.should.equal('/file-path'); + update.url.should.equal('filestore.example.com'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.0`); - done() - return null + return done(); + }); + return null; + }); + }); - describe "adding a doc", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @docUpdate = - id: DocUpdaterClient.randomId() - pathname: '/file-path' + describe("adding a doc", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.docUpdate = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path', docLines: 'a\nb' - @docUpdates = [ @docUpdate ] - DocUpdaterClient.sendProjectUpdate @project_id, @user_id, @docUpdates, [], @version, (error) -> - throw error if error? - setTimeout done, 200 - return null + }; + this.docUpdates = [ this.docUpdate ]; + DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + return null; + }); - it "should push the doc addition to the project history api", (done) -> - rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) => - throw error if error? 
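+    // Hedged note: update.version in these assertions is the two-part string
+    // `${projectVersion}.${opIndex}` ("1234.0" here, given the this.version
+    // set in the outer before hook); the multi-rename test above shows the
+    // opIndex stepping .0 through .3 within a single project version.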
+ return it("should push the doc addition to the project history api", function(done) { + rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { + if (error != null) { throw error; } - update = JSON.parse(updates[0]) - update.doc.should.equal @docUpdate.id - update.pathname.should.equal '/file-path' - update.docLines.should.equal 'a\nb' - update.meta.user_id.should.equal @user_id - update.meta.ts.should.be.a('string') - update.version.should.equal "#{@version}.0" + const update = JSON.parse(updates[0]); + update.doc.should.equal(this.docUpdate.id); + update.pathname.should.equal('/file-path'); + update.docLines.should.equal('a\nb'); + update.meta.user_id.should.equal(this.user_id); + update.meta.ts.should.be.a('string'); + update.version.should.equal(`${this.version}.0`); - done() - return null + return done(); + }); + return null; + }); + }); - describe "with enough updates to flush to the history service", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @version0 = 12345 - @version1 = @version0 + 1 - updates = [] - for v in [0..599] # Should flush after 500 ops - updates.push + describe("with enough updates to flush to the history service", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.version0 = 12345; + this.version1 = this.version0 + 1; + const updates = []; + for (let v = 0; v <= 599; v++) { // Should flush after 500 ops + updates.push({ id: DocUpdaterClient.randomId(), - pathname: '/file-' + v + pathname: '/file-' + v, docLines: 'a\nb' + }); + } - sinon.spy MockProjectHistoryApi, "flushProject" + sinon.spy(MockProjectHistoryApi, "flushProject"); - # Send updates in chunks to causes multiple flushes - projectId = @project_id - userId = @project_id - DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(0, 250), [], @version0, (error) -> - throw error if error? - DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(250), [], @version1, (error) -> - throw error if error? 
-      setTimeout done, 2000
-      return null
+      // Send updates in chunks to cause multiple flushes
+      const projectId = this.project_id;
+      const userId = this.project_id;
+      DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(0, 250), [], this.version0, function(error) {
+        if (error != null) { throw error; }
+        return DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(250), [], this.version1, function(error) {
+          if (error != null) { throw error; }
+          return setTimeout(done, 2000);
+        });
+      });
+      return null;
+    });
-    after ->
-      MockProjectHistoryApi.flushProject.restore()
+    after(() => MockProjectHistoryApi.flushProject.restore());
-    it "should flush project history", ->
-      MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true
+    return it("should flush project history", function() {
+      return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true);
+    });
+  });
-  describe "with too few updates to flush to the history service", ->
-    before (done) ->
-      @project_id = DocUpdaterClient.randomId()
-      @user_id = DocUpdaterClient.randomId()
-      @version0 = 12345
-      @version1 = @version0 + 1
+  return describe("with too few updates to flush to the history service", function() {
+    before(function(done) {
+      this.project_id = DocUpdaterClient.randomId();
+      this.user_id = DocUpdaterClient.randomId();
+      this.version0 = 12345;
+      this.version1 = this.version0 + 1;
-      updates = []
-      for v in [0..42] # Should flush after 500 ops
-        updates.push
+      const updates = [];
+      for (let v = 0; v <= 42; v++) { // only 43 ops, well below the 500-op flush threshold
+        updates.push({
          id: DocUpdaterClient.randomId(),
-          pathname: '/file-' + v
+          pathname: '/file-' + v,
          docLines: 'a\nb'
+        });
+      }
-      sinon.spy MockProjectHistoryApi, "flushProject"
+      sinon.spy(MockProjectHistoryApi, "flushProject");
-      # Send updates in chunks
-      projectId = @project_id
-      userId = @project_id
-      DocUpdaterClient.sendProjectUpdate projectId, userId, updates.slice(0, 10), [], @version0, (error) ->
-        throw error if error?
- setTimeout done, 2000 - return null + // Send updates in chunks + const projectId = this.project_id; + const userId = this.project_id; + DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(0, 10), [], this.version0, function(error) { + if (error != null) { throw error; } + return DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(10), [], this.version1, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 2000); + }); + }); + return null; + }); - after -> - MockProjectHistoryApi.flushProject.restore() + after(() => MockProjectHistoryApi.flushProject.restore()); - it "should not flush project history", -> - MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal false + return it("should not flush project history", function() { + return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(false); + }); + }); +}); diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js index c2c4462d31..527ec2edd2 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js +++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js @@ -1,109 +1,141 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); -MockTrackChangesApi = require "./helpers/MockTrackChangesApi" -MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const MockTrackChangesApi = require("./helpers/MockTrackChangesApi"); +const MockProjectHistoryApi = require("./helpers/MockProjectHistoryApi"); +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Deleting a document", -> - before (done) -> - @lines = ["one", "two", "three"] - @version = 42 - @update = - doc: @doc_id +describe("Deleting a document", function() { + before(function(done) { + this.lines = ["one", "two", "three"]; + this.version = 42; + this.update = { + doc: this.doc_id, op: [{ - i: "one and a half\n" + i: "one and a half\n", p: 4 - }] - v: @version - @result = ["one", "one and a half", "two", "three"] + }], + v: this.version + }; + this.result = ["one", "one and a half", "two", "three"]; - sinon.spy MockTrackChangesApi, "flushDoc" - sinon.spy MockProjectHistoryApi, "flushProject" - DocUpdaterApp.ensureRunning(done) + sinon.spy(MockTrackChangesApi, "flushDoc"); + sinon.spy(MockProjectHistoryApi, "flushProject"); + return DocUpdaterApp.ensureRunning(done); + }); - after -> - MockTrackChangesApi.flushDoc.restore() - MockProjectHistoryApi.flushProject.restore() + after(function() { + MockTrackChangesApi.flushDoc.restore(); + return MockProjectHistoryApi.flushProject.restore(); + }); - describe "when the updated doc exists in the doc updater", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), 
DocUpdaterClient.randomId()]
-      sinon.spy MockWebApi, "setDocument"
-      sinon.spy MockWebApi, "getDocument"
+  describe("when the updated doc exists in the doc updater", function() {
+    before(function(done) {
+      [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]);
+      sinon.spy(MockWebApi, "setDocument");
+      sinon.spy(MockWebApi, "getDocument");
-      MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version}
-      DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) =>
-        throw error if error?
-        DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) =>
-          throw error if error?
-          setTimeout () =>
-            DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) =>
-              @statusCode = res.statusCode
-              setTimeout done, 200
-          , 200
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version});
+      return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+        if (error != null) { throw error; }
+        return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, error => {
+          if (error != null) { throw error; }
+          return setTimeout(() => {
+            return DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, (error, res, body) => {
+              this.statusCode = res.statusCode;
+              return setTimeout(done, 200);
+            });
+          }
+          , 200);
+        });
+      });
+    });
-    after ->
-      MockWebApi.setDocument.restore()
-      MockWebApi.getDocument.restore()
+    after(function() {
+      MockWebApi.setDocument.restore();
+      return MockWebApi.getDocument.restore();
+    });
-    it "should return a 204 status code", ->
-      @statusCode.should.equal 204
+    it("should return a 204 status code", function() {
+      return this.statusCode.should.equal(204);
+    });
-    it "should send the updated document and version to the web api", ->
-      MockWebApi.setDocument
-        .calledWith(@project_id, @doc_id, @result, @version + 1)
-        .should.equal true
+    it("should send the updated document and version to the web api", function() {
+      return MockWebApi.setDocument
+        .calledWith(this.project_id, this.doc_id, this.result, this.version + 1)
+        .should.equal(true);
+    });
-    it "should need to reload the doc if read again", (done) ->
-      MockWebApi.getDocument.called.should.equal.false
-      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+    it("should need to reload the doc if read again", function(done) {
+      MockWebApi.getDocument.called.should.equal(false);
+      return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => {
        MockWebApi.getDocument
-        .calledWith(@project_id, @doc_id)
-        .should.equal true
-      done()
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true);
+        return done();
+      });
+    });
-    it "should flush track changes", ->
-      MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true
+    it("should flush track changes", function() {
+      return MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true);
+    });
-    it "should flush project history", ->
-      MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true
+    return it("should flush project history", function() {
+      return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true);
+    });
+  });
-  describe "when the doc is not in the doc updater", ->
-    before (done) ->
-      [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
-      MockWebApi.insertDoc @project_id, @doc_id, {
-        lines: @lines
-      }
-      sinon.spy MockWebApi, "setDocument"
-      sinon.spy MockWebApi, "getDocument"
-      DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) =>
-        @statusCode = res.statusCode
-        setTimeout done, 200
+  return describe("when the doc is not in the doc updater", function() {
+    before(function(done) {
+      [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]);
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines
+      });
+      sinon.spy(MockWebApi, "setDocument");
+      sinon.spy(MockWebApi, "getDocument");
+      return DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, (error, res, body) => {
+        this.statusCode = res.statusCode;
+        return setTimeout(done, 200);
+      });
+    });
-    after ->
-      MockWebApi.setDocument.restore()
-      MockWebApi.getDocument.restore()
+    after(function() {
+      MockWebApi.setDocument.restore();
+      return MockWebApi.getDocument.restore();
+    });
-    it "should return a 204 status code", ->
-      @statusCode.should.equal 204
+    it("should return a 204 status code", function() {
+      return this.statusCode.should.equal(204);
+    });
-    it "should not need to send the updated document to the web api", ->
-      MockWebApi.setDocument.called.should.equal false
+    it("should not need to send the updated document to the web api", () => MockWebApi.setDocument.called.should.equal(false));
-    it "should need to reload the doc if read again", (done) ->
-      MockWebApi.getDocument.called.should.equal.false
-      DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) =>
+    it("should need to reload the doc if read again", function(done) {
+      MockWebApi.getDocument.called.should.equal(false);
+      return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => {
        MockWebApi.getDocument
-        .calledWith(@project_id, @doc_id)
-        .should.equal true
-      done()
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true);
+        return done();
+      });
+    });
-    it "should flush track changes", ->
-      MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true
+    it("should flush track changes", function() {
+      return MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true);
+    });
-    it "should flush project history", ->
-      MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true
+    return it("should flush project history", function() {
+      return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true);
+    });
+  });
+});
diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js
index cddc008bc0..b60d04bd40 100644
--- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js
+++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js
@@ -1,174 +1,217 @@
-sinon = require "sinon"
-chai = require("chai")
-chai.should()
-async = require "async"
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require("sinon");
+const chai = require("chai");
+chai.should();
+const async = require("async");
-MockTrackChangesApi = require "./helpers/MockTrackChangesApi"
-MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi"
-MockWebApi = require "./helpers/MockWebApi"
-DocUpdaterClient = require "./helpers/DocUpdaterClient"
-DocUpdaterApp = require "./helpers/DocUpdaterApp"
+const
MockTrackChangesApi = require("./helpers/MockTrackChangesApi"); +const MockProjectHistoryApi = require("./helpers/MockProjectHistoryApi"); +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Deleting a project", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @docs = [{ - id: doc_id0 = DocUpdaterClient.randomId() - lines: ["one", "two", "three"] - update: - doc: doc_id0 +describe("Deleting a project", function() { + before(function(done) { + let doc_id0, doc_id1; + this.project_id = DocUpdaterClient.randomId(); + this.docs = [{ + id: (doc_id0 = DocUpdaterClient.randomId()), + lines: ["one", "two", "three"], + update: { + doc: doc_id0, op: [{ - i: "one and a half\n" + i: "one and a half\n", p: 4 - }] + }], v: 0 + }, updatedLines: ["one", "one and a half", "two", "three"] }, { - id: doc_id1 = DocUpdaterClient.randomId() - lines: ["four", "five", "six"] - update: - doc: doc_id1 + id: (doc_id1 = DocUpdaterClient.randomId()), + lines: ["four", "five", "six"], + update: { + doc: doc_id1, op: [{ - i: "four and a half\n" + i: "four and a half\n", p: 5 - }] + }], v: 0 + }, updatedLines: ["four", "four and a half", "five", "six"] - }] - for doc in @docs - MockWebApi.insertDoc @project_id, doc.id, { - lines: doc.lines + }]; + for (let doc of Array.from(this.docs)) { + MockWebApi.insertDoc(this.project_id, doc.id, { + lines: doc.lines, version: doc.update.v - } + }); + } - DocUpdaterApp.ensureRunning(done) + return DocUpdaterApp.ensureRunning(done); + }); - describe "with documents which have been updated", -> - before (done) -> - sinon.spy MockWebApi, "setDocument" - sinon.spy MockTrackChangesApi, "flushDoc" - sinon.spy MockProjectHistoryApi, "flushProject" + describe("with documents which have been updated", function() { + before(function(done) { + sinon.spy(MockWebApi, "setDocument"); + sinon.spy(MockTrackChangesApi, "flushDoc"); + sinon.spy(MockProjectHistoryApi, "flushProject"); - async.series @docs.map((doc) => - (callback) => - DocUpdaterClient.preloadDoc @project_id, doc.id, (error) => - return callback(error) if error? - DocUpdaterClient.sendUpdate @project_id, doc.id, doc.update, (error) => - callback(error) - ), (error) => - throw error if error? 
- setTimeout () => - DocUpdaterClient.deleteProject @project_id, (error, res, body) => - @statusCode = res.statusCode - done() - , 200 + return async.series(this.docs.map(doc => { + return callback => { + return DocUpdaterClient.preloadDoc(this.project_id, doc.id, error => { + if (error != null) { return callback(error); } + return DocUpdaterClient.sendUpdate(this.project_id, doc.id, doc.update, error => { + return callback(error); + }); + }); + }; + }), error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.deleteProject(this.project_id, (error, res, body) => { + this.statusCode = res.statusCode; + return done(); + }); + } + , 200); + }); + }); - after -> - MockWebApi.setDocument.restore() - MockTrackChangesApi.flushDoc.restore() - MockProjectHistoryApi.flushProject.restore() + after(function() { + MockWebApi.setDocument.restore(); + MockTrackChangesApi.flushDoc.restore(); + return MockProjectHistoryApi.flushProject.restore(); + }); - it "should return a 204 status code", -> - @statusCode.should.equal 204 + it("should return a 204 status code", function() { + return this.statusCode.should.equal(204); + }); - it "should send each document to the web api", -> - for doc in @docs + it("should send each document to the web api", function() { + return Array.from(this.docs).map((doc) => MockWebApi.setDocument - .calledWith(@project_id, doc.id, doc.updatedLines) - .should.equal true + .calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true)); + }); - it "should need to reload the docs if read again", (done) -> - sinon.spy MockWebApi, "getDocument" - async.series @docs.map((doc) => - (callback) => - MockWebApi.getDocument.calledWith(@project_id, doc.id).should.equal false - DocUpdaterClient.getDoc @project_id, doc.id, (error, res, returnedDoc) => - MockWebApi.getDocument.calledWith(@project_id, doc.id).should.equal true - callback() - ), () -> - MockWebApi.getDocument.restore() - done() + it("should need to reload the docs if read again", function(done) { + sinon.spy(MockWebApi, "getDocument"); + return async.series(this.docs.map(doc => { + return callback => { + MockWebApi.getDocument.calledWith(this.project_id, doc.id).should.equal(false); + return DocUpdaterClient.getDoc(this.project_id, doc.id, (error, res, returnedDoc) => { + MockWebApi.getDocument.calledWith(this.project_id, doc.id).should.equal(true); + return callback(); + }); + }; + }), function() { + MockWebApi.getDocument.restore(); + return done(); + }); + }); - it "should flush each doc in track changes", -> - for doc in @docs - MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal true + it("should flush each doc in track changes", function() { + return Array.from(this.docs).map((doc) => + MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)); + }); - it "should flush each doc in project history", -> - MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true + return it("should flush each doc in project history", function() { + return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true); + }); + }); - describe "with the background=true parameter from realtime and no request to flush the queue", -> - before (done) -> - sinon.spy MockWebApi, "setDocument" - sinon.spy MockTrackChangesApi, "flushDoc" - sinon.spy MockProjectHistoryApi, "flushProject" + describe("with the background=true parameter from realtime and no request to flush the queue", function() { + before(function(done) { + 
sinon.spy(MockWebApi, "setDocument"); + sinon.spy(MockTrackChangesApi, "flushDoc"); + sinon.spy(MockProjectHistoryApi, "flushProject"); - async.series @docs.map((doc) => - (callback) => - DocUpdaterClient.preloadDoc @project_id, doc.id, callback - ), (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.deleteProjectOnShutdown @project_id, (error, res, body) => - @statusCode = res.statusCode - done() - , 200 + return async.series(this.docs.map(doc => { + return callback => { + return DocUpdaterClient.preloadDoc(this.project_id, doc.id, callback); + }; + }), error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.deleteProjectOnShutdown(this.project_id, (error, res, body) => { + this.statusCode = res.statusCode; + return done(); + }); + } + , 200); + }); + }); - after -> - MockWebApi.setDocument.restore() - MockTrackChangesApi.flushDoc.restore() - MockProjectHistoryApi.flushProject.restore() + after(function() { + MockWebApi.setDocument.restore(); + MockTrackChangesApi.flushDoc.restore(); + return MockProjectHistoryApi.flushProject.restore(); + }); - it "should return a 204 status code", -> - @statusCode.should.equal 204 + it("should return a 204 status code", function() { + return this.statusCode.should.equal(204); + }); - it "should not send any documents to the web api", -> - MockWebApi.setDocument.called.should.equal false + it("should not send any documents to the web api", () => MockWebApi.setDocument.called.should.equal(false)); - it "should not flush any docs in track changes", -> - MockTrackChangesApi.flushDoc.called.should.equal false + it("should not flush any docs in track changes", () => MockTrackChangesApi.flushDoc.called.should.equal(false)); - it "should not flush to project history", -> - MockProjectHistoryApi.flushProject.called.should.equal false + return it("should not flush to project history", () => MockProjectHistoryApi.flushProject.called.should.equal(false)); + }); - describe "with the background=true parameter from realtime and a request to flush the queue", -> - before (done) -> - sinon.spy MockWebApi, "setDocument" - sinon.spy MockTrackChangesApi, "flushDoc" - sinon.spy MockProjectHistoryApi, "flushProject" + return describe("with the background=true parameter from realtime and a request to flush the queue", function() { + before(function(done) { + sinon.spy(MockWebApi, "setDocument"); + sinon.spy(MockTrackChangesApi, "flushDoc"); + sinon.spy(MockProjectHistoryApi, "flushProject"); - async.series @docs.map((doc) => - (callback) => - DocUpdaterClient.preloadDoc @project_id, doc.id, callback - ), (error) => - throw error if error? 
- setTimeout () => - DocUpdaterClient.deleteProjectOnShutdown @project_id, (error, res, body) => - @statusCode = res.statusCode - # after deleting the project and putting it in the queue, flush the queue - setTimeout () -> - DocUpdaterClient.flushOldProjects done - , 2000 - , 200 + return async.series(this.docs.map(doc => { + return callback => { + return DocUpdaterClient.preloadDoc(this.project_id, doc.id, callback); + }; + }), error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.deleteProjectOnShutdown(this.project_id, (error, res, body) => { + this.statusCode = res.statusCode; + // after deleting the project and putting it in the queue, flush the queue + return setTimeout(() => DocUpdaterClient.flushOldProjects(done) + , 2000); + }); + } + , 200); + }); + }); - after -> - MockWebApi.setDocument.restore() - MockTrackChangesApi.flushDoc.restore() - MockProjectHistoryApi.flushProject.restore() + after(function() { + MockWebApi.setDocument.restore(); + MockTrackChangesApi.flushDoc.restore(); + return MockProjectHistoryApi.flushProject.restore(); + }); - it "should return a 204 status code", -> - @statusCode.should.equal 204 + it("should return a 204 status code", function() { + return this.statusCode.should.equal(204); + }); - it "should send each document to the web api", -> - for doc in @docs + it("should send each document to the web api", function() { + return Array.from(this.docs).map((doc) => MockWebApi.setDocument - .calledWith(@project_id, doc.id, doc.updatedLines) - .should.equal true + .calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true)); + }); - it "should flush each doc in track changes", -> - for doc in @docs - MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal true + it("should flush each doc in track changes", function() { + return Array.from(this.docs).map((doc) => + MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)); + }); - it "should flush to project history", -> - MockProjectHistoryApi.flushProject.called.should.equal true + return it("should flush to project history", () => MockProjectHistoryApi.flushProject.called.should.equal(true)); + }); +}); diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js index c32b6b4001..e70798ee27 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js +++ b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js @@ -1,80 +1,105 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); +const async = require("async"); -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Flushing a project", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @docs = [{ - id: doc_id0 = 
DocUpdaterClient.randomId() - lines: ["one", "two", "three"] - update: - doc: doc_id0 +describe("Flushing a project", function() { + before(function(done) { + let doc_id0, doc_id1; + this.project_id = DocUpdaterClient.randomId(); + this.docs = [{ + id: (doc_id0 = DocUpdaterClient.randomId()), + lines: ["one", "two", "three"], + update: { + doc: doc_id0, op: [{ - i: "one and a half\n" + i: "one and a half\n", p: 4 - }] + }], v: 0 + }, updatedLines: ["one", "one and a half", "two", "three"] }, { - id: doc_id1 = DocUpdaterClient.randomId() - lines: ["four", "five", "six"] - update: - doc: doc_id1 + id: (doc_id1 = DocUpdaterClient.randomId()), + lines: ["four", "five", "six"], + update: { + doc: doc_id1, op: [{ - i: "four and a half\n" + i: "four and a half\n", p: 5 - }] + }], v: 0 + }, updatedLines: ["four", "four and a half", "five", "six"] - }] - for doc in @docs - MockWebApi.insertDoc @project_id, doc.id, { - lines: doc.lines + }]; + for (let doc of Array.from(this.docs)) { + MockWebApi.insertDoc(this.project_id, doc.id, { + lines: doc.lines, version: doc.update.v - } - DocUpdaterApp.ensureRunning(done) + }); + } + return DocUpdaterApp.ensureRunning(done); + }); - describe "with documents which have been updated", -> - before (done) -> - sinon.spy MockWebApi, "setDocument" + return describe("with documents which have been updated", function() { + before(function(done) { + sinon.spy(MockWebApi, "setDocument"); - async.series @docs.map((doc) => - (callback) => - DocUpdaterClient.preloadDoc @project_id, doc.id, (error) => - return callback(error) if error? - DocUpdaterClient.sendUpdate @project_id, doc.id, doc.update, (error) => - callback(error) - ), (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.flushProject @project_id, (error, res, body) => - @statusCode = res.statusCode - done() - , 200 + return async.series(this.docs.map(doc => { + return callback => { + return DocUpdaterClient.preloadDoc(this.project_id, doc.id, error => { + if (error != null) { return callback(error); } + return DocUpdaterClient.sendUpdate(this.project_id, doc.id, doc.update, error => { + return callback(error); + }); + }); + }; + }), error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.flushProject(this.project_id, (error, res, body) => { + this.statusCode = res.statusCode; + return done(); + }); + } + , 200); + }); + }); - after -> - MockWebApi.setDocument.restore() + after(() => MockWebApi.setDocument.restore()); - it "should return a 204 status code", -> - @statusCode.should.equal 204 + it("should return a 204 status code", function() { + return this.statusCode.should.equal(204); + }); - it "should send each document to the web api", -> - for doc in @docs + it("should send each document to the web api", function() { + return Array.from(this.docs).map((doc) => MockWebApi.setDocument - .calledWith(@project_id, doc.id, doc.updatedLines) - .should.equal true + .calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true)); + }); - it "should update the lines in the doc updater", (done) -> - async.series @docs.map((doc) => - (callback) => - DocUpdaterClient.getDoc @project_id, doc.id, (error, res, returnedDoc) => - returnedDoc.lines.should.deep.equal doc.updatedLines - callback() - ), done + return it("should update the lines in the doc updater", function(done) { + return async.series(this.docs.map(doc => { + return callback => { + return DocUpdaterClient.getDoc(this.project_id, doc.id, (error, res, returnedDoc) => { + 
returnedDoc.lines.should.deep.equal(doc.updatedLines); + return callback(); + }); + }; + }), done); + }); + }); +}); diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js index 4f19f13c2f..761a388d3e 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js @@ -1,90 +1,112 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() -expect = chai.expect -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); +const { + expect +} = chai; +const async = require("async"); -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Flushing a doc to Mongo", -> - before (done) -> - @lines = ["one", "two", "three"] - @version = 42 - @update = - doc: @doc_id - meta: { user_id: 'last-author-fake-id' } +describe("Flushing a doc to Mongo", function() { + before(function(done) { + this.lines = ["one", "two", "three"]; + this.version = 42; + this.update = { + doc: this.doc_id, + meta: { user_id: 'last-author-fake-id' }, op: [{ - i: "one and a half\n" + i: "one and a half\n", p: 4 - }] - v: @version - @result = ["one", "one and a half", "two", "three"] - DocUpdaterApp.ensureRunning(done) + }], + v: this.version + }; + this.result = ["one", "one and a half", "two", "three"]; + return DocUpdaterApp.ensureRunning(done); + }); - describe "when the updated doc exists in the doc updater", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - sinon.spy MockWebApi, "setDocument" + describe("when the updated doc exists in the doc updater", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + sinon.spy(MockWebApi, "setDocument"); - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.sendUpdates @project_id, @doc_id, [@update], (error) => - throw error if error? 
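// These setups wait a fixed 200ms for pending updates to be processed before
// flushing. A minimal polling alternative is sketched below; waitUntil and its
// "condition" callback are hypothetical helpers, not part of DocUpdaterClient:
function waitUntil(condition, timeoutMs, callback) {
  const deadline = Date.now() + timeoutMs;
  const poll = () =>
    condition((error, satisfied) => {
      if (error != null) { return callback(error); }
      if (satisfied) { return callback(); }
      if (Date.now() > deadline) { return callback(new Error("timed out waiting")); }
      setTimeout(poll, 50); // check again shortly
    });
  poll();
}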
- setTimeout () => - DocUpdaterClient.flushDoc @project_id, @doc_id, done - , 200 + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + return DocUpdaterClient.sendUpdates(this.project_id, this.doc_id, [this.update], error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done); + } + , 200); + }); + }); - after -> - MockWebApi.setDocument.restore() + after(() => MockWebApi.setDocument.restore()); - it "should flush the updated doc lines and version to the web api", -> - MockWebApi.setDocument - .calledWith(@project_id, @doc_id, @result, @version + 1) - .should.equal true + it("should flush the updated doc lines and version to the web api", function() { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.result, this.version + 1) + .should.equal(true); + }); - it "should flush the last update author and time to the web api", -> - lastUpdatedAt = MockWebApi.setDocument.lastCall.args[5] - parseInt(lastUpdatedAt).should.be.closeTo((new Date()).getTime(), 30000) + return it("should flush the last update author and time to the web api", function() { + const lastUpdatedAt = MockWebApi.setDocument.lastCall.args[5]; + parseInt(lastUpdatedAt).should.be.closeTo((new Date()).getTime(), 30000); - lastUpdatedBy = MockWebApi.setDocument.lastCall.args[6] - lastUpdatedBy.should.equal 'last-author-fake-id' + const lastUpdatedBy = MockWebApi.setDocument.lastCall.args[6]; + return lastUpdatedBy.should.equal('last-author-fake-id'); + }); + }); - describe "when the doc does not exist in the doc updater", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - } - sinon.spy MockWebApi, "setDocument" - DocUpdaterClient.flushDoc @project_id, @doc_id, done + describe("when the doc does not exist in the doc updater", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines + }); + sinon.spy(MockWebApi, "setDocument"); + return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done); + }); - after -> - MockWebApi.setDocument.restore() + after(() => MockWebApi.setDocument.restore()); - it "should not flush the doc to the web api", -> - MockWebApi.setDocument.called.should.equal false + return it("should not flush the doc to the web api", () => MockWebApi.setDocument.called.should.equal(false)); + }); - describe "when the web api http request takes a long time on first request", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines - version: @version - } - t = 30000 - sinon.stub MockWebApi, "setDocument", (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback = (error) ->) -> - setTimeout callback, t - t = 0 - DocUpdaterClient.preloadDoc @project_id, @doc_id, done + return describe("when the web api http request takes a long time on first request", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }); + let t = 30000; + 
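// The three-argument form sinon.stub(obj, "method", fn) used on the next line
// is the sinon 1.x API, which was removed in sinon 3. The equivalent on modern
// sinon (a sketch, assuming no other stub behaviour is configured) would be:
//   sinon.stub(MockWebApi, "setDocument").callsFake(fn);
// The old form is kept here to match the sinon version in use at the time.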
sinon.stub(MockWebApi, "setDocument", function(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback) { + if (callback == null) { callback = function(error) {}; } + setTimeout(callback, t); + return t = 0; + }); + return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, done); + }); - after -> - MockWebApi.setDocument.restore() + after(() => MockWebApi.setDocument.restore()); - it "should still work", (done) -> - start = Date.now() - DocUpdaterClient.flushDoc @project_id, @doc_id, (error, res, doc) => - res.statusCode.should.equal 204 - delta = Date.now() - start - expect(delta).to.be.below 20000 - done() + return it("should still work", function(done) { + const start = Date.now(); + return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, (error, res, doc) => { + res.statusCode.should.equal(204); + const delta = Date.now() - start; + expect(delta).to.be.below(20000); + return done(); + }); + }); + }); +}); diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js index f3aa6ef875..273ee1c3f0 100644 --- a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js @@ -1,138 +1,188 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() -expect = chai.expect +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); +const { + expect +} = chai; -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Getting a document", -> - before (done) -> - @lines = ["one", "two", "three"] - @version = 42 - DocUpdaterApp.ensureRunning(done) +describe("Getting a document", function() { + before(function(done) { + this.lines = ["one", "two", "three"]; + this.version = 42; + return DocUpdaterApp.ensureRunning(done); + }); - describe "when the document is not loaded", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - sinon.spy MockWebApi, "getDocument" + describe("when the document is not loaded", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + sinon.spy(MockWebApi, "getDocument"); - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, returnedDoc) => { this.returnedDoc = returnedDoc; return done(); }); + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - it "should load the document from the web API", -> - MockWebApi.getDocument - .calledWith(@project_id, @doc_id) - 
.should.equal true + it("should load the document from the web API", function() { + return MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true); + }); - it "should return the document lines", -> - @returnedDoc.lines.should.deep.equal @lines + it("should return the document lines", function() { + return this.returnedDoc.lines.should.deep.equal(this.lines); + }); - it "should return the document at its current version", -> - @returnedDoc.version.should.equal @version + return it("should return the document at its current version", function() { + return this.returnedDoc.version.should.equal(this.version); + }); + }); - describe "when the document is already loaded", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + describe("when the document is already loaded", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - sinon.spy MockWebApi, "getDocument" - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, @returnedDoc) => done() + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + sinon.spy(MockWebApi, "getDocument"); + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, returnedDoc) => { this.returnedDoc = returnedDoc; return done(); }); + }); + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - it "should not load the document from the web API", -> - MockWebApi.getDocument.called.should.equal false + it("should not load the document from the web API", () => MockWebApi.getDocument.called.should.equal(false)); - it "should return the document lines", -> - @returnedDoc.lines.should.deep.equal @lines + return it("should return the document lines", function() { + return this.returnedDoc.lines.should.deep.equal(this.lines); + }); + }); - describe "when the request asks for some recent ops", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, { - lines: @lines = ["one", "two", "three"] - } + describe("when the request asks for some recent ops", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: (this.lines = ["one", "two", "three"]) + }); - @updates = for v in [0..199] - doc_id: @doc_id, - op: [i: v.toString(), p: 0] - v: v + this.updates = __range__(0, 199, true).map((v) => ({ + doc_id: this.doc_id, + op: [{i: v.toString(), p: 0}], + v + })); - DocUpdaterClient.sendUpdates @project_id, @doc_id, @updates, (error) => - throw error if error? 
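// The __range__ helper appended at the bottom of this file reproduces
// CoffeeScript's inclusive [0..199] semantics. For this ascending case, an
// idiomatic replacement (a sketch, not applied in this patch) would be:
//   this.updates = Array.from({ length: 200 }, (_, v) => ({
//     doc_id: this.doc_id,
//     op: [{ i: v.toString(), p: 0 }],
//     v
//   }));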
- sinon.spy MockWebApi, "getDocument" - done() + return DocUpdaterClient.sendUpdates(this.project_id, this.doc_id, this.updates, error => { + if (error != null) { throw error; } + sinon.spy(MockWebApi, "getDocument"); + return done(); + }); + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - describe "when the ops are loaded", -> - before (done) -> - DocUpdaterClient.getDocAndRecentOps @project_id, @doc_id, 190, (error, res, @returnedDoc) => done() + describe("when the ops are loaded", function() { + before(function(done) { + return DocUpdaterClient.getDocAndRecentOps(this.project_id, this.doc_id, 190, (error, res, returnedDoc) => { this.returnedDoc = returnedDoc; return done(); }); + }); - it "should return the recent ops", -> - @returnedDoc.ops.length.should.equal 10 - for update, i in @updates.slice(190, -1) - @returnedDoc.ops[i].op.should.deep.equal update.op + return it("should return the recent ops", function() { + this.returnedDoc.ops.length.should.equal(10); + return Array.from(this.updates.slice(190, -1)).map((update, i) => + this.returnedDoc.ops[i].op.should.deep.equal(update.op)); + }); + }); - describe "when the ops are not all loaded", -> - before (done) -> - # We only track 100 ops - DocUpdaterClient.getDocAndRecentOps @project_id, @doc_id, 10, (error, @res, @returnedDoc) => done() + return describe("when the ops are not all loaded", function() { + before(function(done) { + // We only track 100 ops + return DocUpdaterClient.getDocAndRecentOps(this.project_id, this.doc_id, 10, (error, res, returnedDoc) => { this.res = res; this.returnedDoc = returnedDoc; return done(); }); + }); - it "should return UnprocessableEntity", -> - @res.statusCode.should.equal 422 + return it("should return UnprocessableEntity", function() { + return this.res.statusCode.should.equal(422); + }); + }); + }); - describe "when the document does not exist", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - @statusCode = res.statusCode - done() + describe("when the document does not exist", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + this.statusCode = res.statusCode; + return done(); + }); + }); - it "should return 404", -> - @statusCode.should.equal 404 + return it("should return 404", function() { + return this.statusCode.should.equal(404); + }); + }); - describe "when the web api returns an error", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - sinon.stub MockWebApi, "getDocument", (project_id, doc_id, callback = (error, doc) ->) -> - callback new Error("oops") - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - @statusCode = res.statusCode - done() + describe("when the web api returns an error", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + sinon.stub(MockWebApi, "getDocument", function(project_id, doc_id, callback) { + if (callback == null) { callback = function(error, doc) {}; } + return callback(new Error("oops")); + }); + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + this.statusCode = res.statusCode; + return 
done(); + }); + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - it "should return 500", -> - @statusCode.should.equal 500 + return it("should return 500", function() { + return this.statusCode.should.equal(500); + }); + }); - describe "when the web api http request takes a long time", -> - before (done) -> - @timeout = 10000 - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - sinon.stub MockWebApi, "getDocument", (project_id, doc_id, callback = (error, doc) ->) -> - setTimeout callback, 30000 - done() + return describe("when the web api http request takes a long time", function() { + before(function(done) { + this.timeout = 10000; + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + sinon.stub(MockWebApi, "getDocument", function(project_id, doc_id, callback) { + if (callback == null) { callback = function(error, doc) {}; } + return setTimeout(callback, 30000); + }); + return done(); + }); - after -> - MockWebApi.getDocument.restore() + after(() => MockWebApi.getDocument.restore()); - it "should return quickly(ish)", (done) -> - start = Date.now() - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - res.statusCode.should.equal 500 - delta = Date.now() - start - expect(delta).to.be.below 20000 - done() + return it("should return quickly(ish)", function(done) { + const start = Date.now(); + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + res.statusCode.should.equal(500); + const delta = Date.now() - start; + expect(delta).to.be.below(20000); + return done(); + }); + }); + }); +}); + +function __range__(left, right, inclusive) { + let range = []; + let ascending = left < right; + let end = !inclusive ? right : ascending ? right + 1 : right - 1; + for (let i = left; ascending ? i < end : i > end; ascending ? 
i++ : i--) { + range.push(i); + } + return range; +} \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js index 3483d170fa..e974070468 100644 --- a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js +++ b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js @@ -1,69 +1,109 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() -expect = chai.expect +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); +const { + expect +} = chai; -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Getting documents for project", -> - before (done) -> - @lines = ["one", "two", "three"] - @version = 42 - DocUpdaterApp.ensureRunning(done) +describe("Getting documents for project", function() { + before(function(done) { + this.lines = ["one", "two", "three"]; + this.version = 42; + return DocUpdaterApp.ensureRunning(done); + }); - describe "when project state hash does not match", -> - before (done) -> - @projectStateHash = DocUpdaterClient.randomId() - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + describe("when project state hash does not match", function() { + before(function(done) { + this.projectStateHash = DocUpdaterClient.randomId(); + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? 
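// The state-hash checks exercised below behave like a compare-and-set: a
// request with an unseen hash gets 409 Conflict (and records the hash), and a
// repeat request with the same hash gets 200 with the docs. One plausible
// caller-side pattern (a sketch; getDocsWithRetry is hypothetical):
function getDocsWithRetry(client, projectId, stateHash, callback) {
  client.getProjectDocs(projectId, stateHash, (error, res, docs) => {
    if (error != null) { return callback(error); }
    if (res.statusCode !== 409) { return callback(null, res, docs); }
    // the 409 recorded the hash, so a single retry should now match
    client.getProjectDocs(projectId, stateHash, callback);
  });
}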
- DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res, @returnedDocs) => - done() + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res, returnedDocs) => { + this.res = res; + this.returnedDocs = returnedDocs; + return done(); + }); + }); + }); - it "should return a 409 Conflict response", -> - @res.statusCode.should.equal 409 + return it("should return a 409 Conflict response", function() { + return this.res.statusCode.should.equal(409); + }); + }); - describe "when project state hash matches", -> - before (done) -> - @projectStateHash = DocUpdaterClient.randomId() - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + describe("when project state hash matches", function() { + before(function(done) { + this.projectStateHash = DocUpdaterClient.randomId(); + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res0, @returnedDocs0) => - # set the hash - DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res, @returnedDocs) => - # the hash should now match - done() + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res0, returnedDocs0) => { + // set the hash + this.res0 = res0; + this.returnedDocs0 = returnedDocs0; + return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res, returnedDocs) => { + // the hash should now match + this.res = res; + this.returnedDocs = returnedDocs; + return done(); + }); + }); + }); + }); - it "should return a 200 response", -> - @res.statusCode.should.equal 200 + it("should return a 200 response", function() { + return this.res.statusCode.should.equal(200); + }); - it "should return the documents", -> - @returnedDocs.should.deep.equal [ {_id: @doc_id, lines: @lines, v: @version} ] + return it("should return the documents", function() { + return this.returnedDocs.should.deep.equal([ {_id: this.doc_id, lines: this.lines, v: this.version} ]); + }); +}); - describe "when the doc has been removed", -> - before (done) -> - @projectStateHash = DocUpdaterClient.randomId() - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] + return describe("when the doc has been removed", function() { + before(function(done) { + this.projectStateHash = DocUpdaterClient.randomId(); + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? 
- DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res0, @returnedDocs0) => - # set the hash - DocUpdaterClient.deleteDoc @project_id, @doc_id, (error, res, body) => - # delete the doc - DocUpdaterClient.getProjectDocs @project_id, @projectStateHash, (error, @res, @returnedDocs) => - # the hash would match, but the doc has been deleted - done() + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res0, returnedDocs0) => { + // set the hash + this.res0 = res0; + this.returnedDocs0 = returnedDocs0; + return DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, (error, res, body) => { + // delete the doc + return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res1, returnedDocs) => { + // the hash would match, but the doc has been deleted + this.res = res1; + this.returnedDocs = returnedDocs; + return done(); + }); + }); + }); + }); + }); - it "should return a 409 Conflict response", -> - @res.statusCode.should.equal 409 + return it("should return a 409 Conflict response", function() { + return this.res.statusCode.should.equal(409); + }); + }); +}); diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.js b/services/document-updater/test/acceptance/coffee/RangesTests.js index 52946f4823..e7ba085b04 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.js +++ b/services/document-updater/test/acceptance/coffee/RangesTests.js @@ -1,372 +1,467 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() -expect = chai.expect -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); +const { + expect +} = chai; +const async = require("async"); -{db, ObjectId} = require "../../../app/js/mongojs" -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const {db, ObjectId} = require("../../../app/js/mongojs"); +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Ranges", -> - before (done) -> - DocUpdaterApp.ensureRunning done +describe("Ranges", function() { + before(done => DocUpdaterApp.ensureRunning(done)); - describe "tracking changes from ops", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @id_seed = "587357bd35e64f6157" - @doc = { - id: DocUpdaterClient.randomId() + describe("tracking changes from ops", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.id_seed = "587357bd35e64f6157"; + this.doc = { + id: DocUpdaterClient.randomId(), lines: ["aaa"] - } - @updates = [{ - doc: @doc.id - op: [{ i: "123", p: 1 }] - v: 0 - meta: { user_id: @user_id } + }; + this.updates = [{ + doc: this.doc.id, + op: [{ i: "123", p: 1 }], + v: 
0, + meta: { user_id: this.user_id } }, { - doc: @doc.id - op: [{ i: "456", p: 5 }] - v: 1 - meta: { user_id: @user_id, tc: @id_seed } + doc: this.doc.id, + op: [{ i: "456", p: 5 }], + v: 1, + meta: { user_id: this.user_id, tc: this.id_seed } }, { - doc: @doc.id - op: [{ d: "12", p: 1 }] - v: 2 - meta: { user_id: @user_id } - }] - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines + doc: this.doc.id, + op: [{ d: "12", p: 1 }], + v: 2, + meta: { user_id: this.user_id } + }]; + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, version: 0 + }); + const jobs = []; + for (let update of Array.from(this.updates)) { + (update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); + })(update); } - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterApp.ensureRunning (error) => - throw error if error? - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => - throw error if error? - async.series jobs, (error) -> - throw error if error? - done() + return DocUpdaterApp.ensureRunning(error => { + if (error != null) { throw error; } + return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { throw error; } + return async.series(jobs, function(error) { + if (error != null) { throw error; } + return done(); + }); + }); + }); + }); - it "should update the ranges", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - ranges = data.ranges - change = ranges.changes[0] - change.op.should.deep.equal { i: "456", p: 3 } - change.id.should.equal @id_seed + "000001" - change.metadata.user_id.should.equal @user_id - done() + it("should update the ranges", function(done) { + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + const { + ranges + } = data; + const change = ranges.changes[0]; + change.op.should.deep.equal({ i: "456", p: 3 }); + change.id.should.equal(this.id_seed + "000001"); + change.metadata.user_id.should.equal(this.user_id); + return done(); + }); + }); - describe "Adding comments", -> - describe "standalone", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @doc = { - id: DocUpdaterClient.randomId() + return describe("Adding comments", function() { + describe("standalone", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.doc = { + id: DocUpdaterClient.randomId(), lines: ["foo bar baz"] - } - @updates = [{ - doc: @doc.id - op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + }; + this.updates = [{ + doc: this.doc.id, + op: [{ c: "bar", p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], v: 0 - }] - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines + }]; + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, version: 0 + }); + const jobs = []; + for (let update of Array.from(this.updates)) { + (update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); + })(update); } - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc.id, 
(error) => - throw error if error? - async.series jobs, (error) -> - throw error if error? - setTimeout done, 200 + return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { throw error; } + return async.series(jobs, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + }); + }); - it "should update the ranges", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - ranges = data.ranges - comment = ranges.comments[0] - comment.op.should.deep.equal { c: "bar", p: 4, t: @tid } - comment.id.should.equal @tid - done() + return it("should update the ranges", function(done) { + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + const { + ranges + } = data; + const comment = ranges.comments[0]; + comment.op.should.deep.equal({ c: "bar", p: 4, t: this.tid }); + comment.id.should.equal(this.tid); + return done(); + }); + }); + }); - describe "with conflicting ops needing OT", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @doc = { - id: DocUpdaterClient.randomId() + return describe("with conflicting ops needing OT", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.doc = { + id: DocUpdaterClient.randomId(), lines: ["foo bar baz"] - } - @updates = [{ - doc: @doc.id - op: [{ i: "ABC", p: 3 }] - v: 0 - meta: { user_id: @user_id } + }; + this.updates = [{ + doc: this.doc.id, + op: [{ i: "ABC", p: 3 }], + v: 0, + meta: { user_id: this.user_id } }, { - doc: @doc.id - op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + doc: this.doc.id, + op: [{ c: "bar", p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], v: 0 - }] - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines + }]; + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, version: 0 + }); + const jobs = []; + for (let update of Array.from(this.updates)) { + (update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); + })(update); } - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => - throw error if error? - async.series jobs, (error) -> - throw error if error? - setTimeout done, 200 + return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { throw error; } + return async.series(jobs, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + }); + }); - it "should update the comments with the OT shifted comment", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? 
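// The assertion below checks the OT shift: inserting "ABC" (length 3) at p=3
// moves the comment anchored at p=4 to p=7. A minimal sketch of that rule for
// a single insert against a single comment op (illustrative only, not
// ShareJS's implementation):
function transformComment(comment, insert) {
  // an insert at or before the comment's anchor pushes it right by its length
  const shift = insert.p <= comment.p ? insert.i.length : 0;
  return { c: comment.c, t: comment.t, p: comment.p + shift };
}
// transformComment({ c: "bar", p: 4, t: tid }, { i: "ABC", p: 3 })
//   => { c: "bar", p: 7, t: tid }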
- ranges = data.ranges - comment = ranges.comments[0] - comment.op.should.deep.equal { c: "bar", p: 7, t: @tid } - done() + return it("should update the comments with the OT shifted comment", function(done) { + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + const { + ranges + } = data; + const comment = ranges.comments[0]; + comment.op.should.deep.equal({ c: "bar", p: 7, t: this.tid }); + return done(); + }); + }); + }); + }); + }); - describe "Loading ranges from persistence layer", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @id_seed = "587357bd35e64f6157" - @doc = { - id: DocUpdaterClient.randomId() + describe("Loading ranges from persistence layer", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.id_seed = "587357bd35e64f6157"; + this.doc = { + id: DocUpdaterClient.randomId(), lines: ["a123aa"] - } - @update = { - doc: @doc.id - op: [{ i: "456", p: 5 }] - v: 0 - meta: { user_id: @user_id, tc: @id_seed } - } - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines - version: 0 + }; + this.update = { + doc: this.doc.id, + op: [{ i: "456", p: 5 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed } + }; + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, ranges: { changes: [{ - op: { i: "123", p: 1 } - metadata: - user_id: @user_id + op: { i: "123", p: 1 }, + metadata: { + user_id: this.user_id, ts: new Date() + } }] } - } - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc.id, @update, (error) -> - throw error if error? - setTimeout done, 200 + }); + return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, this.update, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + }); + }); - it "should have preloaded the existing ranges", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - {changes} = data.ranges - changes[0].op.should.deep.equal { i: "123", p: 1 } - changes[1].op.should.deep.equal { i: "456", p: 5 } - done() + it("should have preloaded the existing ranges", function(done) { + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + const {changes} = data.ranges; + changes[0].op.should.deep.equal({ i: "123", p: 1 }); + changes[1].op.should.deep.equal({ i: "456", p: 5 }); + return done(); + }); + }); - it "should flush the ranges to the persistence layer again", (done) -> - DocUpdaterClient.flushDoc @project_id, @doc.id, (error) => - throw error if error? 
- MockWebApi.getDocument @project_id, @doc.id, (error, doc) => - {changes} = doc.ranges - changes[0].op.should.deep.equal { i: "123", p: 1 } - changes[1].op.should.deep.equal { i: "456", p: 5 } - done() + return it("should flush the ranges to the persistence layer again", function(done) { + return DocUpdaterClient.flushDoc(this.project_id, this.doc.id, error => { + if (error != null) { throw error; } + return MockWebApi.getDocument(this.project_id, this.doc.id, (error, doc) => { + const {changes} = doc.ranges; + changes[0].op.should.deep.equal({ i: "123", p: 1 }); + changes[1].op.should.deep.equal({ i: "456", p: 5 }); + return done(); + }); + }); + }); + }); - describe "accepting a change", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @id_seed = "587357bd35e64f6157" - @doc = { - id: DocUpdaterClient.randomId() + describe("accepting a change", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.id_seed = "587357bd35e64f6157"; + this.doc = { + id: DocUpdaterClient.randomId(), lines: ["aaa"] - } - @update = { - doc: @doc.id - op: [{ i: "456", p: 1 }] - v: 0 - meta: { user_id: @user_id, tc: @id_seed } - } - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines + }; + this.update = { + doc: this.doc.id, + op: [{ i: "456", p: 1 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed } + }; + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, version: 0 - } - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc.id, @update, (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - ranges = data.ranges - change = ranges.changes[0] - change.op.should.deep.equal { i: "456", p: 1 } - change.id.should.equal @id_seed + "000001" - change.metadata.user_id.should.equal @user_id - done() - , 200 + }); + return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, this.update, error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + const { + ranges + } = data; + const change = ranges.changes[0]; + change.op.should.deep.equal({ i: "456", p: 1 }); + change.id.should.equal(this.id_seed + "000001"); + change.metadata.user_id.should.equal(this.user_id); + return done(); + }); + } + , 200); + }); + }); + }); - it "should remove the change after accepting", (done) -> - DocUpdaterClient.acceptChange @project_id, @doc.id, @id_seed + "000001", (error) => - throw error if error? - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? 
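// The expected id this.id_seed + "000001" comes from seeding the
// tracked-change id generator via the update's meta.tc field and appending a
// zero-padded counter. One plausible encoding (a sketch only; the actual
// generator is internal to the ranges tracker):
//   const changeId = (seed, n) => seed + n.toString().padStart(6, "0");
//   changeId("587357bd35e64f6157", 1); // => "587357bd35e64f6157000001"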
- expect(data.ranges.changes).to.be.undefined - done() + return it("should remove the change after accepting", function(done) { + return DocUpdaterClient.acceptChange(this.project_id, this.doc.id, this.id_seed + "000001", error => { + if (error != null) { throw error; } + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + expect(data.ranges.changes).to.be.undefined; + return done(); + }); + }); + }); + }); - describe "deleting a comment range", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @doc = { - id: DocUpdaterClient.randomId() + describe("deleting a comment range", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.doc = { + id: DocUpdaterClient.randomId(), lines: ["foo bar"] - } - @update = { - doc: @doc.id - op: [{ c: "bar", p: 4, t: @tid = DocUpdaterClient.randomId() }] + }; + this.update = { + doc: this.doc.id, + op: [{ c: "bar", p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], v: 0 - } - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines + }; + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, version: 0 - } - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc.id, @update, (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? - ranges = data.ranges - change = ranges.comments[0] - change.op.should.deep.equal { c: "bar", p: 4, t: @tid } - change.id.should.equal @tid - done() - , 200 + }); + return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, this.update, error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + const { + ranges + } = data; + const change = ranges.comments[0]; + change.op.should.deep.equal({ c: "bar", p: 4, t: this.tid }); + change.id.should.equal(this.tid); + return done(); + }); + } + , 200); + }); + }); + }); - it "should remove the comment range", (done) -> - DocUpdaterClient.removeComment @project_id, @doc.id, @tid, (error, res) => - throw error if error? - expect(res.statusCode).to.equal 204 - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? 
- expect(data.ranges.comments).to.be.undefined - done() + return it("should remove the comment range", function(done) { + return DocUpdaterClient.removeComment(this.project_id, this.doc.id, this.tid, (error, res) => { + if (error != null) { throw error; } + expect(res.statusCode).to.equal(204); + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + expect(data.ranges.comments).to.be.undefined; + return done(); + }); + }); + }); + }); - describe "tripping range size limit", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @id_seed = DocUpdaterClient.randomId() - @doc = { - id: DocUpdaterClient.randomId() + describe("tripping range size limit", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.id_seed = DocUpdaterClient.randomId(); + this.doc = { + id: DocUpdaterClient.randomId(), lines: ["aaa"] - } - @i = new Array(3 * 1024 * 1024).join("a") - @updates = [{ - doc: @doc.id - op: [{ i: @i, p: 1 }] - v: 0 - meta: { user_id: @user_id, tc: @id_seed } - }] - MockWebApi.insertDoc @project_id, @doc.id, { - lines: @doc.lines + }; + this.i = new Array(3 * 1024 * 1024).join("a"); + this.updates = [{ + doc: this.doc.id, + op: [{ i: this.i, p: 1 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed } + }]; + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, version: 0 + }); + const jobs = []; + for (let update of Array.from(this.updates)) { + (update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); + })(update); } - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) => - throw error if error? - async.series jobs, (error) -> - throw error if error? - setTimeout done, 200 + return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { throw error; } + return async.series(jobs, function(error) { + if (error != null) { throw error; } + return setTimeout(done, 200); + }); + }); + }); - it "should not update the ranges", (done) -> - DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) => - throw error if error? 
- ranges = data.ranges - expect(ranges.changes).to.be.undefined - done() + return it("should not update the ranges", function(done) { + return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { + if (error != null) { throw error; } + const { + ranges + } = data; + expect(ranges.changes).to.be.undefined; + return done(); + }); + }); + }); - describe "deleting text surrounding a comment", -> - before (done) -> - @project_id = DocUpdaterClient.randomId() - @user_id = DocUpdaterClient.randomId() - @doc_id = DocUpdaterClient.randomId() - MockWebApi.insertDoc @project_id, @doc_id, { - lines: ["foo bar baz"] - version: 0 + return describe("deleting text surrounding a comment", function() { + before(function(done) { + this.project_id = DocUpdaterClient.randomId(); + this.user_id = DocUpdaterClient.randomId(); + this.doc_id = DocUpdaterClient.randomId(); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: ["foo bar baz"], + version: 0, ranges: { comments: [{ - op: { c: "a", p: 5, tid: @tid = DocUpdaterClient.randomId() } - metadata: - user_id: @user_id + op: { c: "a", p: 5, tid: (this.tid = DocUpdaterClient.randomId()) }, + metadata: { + user_id: this.user_id, ts: new Date() + } }] } - } - @updates = [{ - doc: @doc_id - op: [{ d: "foo ", p: 0 }] - v: 0 - meta: { user_id: @user_id } + }); + this.updates = [{ + doc: this.doc_id, + op: [{ d: "foo ", p: 0 }], + v: 0, + meta: { user_id: this.user_id } }, { - doc: @doc_id - op: [{ d: "bar ", p: 0 }] - v: 1 - meta: { user_id: @user_id } - }] - jobs = [] - for update in @updates - do (update) => - jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc_id, update, callback - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - async.series jobs, (error) -> - throw error if error? - setTimeout () => - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => - throw error if error? - done() - , 200 + doc: this.doc_id, + op: [{ d: "bar ", p: 0 }], + v: 1, + meta: { user_id: this.user_id } + }]; + const jobs = []; + for (let update of Array.from(this.updates)) { + (update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); + })(update); + } + return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + return async.series(jobs, function(error) { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, data) => { + if (error != null) { throw error; } + return done(); + }); + } + , 200); + }); + }); + }); - it "should write a snapshot from before the destructive change", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => - return done(error) if error? - db.docSnapshots.find { - project_id: ObjectId(@project_id), - doc_id: ObjectId(@doc_id) - }, (error, docSnapshots) => - return done(error) if error? 
- expect(docSnapshots.length).to.equal 1 - expect(docSnapshots[0].version).to.equal 1 - expect(docSnapshots[0].lines).to.deep.equal ["bar baz"] - expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal { + return it("should write a snapshot from before the destructive change", function(done) { + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, data) => { + if (error != null) { return done(error); } + return db.docSnapshots.find({ + project_id: ObjectId(this.project_id), + doc_id: ObjectId(this.doc_id) + }, (error, docSnapshots) => { + if (error != null) { return done(error); } + expect(docSnapshots.length).to.equal(1); + expect(docSnapshots[0].version).to.equal(1); + expect(docSnapshots[0].lines).to.deep.equal(["bar baz"]); + expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal({ c: "a", p: 1, - tid: @tid - } - done() + tid: this.tid + }); + return done(); + }); + }); + }); + }); +}); diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js index 5ea43a39cc..1255a0a938 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js @@ -1,248 +1,320 @@ -sinon = require "sinon" -chai = require("chai") -chai.should() -expect = require("chai").expect -Settings = require('settings-sharelatex') -rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -Keys = Settings.redis.documentupdater.key_schema +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require("sinon"); +const chai = require("chai"); +chai.should(); +const { + expect +} = require("chai"); +const Settings = require('settings-sharelatex'); +const rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater); +const Keys = Settings.redis.documentupdater.key_schema; -MockTrackChangesApi = require "./helpers/MockTrackChangesApi" -MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi" -MockWebApi = require "./helpers/MockWebApi" -DocUpdaterClient = require "./helpers/DocUpdaterClient" -DocUpdaterApp = require "./helpers/DocUpdaterApp" +const MockTrackChangesApi = require("./helpers/MockTrackChangesApi"); +const MockProjectHistoryApi = require("./helpers/MockProjectHistoryApi"); +const MockWebApi = require("./helpers/MockWebApi"); +const DocUpdaterClient = require("./helpers/DocUpdaterClient"); +const DocUpdaterApp = require("./helpers/DocUpdaterApp"); -describe "Setting a document", -> - before (done) -> - @lines = ["one", "two", "three"] - @version = 42 - @update = - doc: @doc_id +describe("Setting a document", function() { + before(function(done) { + this.lines = ["one", "two", "three"]; + this.version = 42; + this.update = { + doc: this.doc_id, op: [{ - i: "one and a half\n" + i: "one and a half\n", p: 4 - }] - v: @version - @result = ["one", "one and a half", "two", "three"] - @newLines = ["these", "are", "the", "new", "lines"] - @source = "dropbox" - @user_id = "user-id-123" + }], + v: this.version + }; + this.result = ["one", "one and a half", "two", "three"]; + this.newLines = ["these", "are", "the", "new", "lines"]; + this.source = "dropbox"; + this.user_id = 
"user-id-123"; - sinon.spy MockTrackChangesApi, "flushDoc" - sinon.spy MockProjectHistoryApi, "flushProject" - sinon.spy MockWebApi, "setDocument" - DocUpdaterApp.ensureRunning(done) + sinon.spy(MockTrackChangesApi, "flushDoc"); + sinon.spy(MockProjectHistoryApi, "flushProject"); + sinon.spy(MockWebApi, "setDocument"); + return DocUpdaterApp.ensureRunning(done); + }); - after -> - MockTrackChangesApi.flushDoc.restore() - MockProjectHistoryApi.flushProject.restore() - MockWebApi.setDocument.restore() + after(function() { + MockTrackChangesApi.flushDoc.restore(); + MockProjectHistoryApi.flushProject.restore(); + return MockWebApi.setDocument.restore(); + }); - describe "when the updated doc exists in the doc updater", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, lines: @lines, version: @version - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => - throw error if error? - setTimeout () => - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => - @statusCode = res.statusCode - done() - , 200 - return null + describe("when the updated doc exists in the doc updater", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, error => { + if (error != null) { throw error; } + return setTimeout(() => { + return DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.newLines, this.source, this.user_id, false, (error, res, body) => { + this.statusCode = res.statusCode; + return done(); + }); + } + , 200); + }); + }); + return null; + }); - after -> - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + after(function() { + MockTrackChangesApi.flushDoc.reset(); + MockProjectHistoryApi.flushProject.reset(); + return MockWebApi.setDocument.reset(); + }); - it "should return a 204 status code", -> - @statusCode.should.equal 204 + it("should return a 204 status code", function() { + return this.statusCode.should.equal(204); + }); - it "should send the updated doc lines and version to the web api", -> - MockWebApi.setDocument - .calledWith(@project_id, @doc_id, @newLines) - .should.equal true + it("should send the updated doc lines and version to the web api", function() { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true); + }); - it "should update the lines in the doc updater", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.lines.should.deep.equal @newLines - done() - return null + it("should update the lines in the doc updater", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.lines.should.deep.equal(this.newLines); + return done(); + }); + return null; + }); - it "should bump the version in the doc updater", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, doc) => - doc.version.should.equal @version + 2 - done() - return null 
+ it("should bump the version in the doc updater", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { + doc.version.should.equal(this.version + 2); + return done(); + }); + return null; + }); - it "should leave the document in redis", (done) -> - rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) => - throw error if error? - expect(JSON.parse(lines)).to.deep.equal @newLines - done() - return null + return it("should leave the document in redis", function(done) { + rclient_du.get(Keys.docLines({doc_id: this.doc_id}), (error, lines) => { + if (error != null) { throw error; } + expect(JSON.parse(lines)).to.deep.equal(this.newLines); + return done(); + }); + return null; + }); + }); - describe "when the updated doc does not exist in the doc updater", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => - @statusCode = res.statusCode - setTimeout done, 200 - return null + describe("when the updated doc does not exist in the doc updater", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.newLines, this.source, this.user_id, false, (error, res, body) => { + this.statusCode = res.statusCode; + return setTimeout(done, 200); + }); + return null; + }); - after -> - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + after(function() { + MockTrackChangesApi.flushDoc.reset(); + MockProjectHistoryApi.flushProject.reset(); + return MockWebApi.setDocument.reset(); + }); - it "should return a 204 status code", -> - @statusCode.should.equal 204 + it("should return a 204 status code", function() { + return this.statusCode.should.equal(204); + }); - it "should send the updated doc lines to the web api", -> - MockWebApi.setDocument - .calledWith(@project_id, @doc_id, @newLines) - .should.equal true + it("should send the updated doc lines to the web api", function() { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true); + }); - it "should flush track changes", -> - MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true + it("should flush track changes", function() { + return MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true); + }); - it "should flush project history", -> - MockProjectHistoryApi.flushProject.calledWith(@project_id).should.equal true + it("should flush project history", function() { + return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true); + }); - it "should remove the document from redis", (done) -> - rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) => - throw error if error? 
- expect(lines).to.not.exist - done() - return null + return it("should remove the document from redis", function(done) { + rclient_du.get(Keys.docLines({doc_id: this.doc_id}), (error, lines) => { + if (error != null) { throw error; } + expect(lines).to.not.exist; + return done(); + }); + return null; + }); + }); - describe "when the updated doc is too large for the body parser", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - @newLines = [] - while JSON.stringify(@newLines).length < Settings.max_doc_length + 64 * 1024 - @newLines.push("(a long line of text)".repeat(10000)) - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => - @statusCode = res.statusCode - setTimeout done, 200 - return null + describe("when the updated doc is too large for the body parser", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + this.newLines = []; + while (JSON.stringify(this.newLines).length < (Settings.max_doc_length + (64 * 1024))) { + this.newLines.push("(a long line of text)".repeat(10000)); + } + DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.newLines, this.source, this.user_id, false, (error, res, body) => { + this.statusCode = res.statusCode; + return setTimeout(done, 200); + }); + return null; + }); - after -> - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + after(function() { + MockTrackChangesApi.flushDoc.reset(); + MockProjectHistoryApi.flushProject.reset(); + return MockWebApi.setDocument.reset(); + }); - it "should return a 413 status code", -> - @statusCode.should.equal 413 + it("should return a 413 status code", function() { + return this.statusCode.should.equal(413); + }); - it "should not send the updated doc lines to the web api", -> - MockWebApi.setDocument.called.should.equal false + it("should not send the updated doc lines to the web api", () => MockWebApi.setDocument.called.should.equal(false)); - it "should not flush track changes", -> - MockTrackChangesApi.flushDoc.called.should.equal false + it("should not flush track changes", () => MockTrackChangesApi.flushDoc.called.should.equal(false)); - it "should not flush project history", -> - MockProjectHistoryApi.flushProject.called.should.equal false + return it("should not flush project history", () => MockProjectHistoryApi.flushProject.called.should.equal(false)); + }); - describe "when the updated doc is large but under the bodyParser and HTTPController size limit", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} + describe("when the updated doc is large but under the bodyParser and HTTPController size limit", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - @newLines = [] - while JSON.stringify(@newLines).length < 2 * 1024 * 1024 # limit in HTTPController - @newLines.push("(a long line of text)".repeat(10000)) - @newLines.pop() 
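The 413 path above relies on body-parser rejecting a payload over its configured limit before any route handler runs, which is why the test only has to build newLines past Settings.max_doc_length plus 64 kB of headroom. A self-contained sketch of that rejection with a deliberately tiny 1 kB limit; the port and sizes here are illustrative, not the service's:

const express = require("express");
const bodyParser = require("body-parser");
const http = require("http");

const app = express();
app.post("/doc", bodyParser.json({ limit: "1kb" }), (req, res) => res.sendStatus(204));

const server = app.listen(3999, () => {
  const body = JSON.stringify({ lines: ["x".repeat(4096)] }); // well over 1kb
  const req = http.request(
    { port: 3999, path: "/doc", method: "POST",
      headers: { "Content-Type": "application/json" } },
    (res) => { console.log(res.statusCode); server.close(); }  // prints 413
  );
  req.end(body);
});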
# remove the line which took it over the limit - DocUpdaterClient.setDocLines @project_id, @doc_id, @newLines, @source, @user_id, false, (error, res, body) => - @statusCode = res.statusCode - setTimeout done, 200 - return null + this.newLines = []; + while (JSON.stringify(this.newLines).length < (2 * 1024 * 1024)) { // limit in HTTPController + this.newLines.push("(a long line of text)".repeat(10000)); + } + this.newLines.pop(); // remove the line which took it over the limit + DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.newLines, this.source, this.user_id, false, (error, res, body) => { + this.statusCode = res.statusCode; + return setTimeout(done, 200); + }); + return null; + }); - after -> - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + after(function() { + MockTrackChangesApi.flushDoc.reset(); + MockProjectHistoryApi.flushProject.reset(); + return MockWebApi.setDocument.reset(); + }); - it "should return a 204 status code", -> - @statusCode.should.equal 204 + it("should return a 204 status code", function() { + return this.statusCode.should.equal(204); + }); - it "should send the updated doc lines to the web api", -> - MockWebApi.setDocument - .calledWith(@project_id, @doc_id, @newLines) - .should.equal true + return it("should send the updated doc lines to the web api", function() { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true); + }); + }); - describe "with track changes", -> - before -> - @lines = ["one", "one and a half", "two", "three"] - @id_seed = "587357bd35e64f6157" - @update = - doc: @doc_id + return describe("with track changes", function() { + before(function() { + this.lines = ["one", "one and a half", "two", "three"]; + this.id_seed = "587357bd35e64f6157"; + return this.update = { + doc: this.doc_id, op: [{ - d: "one and a half\n" + d: "one and a half\n", p: 4 - }] - meta: - tc: @id_seed - user_id: @user_id - v: @version + }], + meta: { + tc: this.id_seed, + user_id: this.user_id + }, + v: this.version + }; + }); - describe "with the undo flag", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => - throw error if error? 
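The update defined in the track-changes block just above is a ShareJS text op: a component {i, p} inserts text at offset p, {d, p} deletes the given text at p, and meta.tc carries the id seed that marks the op for track changes. A toy applier for the two component kinds, for orientation only; the real logic, including operational transform, lives in the sharejs text type elsewhere in this tree:

function applyComponent(doc, c) {
  if (c.i != null) {                    // insert c.i at offset c.p
    return doc.slice(0, c.p) + c.i + doc.slice(c.p);
  }
  if (c.d != null) {                    // delete c.d at offset c.p
    if (doc.slice(c.p, c.p + c.d.length) !== c.d) {
      throw new Error("delete component does not match document");
    }
    return doc.slice(0, c.p) + doc.slice(c.p + c.d.length);
  }
  return doc;
}

const doc = "one\none and a half\ntwo\nthree";
console.log(applyComponent(doc, { d: "one and a half\n", p: 4 }));
// -> "one\ntwo\nthree"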
- # Go back to old lines, with undo flag - DocUpdaterClient.setDocLines @project_id, @doc_id, @lines, @source, @user_id, true, (error, res, body) => - @statusCode = res.statusCode - setTimeout done, 200 - return null + describe("with the undo flag", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, error => { + if (error != null) { throw error; } + // Go back to old lines, with undo flag + return DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.lines, this.source, this.user_id, true, (error, res, body) => { + this.statusCode = res.statusCode; + return setTimeout(done, 200); + }); + }); + }); + return null; + }); - after -> - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + after(function() { + MockTrackChangesApi.flushDoc.reset(); + MockProjectHistoryApi.flushProject.reset(); + return MockWebApi.setDocument.reset(); + }); - it "should undo the tracked changes", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => - throw error if error? - ranges = data.ranges - expect(ranges.changes).to.be.undefined - done() - return null + return it("should undo the tracked changes", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, data) => { + if (error != null) { throw error; } + const { + ranges + } = data; + expect(ranges.changes).to.be.undefined; + return done(); + }); + return null; + }); + }); - describe "without the undo flag", -> - before (done) -> - [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()] - MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version} - DocUpdaterClient.preloadDoc @project_id, @doc_id, (error) => - throw error if error? - DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) => - throw error if error? 
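The only knob separating this case from its sibling below is the undoing flag, which travels in the JSON body that setDocLines posts (see the DocUpdaterClient helper later in this patch). A schematic of the two payloads; the source value here is a placeholder, not the suite's actual fixture:

const base = { lines: ["one", "one and a half", "two", "three"],
               source: "some-source", user_id: "user-id-123" };

const undo = { ...base, undoing: true };  // tracked change is rolled back:
                                          // ranges.changes comes back undefined
const edit = { ...base, undoing: false }; // tracked change survives:
                                          // ranges.changes.length === 1
console.log(undo, edit);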
- # Go back to old lines, without undo flag - DocUpdaterClient.setDocLines @project_id, @doc_id, @lines, @source, @user_id, false, (error, res, body) => - @statusCode = res.statusCode - setTimeout done, 200 - return null + return describe("without the undo flag", function() { + before(function(done) { + [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { throw error; } + return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, error => { + if (error != null) { throw error; } + // Go back to old lines, without undo flag + return DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.lines, this.source, this.user_id, false, (error, res, body) => { + this.statusCode = res.statusCode; + return setTimeout(done, 200); + }); + }); + }); + return null; + }); - after -> - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + after(function() { + MockTrackChangesApi.flushDoc.reset(); + MockProjectHistoryApi.flushProject.reset(); + return MockWebApi.setDocument.reset(); + }); - it "should not undo the tracked changes", (done) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, data) => - throw error if error? - ranges = data.ranges - expect(ranges.changes.length).to.equal 1 - done() - return null + return it("should not undo the tracked changes", function(done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, data) => { + if (error != null) { throw error; } + const { + ranges + } = data; + expect(ranges.changes.length).to.equal(1); + return done(); + }); + return null; + }); + }); + }); +}); diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js index 0f77199e73..985458819a 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js @@ -1,20 +1,38 @@ -app = require('../../../../app') -require("logger-sharelatex").logger.level("fatal") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const app = require('../../../../app'); +require("logger-sharelatex").logger.level("fatal"); -module.exports = - running: false - initing: false - callbacks: [] - ensureRunning: (callback = (error) ->) -> - if @running - return callback() - else if @initing - @callbacks.push callback - else - @initing = true - @callbacks.push callback - app.listen 3003, "localhost", (error) => - throw error if error? 
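ensureRunning, converted in the surrounding hunk, is a memoized async initializer: the first caller boots the app, callers that arrive mid-boot are queued, and everyone after that returns immediately. A generic standalone sketch of the pattern, with a hypothetical startServer standing in for app.listen:

function startServer(cb) { setTimeout(cb, 10); } // hypothetical async boot

const starter = {
  running: false,
  initing: false,
  callbacks: [],
  ensureRunning(callback) {
    if (this.running) { return callback(); }      // already up: call back now
    this.callbacks.push(callback);                // queue this caller
    if (this.initing) { return; }                 // a boot is already underway
    this.initing = true;
    startServer((error) => {
      if (error) { throw error; }
      this.running = true;
      for (const cb of this.callbacks) { cb(); }  // drain the queue once
    });
  }
};

starter.ensureRunning(() => console.log("first caller"));
starter.ensureRunning(() => console.log("caller queued during boot"));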
- @running = true - for callback in @callbacks - callback() +module.exports = { + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { callback = function(error) {}; } + if (this.running) { + return callback(); + } else if (this.initing) { + return this.callbacks.push(callback); + } else { + this.initing = true; + this.callbacks.push(callback); + return app.listen(3003, "localhost", error => { + if (error != null) { throw error; } + this.running = true; + return (() => { + const result = []; + for (callback of Array.from(this.callbacks)) { + result.push(callback()); + } + return result; + })(); + }); + } + } +}; diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js index b78f2aa7dd..09fad8c8f7 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js @@ -1,111 +1,171 @@ -Settings = require('settings-sharelatex') -rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -keys = Settings.redis.documentupdater.key_schema -request = require("request").defaults(jar: false) -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DocUpdaterClient; +const Settings = require('settings-sharelatex'); +const rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater); +const keys = Settings.redis.documentupdater.key_schema; +const request = require("request").defaults({jar: false}); +const async = require("async"); -rclient_sub = require("redis-sharelatex").createClient(Settings.redis.pubsub) -rclient_sub.subscribe "applied-ops" -rclient_sub.setMaxListeners(0) +const rclient_sub = require("redis-sharelatex").createClient(Settings.redis.pubsub); +rclient_sub.subscribe("applied-ops"); +rclient_sub.setMaxListeners(0); -module.exports = DocUpdaterClient = - randomId: () -> - chars = for i in [1..24] - Math.random().toString(16)[2] - return chars.join("") +module.exports = (DocUpdaterClient = { + randomId() { + const chars = __range__(1, 24, true).map((i) => + Math.random().toString(16)[2]); + return chars.join(""); + }, - subscribeToAppliedOps: (callback = (message) ->) -> - rclient_sub.on "message", callback + subscribeToAppliedOps(callback) { + if (callback == null) { callback = function(message) {}; } + return rclient_sub.on("message", callback); + }, - sendUpdate: (project_id, doc_id, update, callback = (error) ->) -> - rclient.rpush keys.pendingUpdates({doc_id}), JSON.stringify(update), (error)-> - return callback(error) if error? - doc_key = "#{project_id}:#{doc_id}" - rclient.sadd "DocsWithPendingUpdates", doc_key, (error) -> - return callback(error) if error? 
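randomId's __range__(1, 24, true).map(...) above is a decaffeinate artifact for a 24-iteration list comprehension. An equivalent helper-free one-liner, offered only as the kind of cleanup the DS101 note asks for; it inherits the original's quirk that Math.random() returning exactly 0 would yield an undefined character:

const randomId = () =>
  Array.from({ length: 24 }, () => Math.random().toString(16)[2]).join("");

console.log(randomId()); // e.g. "3fa09c..." -- 24 pseudo-random hex digits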
- rclient.rpush "pending-updates-list", doc_key, callback + sendUpdate(project_id, doc_id, update, callback) { + if (callback == null) { callback = function(error) {}; } + return rclient.rpush(keys.pendingUpdates({doc_id}), JSON.stringify(update), function(error){ + if (error != null) { return callback(error); } + const doc_key = `${project_id}:${doc_id}`; + return rclient.sadd("DocsWithPendingUpdates", doc_key, function(error) { + if (error != null) { return callback(error); } + return rclient.rpush("pending-updates-list", doc_key, callback); + }); + }); + }, - sendUpdates: (project_id, doc_id, updates, callback = (error) ->) -> - DocUpdaterClient.preloadDoc project_id, doc_id, (error) -> - return callback(error) if error? - jobs = [] - for update in updates - do (update) -> - jobs.push (callback) -> - DocUpdaterClient.sendUpdate project_id, doc_id, update, callback - async.series jobs, (err) -> - DocUpdaterClient.waitForPendingUpdates project_id, doc_id, callback + sendUpdates(project_id, doc_id, updates, callback) { + if (callback == null) { callback = function(error) {}; } + return DocUpdaterClient.preloadDoc(project_id, doc_id, function(error) { + if (error != null) { return callback(error); } + const jobs = []; + for (let update of Array.from(updates)) { + ((update => jobs.push(callback => DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback))))(update); + } + return async.series(jobs, err => DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback)); + }); + }, - waitForPendingUpdates: (project_id, doc_id, callback) -> - async.retry {times: 30, interval: 100}, (cb) -> - rclient.llen keys.pendingUpdates({doc_id}), (err, length) -> - if length > 0 - cb(new Error("updates still pending")) - else - cb() - , callback + waitForPendingUpdates(project_id, doc_id, callback) { + return async.retry({times: 30, interval: 100}, cb => rclient.llen(keys.pendingUpdates({doc_id}), function(err, length) { + if (length > 0) { + return cb(new Error("updates still pending")); + } else { + return cb(); + } + }) + , callback); + }, - getDoc: (project_id, doc_id, callback = (error, res, body) ->) -> - request.get "http://localhost:3003/project/#{project_id}/doc/#{doc_id}", (error, res, body) -> - if body? and res.statusCode >= 200 and res.statusCode < 300 - body = JSON.parse(body) - callback error, res, body + getDoc(project_id, doc_id, callback) { + if (callback == null) { callback = function(error, res, body) {}; } + return request.get(`http://localhost:3003/project/${project_id}/doc/${doc_id}`, function(error, res, body) { + if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { + body = JSON.parse(body); + } + return callback(error, res, body); + }); + }, - getDocAndRecentOps: (project_id, doc_id, fromVersion, callback = (error, res, body) ->) -> - request.get "http://localhost:3003/project/#{project_id}/doc/#{doc_id}?fromVersion=#{fromVersion}", (error, res, body) -> - if body? 
and res.statusCode >= 200 and res.statusCode < 300 - body = JSON.parse(body) - callback error, res, body + getDocAndRecentOps(project_id, doc_id, fromVersion, callback) { + if (callback == null) { callback = function(error, res, body) {}; } + return request.get(`http://localhost:3003/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`, function(error, res, body) { + if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { + body = JSON.parse(body); + } + return callback(error, res, body); + }); + }, - preloadDoc: (project_id, doc_id, callback = (error) ->) -> - DocUpdaterClient.getDoc project_id, doc_id, callback + preloadDoc(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + return DocUpdaterClient.getDoc(project_id, doc_id, callback); + }, - flushDoc: (project_id, doc_id, callback = (error) ->) -> - request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/flush", (error, res, body) -> - callback error, res, body + flushDoc(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + return request.post(`http://localhost:3003/project/${project_id}/doc/${doc_id}/flush`, (error, res, body) => callback(error, res, body)); + }, - setDocLines: (project_id, doc_id, lines, source, user_id, undoing, callback = (error) ->) -> - request.post { - url: "http://localhost:3003/project/#{project_id}/doc/#{doc_id}" - json: - lines: lines - source: source - user_id: user_id - undoing: undoing - }, (error, res, body) -> - callback error, res, body + setDocLines(project_id, doc_id, lines, source, user_id, undoing, callback) { + if (callback == null) { callback = function(error) {}; } + return request.post({ + url: `http://localhost:3003/project/${project_id}/doc/${doc_id}`, + json: { + lines, + source, + user_id, + undoing + } + }, (error, res, body) => callback(error, res, body)); + }, - deleteDoc: (project_id, doc_id, callback = (error) ->) -> - request.del "http://localhost:3003/project/#{project_id}/doc/#{doc_id}", (error, res, body) -> - callback error, res, body + deleteDoc(project_id, doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + return request.del(`http://localhost:3003/project/${project_id}/doc/${doc_id}`, (error, res, body) => callback(error, res, body)); + }, - flushProject: (project_id, callback = () ->) -> - request.post "http://localhost:3003/project/#{project_id}/flush", callback + flushProject(project_id, callback) { + if (callback == null) { callback = function() {}; } + return request.post(`http://localhost:3003/project/${project_id}/flush`, callback); + }, - deleteProject: (project_id, callback = () ->) -> - request.del "http://localhost:3003/project/#{project_id}", callback + deleteProject(project_id, callback) { + if (callback == null) { callback = function() {}; } + return request.del(`http://localhost:3003/project/${project_id}`, callback); + }, - deleteProjectOnShutdown: (project_id, callback = () ->) -> - request.del "http://localhost:3003/project/#{project_id}?background=true&shutdown=true", callback + deleteProjectOnShutdown(project_id, callback) { + if (callback == null) { callback = function() {}; } + return request.del(`http://localhost:3003/project/${project_id}?background=true&shutdown=true`, callback); + }, - flushOldProjects: (callback = () ->) -> - request.get "http://localhost:3003/flush_queued_projects?min_delete_age=1", callback + flushOldProjects(callback) { + if (callback == null) { callback = function() {}; } + 
return request.get("http://localhost:3003/flush_queued_projects?min_delete_age=1", callback); + }, - acceptChange: (project_id, doc_id, change_id, callback = () ->) -> - request.post "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/change/#{change_id}/accept", callback + acceptChange(project_id, doc_id, change_id, callback) { + if (callback == null) { callback = function() {}; } + return request.post(`http://localhost:3003/project/${project_id}/doc/${doc_id}/change/${change_id}/accept`, callback); + }, - removeComment: (project_id, doc_id, comment, callback = () ->) -> - request.del "http://localhost:3003/project/#{project_id}/doc/#{doc_id}/comment/#{comment}", callback + removeComment(project_id, doc_id, comment, callback) { + if (callback == null) { callback = function() {}; } + return request.del(`http://localhost:3003/project/${project_id}/doc/${doc_id}/comment/${comment}`, callback); + }, - getProjectDocs: (project_id, projectStateHash, callback = () ->) -> - request.get "http://localhost:3003/project/#{project_id}/doc?state=#{projectStateHash}", (error, res, body) -> - if body? and res.statusCode >= 200 and res.statusCode < 300 - body = JSON.parse(body) - callback error, res, body + getProjectDocs(project_id, projectStateHash, callback) { + if (callback == null) { callback = function() {}; } + return request.get(`http://localhost:3003/project/${project_id}/doc?state=${projectStateHash}`, function(error, res, body) { + if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { + body = JSON.parse(body); + } + return callback(error, res, body); + }); + }, - sendProjectUpdate: (project_id, userId, docUpdates, fileUpdates, version, callback = (error) ->) -> - request.post { - url: "http://localhost:3003/project/#{project_id}" + sendProjectUpdate(project_id, userId, docUpdates, fileUpdates, version, callback) { + if (callback == null) { callback = function(error) {}; } + return request.post({ + url: `http://localhost:3003/project/${project_id}`, json: { userId, docUpdates, fileUpdates, version } - }, (error, res, body) -> - callback error, res, body + }, (error, res, body) => callback(error, res, body)); + } +}); + +function __range__(left, right, inclusive) { + let range = []; + let ascending = left < right; + let end = !inclusive ? right : ascending ? right + 1 : right - 1; + for (let i = left; ascending ? i < end : i > end; ascending ? 
i++ : i--) { + range.push(i); + } + return range; +} \ No newline at end of file diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js index eb635225da..ba084fd108 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js +++ b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js @@ -1,19 +1,34 @@ -express = require("express") -app = express() +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let MockProjectHistoryApi; +const express = require("express"); +const app = express(); -module.exports = MockProjectHistoryApi = - flushProject: (doc_id, callback = (error) ->) -> - callback() +module.exports = (MockProjectHistoryApi = { + flushProject(doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + return callback(); + }, - run: () -> - app.post "/project/:project_id/flush", (req, res, next) => - @flushProject req.params.project_id, (error) -> - if error? - res.sendStatus 500 - else - res.sendStatus 204 + run() { + app.post("/project/:project_id/flush", (req, res, next) => { + return this.flushProject(req.params.project_id, function(error) { + if (error != null) { + return res.sendStatus(500); + } else { + return res.sendStatus(204); + } + }); + }); - app.listen 3054, (error) -> - throw error if error? + return app.listen(3054, function(error) { + if (error != null) { throw error; } + }); + } +}); -MockProjectHistoryApi.run() +MockProjectHistoryApi.run(); diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js index 924937fe39..bd217d5545 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js +++ b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js @@ -1,23 +1,38 @@ -express = require("express") -app = express() +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let MockTrackChangesApi; +const express = require("express"); +const app = express(); -module.exports = MockTrackChangesApi = - flushDoc: (doc_id, callback = (error) ->) -> - callback() +module.exports = (MockTrackChangesApi = { + flushDoc(doc_id, callback) { + if (callback == null) { callback = function(error) {}; } + return callback(); + }, - run: () -> - app.post "/project/:project_id/doc/:doc_id/flush", (req, res, next) => - @flushDoc req.params.doc_id, (error) -> - if error? - res.sendStatus 500 - else - res.sendStatus 204 + run() { + app.post("/project/:project_id/doc/:doc_id/flush", (req, res, next) => { + return this.flushDoc(req.params.doc_id, function(error) { + if (error != null) { + return res.sendStatus(500); + } else { + return res.sendStatus(204); + } + }); + }); - app.listen 3015, (error) -> - throw error if error? 
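The mock services in these hunks (MockProjectHistoryApi above, MockTrackChangesApi here, MockWebApi next) share one template: a tiny express app whose flush endpoint delegates to an instance method the acceptance tests can spy on or stub. A distilled standalone version of that template; the route, method name, and port 3998 are illustrative:

const express = require("express");
const app = express();

const MockFlushApi = {
  flush(id, callback) { callback(); },  // tests swap this out via sinon

  run() {
    app.post("/thing/:id/flush", (req, res) => {
      this.flush(req.params.id, (error) => {
        res.sendStatus(error != null ? 500 : 204);
      });
    });
    app.listen(3998, (error) => {
      if (error != null) { throw error; }
    });
  }
};

MockFlushApi.run();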
- .on "error", (error) -> - console.error "error starting MockTrackChangesApi:", error.message - process.exit(1) + return app.listen(3015, function(error) { + if (error != null) { throw error; } + }).on("error", function(error) { + console.error("error starting MockTrackChangesApi:", error.message); + return process.exit(1); + }); + } +}); -MockTrackChangesApi.run() +MockTrackChangesApi.run(); diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js index 19b518f7c6..5069c20cb6 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js @@ -1,54 +1,75 @@ -express = require("express") -bodyParser = require("body-parser") -app = express() -MAX_REQUEST_SIZE = 2*(2*1024*1024 + 64*1024) +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let MockWebApi; +const express = require("express"); +const bodyParser = require("body-parser"); +const app = express(); +const MAX_REQUEST_SIZE = 2*((2*1024*1024) + (64*1024)); -module.exports = MockWebApi = - docs: {} +module.exports = (MockWebApi = { + docs: {}, - clearDocs: () -> @docs = {} + clearDocs() { return this.docs = {}; }, - insertDoc: (project_id, doc_id, doc) -> - doc.version ?= 0 - doc.lines ?= [] - doc.pathname = '/a/b/c.tex' - @docs["#{project_id}:#{doc_id}"] = doc + insertDoc(project_id, doc_id, doc) { + if (doc.version == null) { doc.version = 0; } + if (doc.lines == null) { doc.lines = []; } + doc.pathname = '/a/b/c.tex'; + return this.docs[`${project_id}:${doc_id}`] = doc; + }, - setDocument: (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback = (error) ->) -> - doc = @docs["#{project_id}:#{doc_id}"] ||= {} - doc.lines = lines - doc.version = version - doc.ranges = ranges - doc.pathname = '/a/b/c.tex' - doc.lastUpdatedAt = lastUpdatedAt - doc.lastUpdatedBy = lastUpdatedBy - callback null + setDocument(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback) { + if (callback == null) { callback = function(error) {}; } + const doc = this.docs[`${project_id}:${doc_id}`] || (this.docs[`${project_id}:${doc_id}`] = {}); + doc.lines = lines; + doc.version = version; + doc.ranges = ranges; + doc.pathname = '/a/b/c.tex'; + doc.lastUpdatedAt = lastUpdatedAt; + doc.lastUpdatedBy = lastUpdatedBy; + return callback(null); + }, - getDocument: (project_id, doc_id, callback = (error, doc) ->) -> - callback null, @docs["#{project_id}:#{doc_id}"] + getDocument(project_id, doc_id, callback) { + if (callback == null) { callback = function(error, doc) {}; } + return callback(null, this.docs[`${project_id}:${doc_id}`]); + }, - run: () -> - app.get "/project/:project_id/doc/:doc_id", (req, res, next) => - @getDocument req.params.project_id, req.params.doc_id, (error, doc) -> - if error? - res.sendStatus 500 - else if doc? 
- res.send JSON.stringify doc - else - res.sendStatus 404 + run() { + app.get("/project/:project_id/doc/:doc_id", (req, res, next) => { + return this.getDocument(req.params.project_id, req.params.doc_id, function(error, doc) { + if (error != null) { + return res.sendStatus(500); + } else if (doc != null) { + return res.send(JSON.stringify(doc)); + } else { + return res.sendStatus(404); + } + }); + }); - app.post "/project/:project_id/doc/:doc_id", bodyParser.json({limit: MAX_REQUEST_SIZE}), (req, res, next) => - MockWebApi.setDocument req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, (error) -> - if error? - res.sendStatus 500 - else - res.sendStatus 204 + app.post("/project/:project_id/doc/:doc_id", bodyParser.json({limit: MAX_REQUEST_SIZE}), (req, res, next) => { + return MockWebApi.setDocument(req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, function(error) { + if (error != null) { + return res.sendStatus(500); + } else { + return res.sendStatus(204); + } + }); + }); - app.listen 3000, (error) -> - throw error if error? - .on "error", (error) -> - console.error "error starting MockWebApi:", error.message - process.exit(1) + return app.listen(3000, function(error) { + if (error != null) { throw error; } + }).on("error", function(error) { + console.error("error starting MockWebApi:", error.message); + return process.exit(1); + }); + } +}); -MockWebApi.run() +MockWebApi.run(); From 24ac4d4935838654ee32ae143a5ba4462f715f0f Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:12:36 +0200 Subject: [PATCH 621/769] decaffeinate: Run post-processing cleanups on ApplyingUpdatesToADocTests.coffee and 14 other files --- .../coffee/ApplyingUpdatesToADocTests.js | 30 +++++++++++-------- .../ApplyingUpdatesToProjectStructureTests.js | 29 +++++++++++------- .../coffee/DeletingADocumentTests.js | 7 ++++- .../coffee/DeletingAProjectTests.js | 18 +++++++---- .../coffee/FlushingAProjectTests.js | 10 +++++-- .../acceptance/coffee/FlushingDocsTests.js | 18 +++++++---- .../coffee/GettingADocumentTests.js | 28 ++++++++++------- .../coffee/GettingProjectDocsTests.js | 6 ++++ .../test/acceptance/coffee/RangesTests.js | 28 ++++++++++------- .../coffee/SettingADocumentTests.js | 13 ++++++-- .../coffee/helpers/DocUpdaterApp.js | 5 ++++ .../coffee/helpers/DocUpdaterClient.js | 28 ++++++++++------- .../coffee/helpers/MockProjectHistoryApi.js | 10 +++++-- .../coffee/helpers/MockTrackChangesApi.js | 12 ++++++-- .../acceptance/coffee/helpers/MockWebApi.js | 15 +++++++--- 15 files changed, 175 insertions(+), 82 deletions(-) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js index b5259dc7d5..928e656d03 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
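The [PATCH 621/769] cleanups threaded through the hunks below are mechanical but directional: callbacks that touch no `this` become arrows, for-of loop variables that are never reassigned become const, and, in the opposite direction, mocha hooks the first decaffeinate pass had left as arrows become function expressions again so they can receive the test context. The first two moves distilled into runnable form; doStuff and docs are illustrative stand-ins, and the third move is unpacked further below:

const doStuff = (cb) => cb(null);   // hypothetical async API
const docs = ["a", "b"];

// 1. Callback using no `this`: function expression -> arrow.
doStuff(function (error) { console.log(error); });  // before
doStuff((error) => console.log(error));             // after

// 2. Loop variable never reassigned: let -> const.
for (let d of docs) { console.log(d); }             // before
for (const d of docs) { console.log(d); }           // after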
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -47,14 +53,14 @@ describe("Applying updates to a doc", function() { sinon.spy(MockWebApi, "getDocument"); this.startTime = Date.now(); MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, function(error) { + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); return null; }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); it("should load the document from the web API", function() { return MockWebApi.getDocument @@ -107,7 +113,7 @@ describe("Applying updates to a doc", function() { this.timeout = 10000; this.second_update = Object.create(this.update); this.second_update.v = this.version + 1; - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.second_update, function(error) { + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.second_update, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -133,7 +139,7 @@ describe("Applying updates to a doc", function() { DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { if (error != null) { throw error; } sinon.spy(MockWebApi, "getDocument"); - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, function(error) { + return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -141,9 +147,9 @@ describe("Applying updates to a doc", function() { return null; }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); - it("should not need to call the web api", () => MockWebApi.getDocument.called.should.equal(false)); + it("should not need to call the web api", function() { return MockWebApi.getDocument.called.should.equal(false); }); it("should update the doc", function(done) { DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { @@ -181,7 +187,7 @@ describe("Applying updates to a doc", function() { DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { if (error != null) { throw error; } sinon.spy(MockWebApi, "getDocument"); - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, function(error) { + return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -189,7 +195,7 @@ describe("Applying updates to a doc", function() { return null; }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); it("should update the doc", function(done) { DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { @@ -345,7 +351,7 @@ describe("Applying updates to a doc", function() { DocUpdaterClient.subscribeToAppliedOps(this.messageCallback = sinon.stub()); - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.broken_update, function(error) { + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.broken_update, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -404,9 +410,9 @@ describe("Applying updates to a doc", function() { return null; }); - after(() 
=> MockTrackChangesApi.flushDoc.restore()); + after(function() { return MockTrackChangesApi.flushDoc.restore(); }); - return it("should flush the doc twice", () => MockTrackChangesApi.flushDoc.calledTwice.should.equal(true)); + return it("should flush the doc twice", function() { return MockTrackChangesApi.flushDoc.calledTwice.should.equal(true); }); }); describe("when there is no version in Mongo", function() { @@ -421,7 +427,7 @@ describe("Applying updates to a doc", function() { op: this.update.op, v: 0 }; - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, function(error) { + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js index 3875cf28d1..d20c3f8aef 100644 --- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -33,7 +40,7 @@ describe("Applying updates to a project's structure", function() { this.fileUpdates = [ this.fileUpdate ]; return DocUpdaterApp.ensureRunning(error => { if (error != null) { throw error; } - return DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, [], this.fileUpdates, this.version, function(error) { + return DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, [], this.fileUpdates, this.version, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -70,7 +77,7 @@ describe("Applying updates to a project's structure", function() { describe("when the document is not loaded", function() { before(function(done) { this.project_id = DocUpdaterClient.randomId(); - DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, function(error) { + DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -102,7 +109,7 @@ describe("Applying updates to a project's structure", function() { DocUpdaterClient.preloadDoc(this.project_id, this.docUpdate.id, error => { if (error != null) { throw error; } sinon.spy(MockWebApi, "getDocument"); - return DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, function(error) { + return DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -110,7 +117,7 @@ describe("Applying updates to a project's structure", function() { return null; }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); it("should update the doc", function(done) { DocUpdaterClient.getDoc(this.project_id, this.docUpdate.id, (error, res, doc) => { @@ -167,7 +174,7 @@ describe("Applying updates to a project's structure", function() { return describe("when the documents are 
not loaded", function() { before(function(done) { this.project_id = DocUpdaterClient.randomId(); - DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, this.fileUpdates, this.version, function(error) { + DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, this.fileUpdates, this.version, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -227,7 +234,7 @@ describe("Applying updates to a project's structure", function() { url: 'filestore.example.com' }; this.fileUpdates = [ this.fileUpdate ]; - DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, [], this.fileUpdates, this.version, function(error) { + DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, [], this.fileUpdates, this.version, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -261,7 +268,7 @@ describe("Applying updates to a project's structure", function() { docLines: 'a\nb' }; this.docUpdates = [ this.docUpdate ]; - DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, function(error) { + DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -308,7 +315,7 @@ describe("Applying updates to a project's structure", function() { const userId = this.project_id; DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(0, 250), [], this.version0, function(error) { if (error != null) { throw error; } - return DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(250), [], this.version1, function(error) { + return DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(250), [], this.version1, (error) => { if (error != null) { throw error; } return setTimeout(done, 2000); }); @@ -316,7 +323,7 @@ describe("Applying updates to a project's structure", function() { return null; }); - after(() => MockProjectHistoryApi.flushProject.restore()); + after(function() { return MockProjectHistoryApi.flushProject.restore(); }); return it("should flush project history", function() { return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true); @@ -346,7 +353,7 @@ describe("Applying updates to a project's structure", function() { const userId = this.project_id; DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(0, 10), [], this.version0, function(error) { if (error != null) { throw error; } - return DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(10), [], this.version1, function(error) { + return DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(10), [], this.version1, (error) => { if (error != null) { throw error; } return setTimeout(done, 2000); }); @@ -354,7 +361,7 @@ describe("Applying updates to a project's structure", function() { return null; }); - after(() => MockProjectHistoryApi.flushProject.restore()); + after(function() { return MockProjectHistoryApi.flushProject.restore(); }); return it("should not flush project history", function() { return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(false); diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js index 527ec2edd2..73f61646cf 100644 --- 
a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js +++ b/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -118,7 +123,7 @@ describe("Deleting a document", function() { return this.statusCode.should.equal(204); }); - it("should not need to send the updated document to the web api", () => MockWebApi.setDocument.called.should.equal(false)); + it("should not need to send the updated document to the web api", function() { return MockWebApi.setDocument.called.should.equal(false); }); it("should need to reload the doc if read again", function(done) { MockWebApi.getDocument.called.should.equal.false; diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js index b60d04bd40..74e1afd0b1 100644 --- a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js +++ b/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -45,7 +51,7 @@ describe("Deleting a project", function() { }, updatedLines: ["four", "four and a half", "five", "six"] }]; - for (let doc of Array.from(this.docs)) { + for (const doc of Array.from(this.docs)) { MockWebApi.insertDoc(this.project_id, doc.id, { lines: doc.lines, version: doc.update.v @@ -110,7 +116,7 @@ describe("Deleting a project", function() { return callback(); }); }; - }), function() { + }), () => { MockWebApi.getDocument.restore(); return done(); }); @@ -158,11 +164,11 @@ describe("Deleting a project", function() { return this.statusCode.should.equal(204); }); - it("should not send any documents to the web api", () => MockWebApi.setDocument.called.should.equal(false)); + it("should not send any documents to the web api", function() { return MockWebApi.setDocument.called.should.equal(false); }); - it("should not flush any docs in track changes", () => MockTrackChangesApi.flushDoc.called.should.equal(false)); + it("should not flush any docs in track changes", function() { return MockTrackChangesApi.flushDoc.called.should.equal(false); }); - return it("should not flush to project history", () => MockProjectHistoryApi.flushProject.called.should.equal(false)); + return it("should not flush to project history", function() { return MockProjectHistoryApi.flushProject.called.should.equal(false); }); }); return describe("with the background=true parameter from realtime and a request to flush the queue", function() { @@ -211,7 +217,7 @@ describe("Deleting a project", function() { MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)); }); - return it("should flush to project history", () => MockProjectHistoryApi.flushProject.called.should.equal(true)); + return it("should flush to project history", function() { return MockProjectHistoryApi.flushProject.called.should.equal(true); }); }); }); diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js index e70798ee27..ea45d4af26 100644 --- 
a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js +++ b/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -43,7 +49,7 @@ describe("Flushing a project", function() { }, updatedLines: ["four", "four and a half", "five", "six"] }]; - for (let doc of Array.from(this.docs)) { + for (const doc of Array.from(this.docs)) { MockWebApi.insertDoc(this.project_id, doc.id, { lines: doc.lines, version: doc.update.v @@ -77,7 +83,7 @@ describe("Flushing a project", function() { }); }); - after(() => MockWebApi.setDocument.restore()); + after(function() { return MockWebApi.setDocument.restore(); }); it("should return a 204 status code", function() { return this.statusCode.should.equal(204); diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js index 761a388d3e..5d8c066f20 100644 --- a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js +++ b/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -49,7 +57,7 @@ describe("Flushing a doc to Mongo", function() { }); }); - after(() => MockWebApi.setDocument.restore()); + after(function() { return MockWebApi.setDocument.restore(); }); it("should flush the updated doc lines and version to the web api", function() { return MockWebApi.setDocument @@ -76,9 +84,9 @@ describe("Flushing a doc to Mongo", function() { return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done); }); - after(() => MockWebApi.setDocument.restore()); + after(function() { return MockWebApi.setDocument.restore(); }); - return it("should not flush the doc to the web api", () => MockWebApi.setDocument.called.should.equal(false)); + return it("should not flush the doc to the web api", function() { return MockWebApi.setDocument.called.should.equal(false); }); }); return describe("when the web api http request takes a long time on first request", function() { @@ -89,7 +97,7 @@ describe("Flushing a doc to Mongo", function() { version: this.version }); let t = 30000; - sinon.stub(MockWebApi, "setDocument", function(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback) { + sinon.stub(MockWebApi, "setDocument", (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback) => { if (callback == null) { callback = function(error) {}; } setTimeout(callback, t); return t = 0; @@ -97,7 +105,7 @@ describe("Flushing a doc to Mongo", function() { return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, done); }); - after(() => MockWebApi.setDocument.restore()); + after(function() { return MockWebApi.setDocument.restore(); }); return it("should still work", function(done) { const start = Date.now(); diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js index 273ee1c3f0..ad51413387 100644 --- 
a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js +++ b/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -33,7 +39,7 @@ describe("Getting a document", function() { return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, returnedDoc) => { this.returnedDoc = returnedDoc; return done(); }); }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); it("should load the document from the web API", function() { return MockWebApi.getDocument @@ -62,9 +68,9 @@ describe("Getting a document", function() { }); }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); - it("should not load the document from the web API", () => MockWebApi.getDocument.called.should.equal(false)); + it("should not load the document from the web API", function() { return MockWebApi.getDocument.called.should.equal(false); }); return it("should return the document lines", function() { return this.returnedDoc.lines.should.deep.equal(this.lines); @@ -91,7 +97,7 @@ describe("Getting a document", function() { }); }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); describe("when the ops are loaded", function() { before(function(done) { @@ -134,7 +140,7 @@ describe("Getting a document", function() { describe("when the web api returns an error", function() { before(function(done) { [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - sinon.stub(MockWebApi, "getDocument", function(project_id, doc_id, callback) { + sinon.stub(MockWebApi, "getDocument", (project_id, doc_id, callback) => { if (callback == null) { callback = function(error, doc) {}; } return callback(new Error("oops")); }); @@ -144,7 +150,7 @@ describe("Getting a document", function() { }); }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); return it("should return 500", function() { return this.statusCode.should.equal(500); @@ -155,14 +161,14 @@ describe("Getting a document", function() { before(function(done) { this.timeout = 10000; [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - sinon.stub(MockWebApi, "getDocument", function(project_id, doc_id, callback) { + sinon.stub(MockWebApi, "getDocument", (project_id, doc_id, callback) => { if (callback == null) { callback = function(error, doc) {}; } return setTimeout(callback, 30000); }); return done(); }); - after(() => MockWebApi.getDocument.restore()); + after(function() { return MockWebApi.getDocument.restore(); }); return it("should return quickly(ish)", function(done) { const start = Date.now(); @@ -178,9 +184,9 @@ describe("Getting a document", function() { function __range__(left, right, inclusive) { - let range = []; - let ascending = left < right; - let end = !inclusive ? right : ascending ? right + 1 : right - 1; + const range = []; + const ascending = left < right; + const end = !inclusive ? right : ascending ? right + 1 : right - 1; for (let i = left; ascending ? i < end : i > end; ascending ? 
i++ : i--) { range.push(i); } diff --git a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js index e974070468..cd5d359dd5 100644 --- a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js +++ b/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.js b/services/document-updater/test/acceptance/coffee/RangesTests.js index e7ba085b04..e5daa62018 100644 --- a/services/document-updater/test/acceptance/coffee/RangesTests.js +++ b/services/document-updater/test/acceptance/coffee/RangesTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -19,7 +25,7 @@ const DocUpdaterClient = require("./helpers/DocUpdaterClient"); const DocUpdaterApp = require("./helpers/DocUpdaterApp"); describe("Ranges", function() { - before(done => DocUpdaterApp.ensureRunning(done)); + before(function(done) { return DocUpdaterApp.ensureRunning(done); }); describe("tracking changes from ops", function() { before(function(done) { @@ -51,7 +57,7 @@ describe("Ranges", function() { version: 0 }); const jobs = []; - for (let update of Array.from(this.updates)) { + for (const update of Array.from(this.updates)) { (update => { return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); })(update); @@ -61,7 +67,7 @@ describe("Ranges", function() { if (error != null) { throw error; } return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { if (error != null) { throw error; } - return async.series(jobs, function(error) { + return async.series(jobs, (error) => { if (error != null) { throw error; } return done(); }); @@ -102,14 +108,14 @@ describe("Ranges", function() { version: 0 }); const jobs = []; - for (let update of Array.from(this.updates)) { + for (const update of Array.from(this.updates)) { (update => { return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); })(update); } return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { if (error != null) { throw error; } - return async.series(jobs, function(error) { + return async.series(jobs, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -153,14 +159,14 @@ describe("Ranges", function() { version: 0 }); const jobs = []; - for (let update of Array.from(this.updates)) { + for (const update of Array.from(this.updates)) { (update => { return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); })(update); } return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { if (error != null) { throw error; } - return async.series(jobs, function(error) { + return async.series(jobs, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -212,7 +218,7 @@ describe("Ranges", function() { }); return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, 
error => { if (error != null) { throw error; } - return DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, this.update, function(error) { + return DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, this.update, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -367,14 +373,14 @@ describe("Ranges", function() { version: 0 }); const jobs = []; - for (let update of Array.from(this.updates)) { + for (const update of Array.from(this.updates)) { (update => { return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); })(update); } return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { if (error != null) { throw error; } - return async.series(jobs, function(error) { + return async.series(jobs, (error) => { if (error != null) { throw error; } return setTimeout(done, 200); }); @@ -423,7 +429,7 @@ describe("Ranges", function() { meta: { user_id: this.user_id } }]; const jobs = []; - for (let update of Array.from(this.updates)) { + for (const update of Array.from(this.updates)) { (update => { return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); })(update); diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js index 1255a0a938..61fba26f17 100644 --- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -182,11 +189,11 @@ describe("Setting a document", function() { return this.statusCode.should.equal(413); }); - it("should not send the updated doc lines to the web api", () => MockWebApi.setDocument.called.should.equal(false)); + it("should not send the updated doc lines to the web api", function() { return MockWebApi.setDocument.called.should.equal(false); }); - it("should not flush track changes", () => MockTrackChangesApi.flushDoc.called.should.equal(false)); + it("should not flush track changes", function() { return MockTrackChangesApi.flushDoc.called.should.equal(false); }); - return it("should not flush project history", () => MockProjectHistoryApi.flushProject.called.should.equal(false)); + return it("should not flush project history", function() { return MockProjectHistoryApi.flushProject.called.should.equal(false); }); }); describe("when the updated doc is large but under the bodyParser and HTTPController size limit", function() { diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js index 985458819a..a08fd82fac 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js @@ -1,3 +1,8 @@ +/* eslint-disable + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
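A recurring edit across the test hunks above swaps arrow-function callbacks in Mocha hooks and tests (e.g. `after(() => MockWebApi.getDocument.restore())`) for plain `function () {}` expressions. The distinction matters in Mocha: a regular function is invoked with the shared test context as `this`, while an arrow function captures the enclosing lexical `this`, so context properties assigned in `before` would be unreachable. (For hooks that never touch `this`, such as the bare `restore()` calls, the rewrite is purely for stylistic consistency.) A minimal sketch of the behaviour being preserved; the `statusCode` property is illustrative, not taken from the patch:

    // chai's should() augments Object.prototype, as in the tests above
    require('chai').should()

    describe('mocha context example', function () {
      before(function () {
        // stored on the Mocha context shared by hooks and tests
        this.statusCode = 413
      })

      it('is readable from a regular function', function () {
        return this.statusCode.should.equal(413)
      })

      it('is not readable from an arrow function', () => {
        // here `this` is the enclosing describe callback's `this` (the
        // Suite object), not the shared test context, so the property
        // set in before() is not visible
        return (this.statusCode === undefined).should.equal(true)
      })
    })

The companion `let` -> `const` changes in the decaffeinate-generated `__range__` helper are safe for a similarly non-obvious reason: `const range = []` still permits `range.push(i)`, because `const` forbids rebinding the variable, not mutating the array it refers to.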
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js index 09fad8c8f7..9f55291cd0 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -30,10 +36,10 @@ module.exports = (DocUpdaterClient = { sendUpdate(project_id, doc_id, update, callback) { if (callback == null) { callback = function(error) {}; } - return rclient.rpush(keys.pendingUpdates({doc_id}), JSON.stringify(update), function(error){ + return rclient.rpush(keys.pendingUpdates({doc_id}), JSON.stringify(update), (error) => { if (error != null) { return callback(error); } const doc_key = `${project_id}:${doc_id}`; - return rclient.sadd("DocsWithPendingUpdates", doc_key, function(error) { + return rclient.sadd("DocsWithPendingUpdates", doc_key, (error) => { if (error != null) { return callback(error); } return rclient.rpush("pending-updates-list", doc_key, callback); }); @@ -42,10 +48,10 @@ module.exports = (DocUpdaterClient = { sendUpdates(project_id, doc_id, updates, callback) { if (callback == null) { callback = function(error) {}; } - return DocUpdaterClient.preloadDoc(project_id, doc_id, function(error) { + return DocUpdaterClient.preloadDoc(project_id, doc_id, (error) => { if (error != null) { return callback(error); } const jobs = []; - for (let update of Array.from(updates)) { + for (const update of Array.from(updates)) { ((update => jobs.push(callback => DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback))))(update); } return async.series(jobs, err => DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback)); @@ -53,7 +59,7 @@ module.exports = (DocUpdaterClient = { }, waitForPendingUpdates(project_id, doc_id, callback) { - return async.retry({times: 30, interval: 100}, cb => rclient.llen(keys.pendingUpdates({doc_id}), function(err, length) { + return async.retry({times: 30, interval: 100}, cb => rclient.llen(keys.pendingUpdates({doc_id}), (err, length) => { if (length > 0) { return cb(new Error("updates still pending")); } else { @@ -65,7 +71,7 @@ module.exports = (DocUpdaterClient = { getDoc(project_id, doc_id, callback) { if (callback == null) { callback = function(error, res, body) {}; } - return request.get(`http://localhost:3003/project/${project_id}/doc/${doc_id}`, function(error, res, body) { + return request.get(`http://localhost:3003/project/${project_id}/doc/${doc_id}`, (error, res, body) => { if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { body = JSON.parse(body); } @@ -75,7 +81,7 @@ module.exports = (DocUpdaterClient = { getDocAndRecentOps(project_id, doc_id, fromVersion, callback) { if (callback == null) { callback = function(error, res, body) {}; } - return request.get(`http://localhost:3003/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`, function(error, res, body) { + return request.get(`http://localhost:3003/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`, (error, res, body) => { if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { body = 
JSON.parse(body); } @@ -143,7 +149,7 @@ module.exports = (DocUpdaterClient = { getProjectDocs(project_id, projectStateHash, callback) { if (callback == null) { callback = function() {}; } - return request.get(`http://localhost:3003/project/${project_id}/doc?state=${projectStateHash}`, function(error, res, body) { + return request.get(`http://localhost:3003/project/${project_id}/doc?state=${projectStateHash}`, (error, res, body) => { if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { body = JSON.parse(body); } @@ -161,9 +167,9 @@ module.exports = (DocUpdaterClient = { }); function __range__(left, right, inclusive) { - let range = []; - let ascending = left < right; - let end = !inclusive ? right : ascending ? right + 1 : right - 1; + const range = []; + const ascending = left < right; + const end = !inclusive ? right : ascending ? right + 1 : right - 1; for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) { range.push(i); } diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js index ba084fd108..84e2d7075c 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js +++ b/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -16,7 +22,7 @@ module.exports = (MockProjectHistoryApi = { run() { app.post("/project/:project_id/flush", (req, res, next) => { - return this.flushProject(req.params.project_id, function(error) { + return this.flushProject(req.params.project_id, (error) => { if (error != null) { return res.sendStatus(500); } else { @@ -25,7 +31,7 @@ module.exports = (MockProjectHistoryApi = { }); }); - return app.listen(3054, function(error) { + return app.listen(3054, (error) => { if (error != null) { throw error; } }); } diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js index bd217d5545..ef14b85beb 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js +++ b/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
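The Mock*Api helpers in these hunks all follow one pattern: a small Express app whose route handlers only delegate to plain methods on the exported object, so the acceptance tests can `sinon.spy` or `sinon.stub` those methods and assert on calls arriving over HTTP. Note also that the three-argument `sinon.stub(MockWebApi, "getDocument", fn)` form seen in the earlier hunks is sinon's legacy API; later sinon releases removed it in favour of `sinon.stub(obj, 'method').callsFake(fn)`. A minimal sketch of the helper pattern, using an illustrative name and port rather than ones from the patch:

    const express = require('express')
    const sinon = require('sinon')

    const app = express()

    const MockFlushApi = {
      // The plain method that tests spy on or stub out.
      flushDoc(docId, callback) {
        callback(null)
      },

      run() {
        // The route only delegates, so replacing flushDoc changes the
        // HTTP behaviour without touching the Express wiring.
        app.post('/doc/:doc_id/flush', (req, res) => {
          this.flushDoc(req.params.doc_id, (error) => {
            if (error != null) {
              return res.sendStatus(500)
            }
            return res.sendStatus(204)
          })
        })

        app.listen(3999, (error) => {
          if (error != null) {
            throw error
          }
        })
      }
    }

    MockFlushApi.run()

    // In a test: observe calls made via HTTP, then restore the original.
    sinon.spy(MockFlushApi, 'flushDoc')
    // ... exercise the service under test ...
    MockFlushApi.flushDoc.restore()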
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -16,7 +22,7 @@ module.exports = (MockTrackChangesApi = { run() { app.post("/project/:project_id/doc/:doc_id/flush", (req, res, next) => { - return this.flushDoc(req.params.doc_id, function(error) { + return this.flushDoc(req.params.doc_id, (error) => { if (error != null) { return res.sendStatus(500); } else { @@ -25,9 +31,9 @@ module.exports = (MockTrackChangesApi = { }); }); - return app.listen(3015, function(error) { + return app.listen(3015, (error) => { if (error != null) { throw error; } - }).on("error", function(error) { + }).on("error", (error) => { console.error("error starting MockTrackChangesApi:", error.message); return process.exit(1); }); diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js index 5069c20cb6..653dc3bf2a 100644 --- a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js +++ b/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -41,7 +48,7 @@ module.exports = (MockWebApi = { run() { app.get("/project/:project_id/doc/:doc_id", (req, res, next) => { - return this.getDocument(req.params.project_id, req.params.doc_id, function(error, doc) { + return this.getDocument(req.params.project_id, req.params.doc_id, (error, doc) => { if (error != null) { return res.sendStatus(500); } else if (doc != null) { @@ -53,7 +60,7 @@ module.exports = (MockWebApi = { }); app.post("/project/:project_id/doc/:doc_id", bodyParser.json({limit: MAX_REQUEST_SIZE}), (req, res, next) => { - return MockWebApi.setDocument(req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, function(error) { + return MockWebApi.setDocument(req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, (error) => { if (error != null) { return res.sendStatus(500); } else { @@ -62,9 +69,9 @@ module.exports = (MockWebApi = { }); }); - return app.listen(3000, function(error) { + return app.listen(3000, (error) => { if (error != null) { throw error; } - }).on("error", function(error) { + }).on("error", (error) => { console.error("error starting MockWebApi:", error.message); return process.exit(1); }); From 9680e62bb12418b0e84733e9ec92895f0bfefd99 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:12:41 +0200 Subject: [PATCH 622/769] decaffeinate: rename test/acceptance/coffee to test/acceptance/js --- .../test/acceptance/{coffee => js}/ApplyingUpdatesToADocTests.js | 0 .../{coffee => js}/ApplyingUpdatesToProjectStructureTests.js | 0 .../test/acceptance/{coffee => js}/DeletingADocumentTests.js | 0 .../test/acceptance/{coffee => js}/DeletingAProjectTests.js | 0 .../test/acceptance/{coffee => js}/FlushingAProjectTests.js | 0 .../test/acceptance/{coffee => js}/FlushingDocsTests.js | 0 .../test/acceptance/{coffee => js}/GettingADocumentTests.js | 0 .../test/acceptance/{coffee => js}/GettingProjectDocsTests.js | 0 .../test/acceptance/{coffee => js}/RangesTests.js | 0 .../test/acceptance/{coffee => 
js}/SettingADocumentTests.js | 0 .../test/acceptance/{coffee => js}/helpers/DocUpdaterApp.js | 0 .../test/acceptance/{coffee => js}/helpers/DocUpdaterClient.js | 0 .../acceptance/{coffee => js}/helpers/MockProjectHistoryApi.js | 0 .../test/acceptance/{coffee => js}/helpers/MockTrackChangesApi.js | 0 .../test/acceptance/{coffee => js}/helpers/MockWebApi.js | 0 15 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/test/acceptance/{coffee => js}/ApplyingUpdatesToADocTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/ApplyingUpdatesToProjectStructureTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/DeletingADocumentTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/DeletingAProjectTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/FlushingAProjectTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/FlushingDocsTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/GettingADocumentTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/GettingProjectDocsTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/RangesTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/SettingADocumentTests.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/helpers/DocUpdaterApp.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/helpers/DocUpdaterClient.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/helpers/MockProjectHistoryApi.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/helpers/MockTrackChangesApi.js (100%) rename services/document-updater/test/acceptance/{coffee => js}/helpers/MockWebApi.js (100%) diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.js rename to services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/ApplyingUpdatesToProjectStructureTests.js rename to services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js diff --git a/services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/DeletingADocumentTests.js rename to services/document-updater/test/acceptance/js/DeletingADocumentTests.js diff --git a/services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/DeletingAProjectTests.js rename to services/document-updater/test/acceptance/js/DeletingAProjectTests.js diff --git a/services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js similarity index 100% rename from 
services/document-updater/test/acceptance/coffee/FlushingAProjectTests.js rename to services/document-updater/test/acceptance/js/FlushingAProjectTests.js diff --git a/services/document-updater/test/acceptance/coffee/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/FlushingDocsTests.js rename to services/document-updater/test/acceptance/js/FlushingDocsTests.js diff --git a/services/document-updater/test/acceptance/coffee/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/GettingADocumentTests.js rename to services/document-updater/test/acceptance/js/GettingADocumentTests.js diff --git a/services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/GettingProjectDocsTests.js rename to services/document-updater/test/acceptance/js/GettingProjectDocsTests.js diff --git a/services/document-updater/test/acceptance/coffee/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/RangesTests.js rename to services/document-updater/test/acceptance/js/RangesTests.js diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/SettingADocumentTests.js rename to services/document-updater/test/acceptance/js/SettingADocumentTests.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/DocUpdaterApp.js rename to services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/DocUpdaterClient.js rename to services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/MockProjectHistoryApi.js rename to services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js b/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/MockTrackChangesApi.js rename to services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js diff --git a/services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js similarity index 100% rename from services/document-updater/test/acceptance/coffee/helpers/MockWebApi.js 
rename to services/document-updater/test/acceptance/js/helpers/MockWebApi.js From d15738cb984b30f049e4ca434348a0644308d1f2 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:12:47 +0200 Subject: [PATCH 623/769] prettier: convert test/acceptance decaffeinated files to Prettier format --- .../js/ApplyingUpdatesToADocTests.js | 1170 ++++++++++------- .../ApplyingUpdatesToProjectStructureTests.js | 785 ++++++----- .../acceptance/js/DeletingADocumentTests.js | 279 ++-- .../acceptance/js/DeletingAProjectTests.js | 442 ++++--- .../acceptance/js/FlushingAProjectTests.js | 214 +-- .../test/acceptance/js/FlushingDocsTests.js | 229 ++-- .../acceptance/js/GettingADocumentTests.js | 396 +++--- .../acceptance/js/GettingProjectDocsTests.js | 238 ++-- .../test/acceptance/js/RangesTests.js | 1126 +++++++++------- .../acceptance/js/SettingADocumentTests.js | 700 ++++++---- .../acceptance/js/helpers/DocUpdaterApp.js | 60 +- .../acceptance/js/helpers/DocUpdaterClient.js | 383 ++++-- .../js/helpers/MockProjectHistoryApi.js | 52 +- .../js/helpers/MockTrackChangesApi.js | 61 +- .../test/acceptance/js/helpers/MockWebApi.js | 161 ++- 15 files changed, 3810 insertions(+), 2486 deletions(-) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index 928e656d03..aab254ff96 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -11,495 +11,785 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require("sinon"); -const chai = require("chai"); -chai.should(); -const { - expect -} = chai; -const async = require("async"); -const Settings = require('settings-sharelatex'); -const rclient_history = require("redis-sharelatex").createClient(Settings.redis.history); // note: this is track changes, not project-history -const rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history); -const rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater); -const Keys = Settings.redis.documentupdater.key_schema; -const HistoryKeys = Settings.redis.history.key_schema; -const ProjectHistoryKeys = Settings.redis.project_history.key_schema; +const sinon = require('sinon') +const chai = require('chai') +chai.should() +const { expect } = chai +const async = require('async') +const Settings = require('settings-sharelatex') +const rclient_history = require('redis-sharelatex').createClient( + Settings.redis.history +) // note: this is track changes, not project-history +const rclient_project_history = require('redis-sharelatex').createClient( + Settings.redis.project_history +) +const rclient_du = require('redis-sharelatex').createClient( + Settings.redis.documentupdater +) +const Keys = Settings.redis.documentupdater.key_schema +const HistoryKeys = Settings.redis.history.key_schema +const ProjectHistoryKeys = Settings.redis.project_history.key_schema -const MockTrackChangesApi = require("./helpers/MockTrackChangesApi"); -const MockWebApi = require("./helpers/MockWebApi"); -const DocUpdaterClient = require("./helpers/DocUpdaterClient"); -const DocUpdaterApp = require("./helpers/DocUpdaterApp"); +const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') +const MockWebApi = require('./helpers/MockWebApi') +const 
DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') -describe("Applying updates to a doc", function() { - before(function(done) { - this.lines = ["one", "two", "three"]; - this.version = 42; - this.update = { - doc: this.doc_id, - op: [{ - i: "one and a half\n", - p: 4 - }], - v: this.version - }; - this.result = ["one", "one and a half", "two", "three"]; - return DocUpdaterApp.ensureRunning(done); - }); +describe('Applying updates to a doc', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.update = { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4 + } + ], + v: this.version + } + this.result = ['one', 'one and a half', 'two', 'three'] + return DocUpdaterApp.ensureRunning(done) + }) - describe("when the document is not loaded", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - sinon.spy(MockWebApi, "getDocument"); - this.startTime = Date.now(); - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - return null; - }); + describe('when the document is not loaded', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + sinon.spy(MockWebApi, 'getDocument') + this.startTime = Date.now() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return null + }) - after(function() { return MockWebApi.getDocument.restore(); }); + after(function () { + return MockWebApi.getDocument.restore() + }) - it("should load the document from the web API", function() { - return MockWebApi.getDocument - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - }); + it('should load the document from the web API', function () { + return MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) - it("should update the doc", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.result); - return done(); - }); - return null; - }); + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.result) + return done() + } + ) + return null + }) - it("should push the applied updates to the track changes api", function(done) { - rclient_history.lrange(HistoryKeys.uncompressedHistoryOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { - if (error != null) { throw error; } - JSON.parse(updates[0]).op.should.deep.equal(this.update.op); - return rclient_history.sismember(HistoryKeys.docsWithHistoryOps({project_id: this.project_id}), this.doc_id, (error, result) => { - if (error != null) { throw error; } - result.should.equal(1); - return done(); - }); - }); - return null; - }); + it('should push the applied updates to the track changes api', function (done) { + rclient_history.lrange( + HistoryKeys.uncompressedHistoryOps({ doc_id: 
this.doc_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } + JSON.parse(updates[0]).op.should.deep.equal(this.update.op) + return rclient_history.sismember( + HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }), + this.doc_id, + (error, result) => { + if (error != null) { + throw error + } + result.should.equal(1) + return done() + } + ) + } + ) + return null + }) - it("should push the applied updates to the project history changes api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - if (error != null) { throw error; } - JSON.parse(updates[0]).op.should.deep.equal(this.update.op); - return done(); - }); - return null; - }); + it('should push the applied updates to the project history changes api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } + JSON.parse(updates[0]).op.should.deep.equal(this.update.op) + return done() + } + ) + return null + }) - it("should set the first op timestamp", function(done) { - rclient_project_history.get(ProjectHistoryKeys.projectHistoryFirstOpTimestamp({project_id: this.project_id}), (error, result) => { - if (error != null) { throw error; } - result.should.be.within(this.startTime, Date.now()); - this.firstOpTimestamp = result; - return done(); - }); - return null; - }); + it('should set the first op timestamp', function (done) { + rclient_project_history.get( + ProjectHistoryKeys.projectHistoryFirstOpTimestamp({ + project_id: this.project_id + }), + (error, result) => { + if (error != null) { + throw error + } + result.should.be.within(this.startTime, Date.now()) + this.firstOpTimestamp = result + return done() + } + ) + return null + }) - return describe("when sending another update", function() { - before(function(done) { - this.timeout = 10000; - this.second_update = Object.create(this.update); - this.second_update.v = this.version + 1; - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.second_update, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - return null; - }); + return describe('when sending another update', function () { + before(function (done) { + this.timeout = 10000 + this.second_update = Object.create(this.update) + this.second_update.v = this.version + 1 + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.second_update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return null + }) - return it("should not change the first op timestamp", function(done) { - rclient_project_history.get(ProjectHistoryKeys.projectHistoryFirstOpTimestamp({project_id: this.project_id}), (error, result) => { - if (error != null) { throw error; } - result.should.equal(this.firstOpTimestamp); - return done(); - }); - return null; - }); - }); - }); + return it('should not change the first op timestamp', function (done) { + rclient_project_history.get( + ProjectHistoryKeys.projectHistoryFirstOpTimestamp({ + project_id: this.project_id + }), + (error, result) => { + if (error != null) { + throw error + } + result.should.equal(this.firstOpTimestamp) + return done() + } + ) + return null + }) + }) + }) - describe("when the document is loaded", function() { - before(function(done) { - [this.project_id, this.doc_id] = 
Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + describe('when the document is loaded', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - sinon.spy(MockWebApi, "getDocument"); - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - }); - return null; - }); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + if (error != null) { + throw error + } + sinon.spy(MockWebApi, 'getDocument') + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + }) + return null + }) - after(function() { return MockWebApi.getDocument.restore(); }); + after(function () { + return MockWebApi.getDocument.restore() + }) - it("should not need to call the web api", function() { return MockWebApi.getDocument.called.should.equal(false); }); + it('should not need to call the web api', function () { + return MockWebApi.getDocument.called.should.equal(false) + }) - it("should update the doc", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.result); - return done(); - }); - return null; - }); + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.result) + return done() + } + ) + return null + }) - it("should push the applied updates to the track changes api", function(done) { - rclient_history.lrange(HistoryKeys.uncompressedHistoryOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { - JSON.parse(updates[0]).op.should.deep.equal(this.update.op); - return rclient_history.sismember(HistoryKeys.docsWithHistoryOps({project_id: this.project_id}), this.doc_id, (error, result) => { - result.should.equal(1); - return done(); - }); - }); - return null; - }); + it('should push the applied updates to the track changes api', function (done) { + rclient_history.lrange( + HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }), + 0, + -1, + (error, updates) => { + JSON.parse(updates[0]).op.should.deep.equal(this.update.op) + return rclient_history.sismember( + HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }), + this.doc_id, + (error, result) => { + result.should.equal(1) + return done() + } + ) + } + ) + return null + }) - return it("should push the applied updates to the project history changes api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - JSON.parse(updates[0]).op.should.deep.equal(this.update.op); - return done(); - }); - return null; - }); - }); + return it('should push the applied updates to the project history changes api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + 
JSON.parse(updates[0]).op.should.deep.equal(this.update.op) + return done() + } + ) + return null + }) + }) - describe("when the document is loaded and is using project-history only", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + describe('when the document is loaded and is using project-history only', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version, projectHistoryType: 'project-history'}); - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - sinon.spy(MockWebApi, "getDocument"); - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - }); - return null; - }); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + projectHistoryType: 'project-history' + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + if (error != null) { + throw error + } + sinon.spy(MockWebApi, 'getDocument') + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + }) + return null + }) - after(function() { return MockWebApi.getDocument.restore(); }); + after(function () { + return MockWebApi.getDocument.restore() + }) - it("should update the doc", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.result); - return done(); - }); - return null; - }); + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.result) + return done() + } + ) + return null + }) - it("should not push any applied updates to the track changes api", function(done) { - rclient_history.lrange(HistoryKeys.uncompressedHistoryOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { - updates.length.should.equal(0); - return done(); - }); - return null; - }); + it('should not push any applied updates to the track changes api', function (done) { + rclient_history.lrange( + HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }), + 0, + -1, + (error, updates) => { + updates.length.should.equal(0) + return done() + } + ) + return null + }) - return it("should push the applied updates to the project history changes api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - JSON.parse(updates[0]).op.should.deep.equal(this.update.op); - return done(); - }); - return null; - }); - }); + return it('should push the applied updates to the project history changes api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + JSON.parse(updates[0]).op.should.deep.equal(this.update.op) + return done() + } + ) + return null + }) + }) - describe("when the document has been deleted", function() { - describe("when the ops come in a single linear order", function() { - before(function(done) { - [this.project_id, 
this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - const lines = ["", "", ""]; - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines, version: 0}); - this.updates = [ - { doc_id: this.doc_id, v: 0, op: [{i: "h", p: 0} ] }, - { doc_id: this.doc_id, v: 1, op: [{i: "e", p: 1} ] }, - { doc_id: this.doc_id, v: 2, op: [{i: "l", p: 2} ] }, - { doc_id: this.doc_id, v: 3, op: [{i: "l", p: 3} ] }, - { doc_id: this.doc_id, v: 4, op: [{i: "o", p: 4} ] }, - { doc_id: this.doc_id, v: 5, op: [{i: " ", p: 5} ] }, - { doc_id: this.doc_id, v: 6, op: [{i: "w", p: 6} ] }, - { doc_id: this.doc_id, v: 7, op: [{i: "o", p: 7} ] }, - { doc_id: this.doc_id, v: 8, op: [{i: "r", p: 8} ] }, - { doc_id: this.doc_id, v: 9, op: [{i: "l", p: 9} ] }, - { doc_id: this.doc_id, v: 10, op: [{i: "d", p: 10}] } - ]; - this.my_result = ["hello world", "", ""]; - return done(); - }); + describe('when the document has been deleted', function () { + describe('when the ops come in a single linear order', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + const lines = ['', '', ''] + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines, + version: 0 + }) + this.updates = [ + { doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] }, + { doc_id: this.doc_id, v: 1, op: [{ i: 'e', p: 1 }] }, + { doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] }, + { doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] }, + { doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] }, + { doc_id: this.doc_id, v: 5, op: [{ i: ' ', p: 5 }] }, + { doc_id: this.doc_id, v: 6, op: [{ i: 'w', p: 6 }] }, + { doc_id: this.doc_id, v: 7, op: [{ i: 'o', p: 7 }] }, + { doc_id: this.doc_id, v: 8, op: [{ i: 'r', p: 8 }] }, + { doc_id: this.doc_id, v: 9, op: [{ i: 'l', p: 9 }] }, + { doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] } + ] + this.my_result = ['hello world', '', ''] + return done() + }) - it("should be able to continue applying updates when the project has been deleted", function(done) { - let update; - const actions = []; - for (update of Array.from(this.updates.slice(0,6))) { - (update => { - return actions.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); - })(update); - } - actions.push(callback => DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)); - for (update of Array.from(this.updates.slice(6))) { - (update => { - return actions.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); - })(update); - } + it('should be able to continue applying updates when the project has been deleted', function (done) { + let update + const actions = [] + for (update of Array.from(this.updates.slice(0, 6))) { + ;((update) => { + return actions.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } + actions.push((callback) => + DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback) + ) + for (update of Array.from(this.updates.slice(6))) { + ;((update) => { + return actions.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } - async.series(actions, error => { - if (error != null) { throw error; } - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.my_result); - return done(); - }); - }); - 
return null; - }); + async.series(actions, (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.my_result) + return done() + } + ) + }) + return null + }) - it("should push the applied updates to the track changes api", function(done) { - rclient_history.lrange(HistoryKeys.uncompressedHistoryOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { - updates = (Array.from(updates).map((u) => JSON.parse(u))); - for (let i = 0; i < this.updates.length; i++) { - const appliedUpdate = this.updates[i]; - appliedUpdate.op.should.deep.equal(updates[i].op); - } + it('should push the applied updates to the track changes api', function (done) { + rclient_history.lrange( + HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }), + 0, + -1, + (error, updates) => { + updates = Array.from(updates).map((u) => JSON.parse(u)) + for (let i = 0; i < this.updates.length; i++) { + const appliedUpdate = this.updates[i] + appliedUpdate.op.should.deep.equal(updates[i].op) + } - return rclient_history.sismember(HistoryKeys.docsWithHistoryOps({project_id: this.project_id}), this.doc_id, (error, result) => { - result.should.equal(1); - return done(); - }); - }); - return null; - }); + return rclient_history.sismember( + HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }), + this.doc_id, + (error, result) => { + result.should.equal(1) + return done() + } + ) + } + ) + return null + }) - return it("should store the doc ops in the correct order", function(done) { - rclient_du.lrange(Keys.docOps({doc_id: this.doc_id}), 0, -1, (error, updates) => { - updates = (Array.from(updates).map((u) => JSON.parse(u))); - for (let i = 0; i < this.updates.length; i++) { - const appliedUpdate = this.updates[i]; - appliedUpdate.op.should.deep.equal(updates[i].op); - } - return done(); - }); - return null; - }); - }); + return it('should store the doc ops in the correct order', function (done) { + rclient_du.lrange( + Keys.docOps({ doc_id: this.doc_id }), + 0, + -1, + (error, updates) => { + updates = Array.from(updates).map((u) => JSON.parse(u)) + for (let i = 0; i < this.updates.length; i++) { + const appliedUpdate = this.updates[i] + appliedUpdate.op.should.deep.equal(updates[i].op) + } + return done() + } + ) + return null + }) + }) - return describe("when older ops come in after the delete", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - const lines = ["", "", ""]; - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines, version: 0}); - this.updates = [ - { doc_id: this.doc_id, v: 0, op: [{i: "h", p: 0} ] }, - { doc_id: this.doc_id, v: 1, op: [{i: "e", p: 1} ] }, - { doc_id: this.doc_id, v: 2, op: [{i: "l", p: 2} ] }, - { doc_id: this.doc_id, v: 3, op: [{i: "l", p: 3} ] }, - { doc_id: this.doc_id, v: 4, op: [{i: "o", p: 4} ] }, - { doc_id: this.doc_id, v: 0, op: [{i: "world", p: 1} ] } - ]; - this.my_result = ["hello", "world", ""]; - return done(); - }); + return describe('when older ops come in after the delete', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + const lines = ['', '', ''] + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines, + version: 0 + }) + this.updates = [ + { doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] }, + { doc_id: this.doc_id, v: 1, op: 
[{ i: 'e', p: 1 }] }, + { doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] }, + { doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] }, + { doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] }, + { doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] } + ] + this.my_result = ['hello', 'world', ''] + return done() + }) - return it("should be able to continue applying updates when the project has been deleted", function(done) { - let update; - const actions = []; - for (update of Array.from(this.updates.slice(0,5))) { - (update => { - return actions.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); - })(update); - } - actions.push(callback => DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)); - for (update of Array.from(this.updates.slice(5))) { - (update => { - return actions.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); - })(update); - } + return it('should be able to continue applying updates when the project has been deleted', function (done) { + let update + const actions = [] + for (update of Array.from(this.updates.slice(0, 5))) { + ;((update) => { + return actions.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } + actions.push((callback) => + DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback) + ) + for (update of Array.from(this.updates.slice(5))) { + ;((update) => { + return actions.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } - async.series(actions, error => { - if (error != null) { throw error; } - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.my_result); - return done(); - }); - }); - return null; - }); - }); - }); + async.series(actions, (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.my_result) + return done() + } + ) + }) + return null + }) + }) + }) - describe("with a broken update", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - this.broken_update = { doc_id: this.doc_id, v: this.version, op: [{d: "not the correct content", p: 0} ] }; - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + describe('with a broken update', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + this.broken_update = { + doc_id: this.doc_id, + v: this.version, + op: [{ d: 'not the correct content', p: 0 }] + } + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) - DocUpdaterClient.subscribeToAppliedOps(this.messageCallback = sinon.stub()); + DocUpdaterClient.subscribeToAppliedOps( + (this.messageCallback = sinon.stub()) + ) - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.broken_update, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - return null; - }); + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.broken_update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return null + }) - 
it("should not update the doc", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.lines); - return done(); - }); - return null; - }); + it('should not update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.lines) + return done() + } + ) + return null + }) - return it("should send a message with an error", function() { - this.messageCallback.called.should.equal(true); - const [channel, message] = Array.from(this.messageCallback.args[0]); - channel.should.equal("applied-ops"); - return JSON.parse(message).should.deep.include({ - project_id: this.project_id, - doc_id: this.doc_id, - error:'Delete component does not match' - }); - }); -}); + return it('should send a message with an error', function () { + this.messageCallback.called.should.equal(true) + const [channel, message] = Array.from(this.messageCallback.args[0]) + channel.should.equal('applied-ops') + return JSON.parse(message).should.deep.include({ + project_id: this.project_id, + doc_id: this.doc_id, + error: 'Delete component does not match' + }) + }) + }) - describe("with enough updates to flush to the track changes api", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - const updates = []; - for (let v = 0; v <= 199; v++) { // Should flush after 100 ops - updates.push({ - doc_id: this.doc_id, - op: [{i: v.toString(), p: 0}], - v - }); - } + describe('with enough updates to flush to the track changes api', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + const updates = [] + for (let v = 0; v <= 199; v++) { + // Should flush after 100 ops + updates.push({ + doc_id: this.doc_id, + op: [{ i: v.toString(), p: 0 }], + v + }) + } - sinon.spy(MockTrackChangesApi, "flushDoc"); + sinon.spy(MockTrackChangesApi, 'flushDoc') - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: 0}); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: 0 + }) - // Send updates in chunks to causes multiple flushes - const actions = []; - for (let i = 0; i <= 19; i++) { - (i => { - return actions.push(cb => { - return DocUpdaterClient.sendUpdates(this.project_id, this.doc_id, updates.slice(i*10, (i+1)*10), cb); - }); - })(i); - } - async.series(actions, error => { - if (error != null) { throw error; } - return setTimeout(done, 2000); - }); - return null; - }); + // Send updates in chunks to causes multiple flushes + const actions = [] + for (let i = 0; i <= 19; i++) { + ;((i) => { + return actions.push((cb) => { + return DocUpdaterClient.sendUpdates( + this.project_id, + this.doc_id, + updates.slice(i * 10, (i + 1) * 10), + cb + ) + }) + })(i) + } + async.series(actions, (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 2000) + }) + return null + }) - after(function() { return MockTrackChangesApi.flushDoc.restore(); }); + after(function () { + return MockTrackChangesApi.flushDoc.restore() + }) - return it("should flush the doc twice", function() { return MockTrackChangesApi.flushDoc.calledTwice.should.equal(true); }); - }); + return it('should flush the doc twice', function () { + return MockTrackChangesApi.flushDoc.calledTwice.should.equal(true) + }) + }) - describe("when there is no 
version in Mongo", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: this.lines - }); + describe('when there is no version in Mongo', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines + }) - const update = { - doc: this.doc_id, - op: this.update.op, - v: 0 - }; - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - return null; - }); + const update = { + doc: this.doc_id, + op: this.update.op, + v: 0 + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return null + }) - return it("should update the doc (using version = 0)", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.result); - return done(); - }); - return null; - }); - }); + return it('should update the doc (using version = 0)', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.result) + return done() + } + ) + return null + }) + }) - return describe("when the sending duplicate ops", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + return describe('when the sending duplicate ops', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) - DocUpdaterClient.subscribeToAppliedOps(this.messageCallback = sinon.stub()); + DocUpdaterClient.subscribeToAppliedOps( + (this.messageCallback = sinon.stub()) + ) - // One user delete 'one', the next turns it into 'once'. The second becomes a NOP. - DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, { - doc: this.doc_id, - op: [{ - i: "one and a half\n", - p: 4 - }], - v: this.version, - meta: { - source: "ikHceq3yfAdQYzBo4-xZ" - } - }, error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, { - doc: this.doc_id, - op: [{ - i: "one and a half\n", - p: 4 - }], - v: this.version, - dupIfSource: ["ikHceq3yfAdQYzBo4-xZ"], - meta: { - source: "ikHceq3yfAdQYzBo4-xZ" - } - }, error => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - } - , 200); - }); - return null; - }); + // One user delete 'one', the next turns it into 'once'. The second becomes a NOP. 
+ DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4 + } + ], + v: this.version, + meta: { + source: 'ikHceq3yfAdQYzBo4-xZ' + } + }, + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4 + } + ], + v: this.version, + dupIfSource: ['ikHceq3yfAdQYzBo4-xZ'], + meta: { + source: 'ikHceq3yfAdQYzBo4-xZ' + } + }, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + }, 200) + } + ) + return null + }) - it("should update the doc", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.result); - return done(); - }); - return null; - }); - - return it("should return a message about duplicate ops", function() { - this.messageCallback.calledTwice.should.equal(true); - this.messageCallback.args[0][0].should.equal("applied-ops"); - expect(JSON.parse(this.messageCallback.args[0][1]).op.dup).to.be.undefined; - this.messageCallback.args[1][0].should.equal("applied-ops"); - return expect(JSON.parse(this.messageCallback.args[1][1]).op.dup).to.equal(true); - }); - }); -}); + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.result) + return done() + } + ) + return null + }) + return it('should return a message about duplicate ops', function () { + this.messageCallback.calledTwice.should.equal(true) + this.messageCallback.args[0][0].should.equal('applied-ops') + expect(JSON.parse(this.messageCallback.args[0][1]).op.dup).to.be.undefined + this.messageCallback.args[1][0].should.equal('applied-ops') + return expect( + JSON.parse(this.messageCallback.args[1][1]).op.dup + ).to.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index d20c3f8aef..793a9fa5a8 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -11,360 +11,519 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require("sinon"); -const chai = require("chai"); -chai.should(); -const Settings = require('settings-sharelatex'); -const rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history); -const ProjectHistoryKeys = Settings.redis.project_history.key_schema; +const sinon = require('sinon') +const chai = require('chai') +chai.should() +const Settings = require('settings-sharelatex') +const rclient_project_history = require('redis-sharelatex').createClient( + Settings.redis.project_history +) +const ProjectHistoryKeys = Settings.redis.project_history.key_schema -const MockProjectHistoryApi = require("./helpers/MockProjectHistoryApi"); -const MockWebApi = require("./helpers/MockWebApi"); -const DocUpdaterClient = require("./helpers/DocUpdaterClient"); -const DocUpdaterApp = require("./helpers/DocUpdaterApp"); +const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') +const MockWebApi = 
require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') -describe("Applying updates to a project's structure", function() { - before(function() { - this.user_id = 'user-id-123'; - return this.version = 1234; - }); +describe("Applying updates to a project's structure", function () { + before(function () { + this.user_id = 'user-id-123' + return (this.version = 1234) + }) - describe("renaming a file", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.fileUpdate = { - id: DocUpdaterClient.randomId(), - pathname: '/file-path', - newPathname: '/new-file-path' - }; - this.fileUpdates = [ this.fileUpdate ]; - return DocUpdaterApp.ensureRunning(error => { - if (error != null) { throw error; } - return DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, [], this.fileUpdates, this.version, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - }); - }); + describe('renaming a file', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.fileUpdate = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path', + newPathname: '/new-file-path' + } + this.fileUpdates = [this.fileUpdate] + return DocUpdaterApp.ensureRunning((error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + [], + this.fileUpdates, + this.version, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + }) + }) - return it("should push the applied file renames to the project history api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - if (error != null) { throw error; } + return it('should push the applied file renames to the project history api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } - const update = JSON.parse(updates[0]); - update.file.should.equal(this.fileUpdate.id); - update.pathname.should.equal('/file-path'); - update.new_pathname.should.equal('/new-file-path'); - update.meta.user_id.should.equal(this.user_id); - update.meta.ts.should.be.a('string'); - update.version.should.equal(`${this.version}.0`); + const update = JSON.parse(updates[0]) + update.file.should.equal(this.fileUpdate.id) + update.pathname.should.equal('/file-path') + update.new_pathname.should.equal('/new-file-path') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) - return done(); - }); - return null; - }); - }); + return done() + } + ) + return null + }) + }) - describe("renaming a document", function() { - before(function() { - this.docUpdate = { - id: DocUpdaterClient.randomId(), - pathname: '/doc-path', - newPathname: '/new-doc-path' - }; - return this.docUpdates = [ this.docUpdate ];}); + describe('renaming a document', function () { + before(function () { + this.docUpdate = { + id: DocUpdaterClient.randomId(), + pathname: '/doc-path', + newPathname: '/new-doc-path' + } + return (this.docUpdates = [this.docUpdate]) + }) - describe("when the document is not loaded", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - 
DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - return null; - }); + describe('when the document is not loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.docUpdates, + [], + this.version, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return null + }) - return it("should push the applied doc renames to the project history api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - if (error != null) { throw error; } + return it('should push the applied doc renames to the project history api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } - const update = JSON.parse(updates[0]); - update.doc.should.equal(this.docUpdate.id); - update.pathname.should.equal('/doc-path'); - update.new_pathname.should.equal('/new-doc-path'); - update.meta.user_id.should.equal(this.user_id); - update.meta.ts.should.be.a('string'); - update.version.should.equal(`${this.version}.0`); + const update = JSON.parse(updates[0]) + update.doc.should.equal(this.docUpdate.id) + update.pathname.should.equal('/doc-path') + update.new_pathname.should.equal('/new-doc-path') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) - return done(); - }); - return null; - }); - }); + return done() + } + ) + return null + }) + }) - return describe("when the document is loaded", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - MockWebApi.insertDoc(this.project_id, this.docUpdate.id, {}); - DocUpdaterClient.preloadDoc(this.project_id, this.docUpdate.id, error => { - if (error != null) { throw error; } - sinon.spy(MockWebApi, "getDocument"); - return DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - }); - return null; - }); + return describe('when the document is loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.docUpdate.id, {}) + DocUpdaterClient.preloadDoc( + this.project_id, + this.docUpdate.id, + (error) => { + if (error != null) { + throw error + } + sinon.spy(MockWebApi, 'getDocument') + return DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.docUpdates, + [], + this.version, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + } + ) + return null + }) - after(function() { return MockWebApi.getDocument.restore(); }); + after(function () { + return MockWebApi.getDocument.restore() + }) - it("should update the doc", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.docUpdate.id, (error, res, doc) => { - doc.pathname.should.equal(this.docUpdate.newPathname); - return done(); - }); - return null; - }); + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.docUpdate.id, + (error, res, 
doc) => { + doc.pathname.should.equal(this.docUpdate.newPathname) + return done() + } + ) + return null + }) - return it("should push the applied doc renames to the project history api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - if (error != null) { throw error; } + return it('should push the applied doc renames to the project history api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } - const update = JSON.parse(updates[0]); - update.doc.should.equal(this.docUpdate.id); - update.pathname.should.equal('/doc-path'); - update.new_pathname.should.equal('/new-doc-path'); - update.meta.user_id.should.equal(this.user_id); - update.meta.ts.should.be.a('string'); - update.version.should.equal(`${this.version}.0`); + const update = JSON.parse(updates[0]) + update.doc.should.equal(this.docUpdate.id) + update.pathname.should.equal('/doc-path') + update.new_pathname.should.equal('/new-doc-path') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) - return done(); - }); - return null; - }); - }); - }); + return done() + } + ) + return null + }) + }) + }) - describe("renaming multiple documents and files", function() { - before(function() { - this.docUpdate0 = { - id: DocUpdaterClient.randomId(), - pathname: '/doc-path0', - newPathname: '/new-doc-path0' - }; - this.docUpdate1 = { - id: DocUpdaterClient.randomId(), - pathname: '/doc-path1', - newPathname: '/new-doc-path1' - }; - this.docUpdates = [ this.docUpdate0, this.docUpdate1 ]; - this.fileUpdate0 = { - id: DocUpdaterClient.randomId(), - pathname: '/file-path0', - newPathname: '/new-file-path0' - }; - this.fileUpdate1 = { - id: DocUpdaterClient.randomId(), - pathname: '/file-path1', - newPathname: '/new-file-path1' - }; - return this.fileUpdates = [ this.fileUpdate0, this.fileUpdate1 ];}); + describe('renaming multiple documents and files', function () { + before(function () { + this.docUpdate0 = { + id: DocUpdaterClient.randomId(), + pathname: '/doc-path0', + newPathname: '/new-doc-path0' + } + this.docUpdate1 = { + id: DocUpdaterClient.randomId(), + pathname: '/doc-path1', + newPathname: '/new-doc-path1' + } + this.docUpdates = [this.docUpdate0, this.docUpdate1] + this.fileUpdate0 = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path0', + newPathname: '/new-file-path0' + } + this.fileUpdate1 = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path1', + newPathname: '/new-file-path1' + } + return (this.fileUpdates = [this.fileUpdate0, this.fileUpdate1]) + }) - return describe("when the documents are not loaded", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, this.fileUpdates, this.version, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - return null; - }); + return describe('when the documents are not loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.docUpdates, + this.fileUpdates, + this.version, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return 
null + }) - return it("should push the applied doc renames to the project history api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - if (error != null) { throw error; } + return it('should push the applied doc renames to the project history api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } - let update = JSON.parse(updates[0]); - update.doc.should.equal(this.docUpdate0.id); - update.pathname.should.equal('/doc-path0'); - update.new_pathname.should.equal('/new-doc-path0'); - update.meta.user_id.should.equal(this.user_id); - update.meta.ts.should.be.a('string'); - update.version.should.equal(`${this.version}.0`); + let update = JSON.parse(updates[0]) + update.doc.should.equal(this.docUpdate0.id) + update.pathname.should.equal('/doc-path0') + update.new_pathname.should.equal('/new-doc-path0') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) - update = JSON.parse(updates[1]); - update.doc.should.equal(this.docUpdate1.id); - update.pathname.should.equal('/doc-path1'); - update.new_pathname.should.equal('/new-doc-path1'); - update.meta.user_id.should.equal(this.user_id); - update.meta.ts.should.be.a('string'); - update.version.should.equal(`${this.version}.1`); + update = JSON.parse(updates[1]) + update.doc.should.equal(this.docUpdate1.id) + update.pathname.should.equal('/doc-path1') + update.new_pathname.should.equal('/new-doc-path1') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.1`) - update = JSON.parse(updates[2]); - update.file.should.equal(this.fileUpdate0.id); - update.pathname.should.equal('/file-path0'); - update.new_pathname.should.equal('/new-file-path0'); - update.meta.user_id.should.equal(this.user_id); - update.meta.ts.should.be.a('string'); - update.version.should.equal(`${this.version}.2`); + update = JSON.parse(updates[2]) + update.file.should.equal(this.fileUpdate0.id) + update.pathname.should.equal('/file-path0') + update.new_pathname.should.equal('/new-file-path0') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.2`) - update = JSON.parse(updates[3]); - update.file.should.equal(this.fileUpdate1.id); - update.pathname.should.equal('/file-path1'); - update.new_pathname.should.equal('/new-file-path1'); - update.meta.user_id.should.equal(this.user_id); - update.meta.ts.should.be.a('string'); - update.version.should.equal(`${this.version}.3`); + update = JSON.parse(updates[3]) + update.file.should.equal(this.fileUpdate1.id) + update.pathname.should.equal('/file-path1') + update.new_pathname.should.equal('/new-file-path1') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.3`) - return done(); - }); - return null; - }); - }); - }); + return done() + } + ) + return null + }) + }) + }) + describe('adding a file', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.fileUpdate = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path', + url: 'filestore.example.com' + } + this.fileUpdates = [this.fileUpdate] + 
DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + [], + this.fileUpdates, + this.version, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return null + }) - describe("adding a file", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.fileUpdate = { - id: DocUpdaterClient.randomId(), - pathname: '/file-path', - url: 'filestore.example.com' - }; - this.fileUpdates = [ this.fileUpdate ]; - DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, [], this.fileUpdates, this.version, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - return null; - }); + return it('should push the file addition to the project history api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } - return it("should push the file addition to the project history api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - if (error != null) { throw error; } + const update = JSON.parse(updates[0]) + update.file.should.equal(this.fileUpdate.id) + update.pathname.should.equal('/file-path') + update.url.should.equal('filestore.example.com') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) - const update = JSON.parse(updates[0]); - update.file.should.equal(this.fileUpdate.id); - update.pathname.should.equal('/file-path'); - update.url.should.equal('filestore.example.com'); - update.meta.user_id.should.equal(this.user_id); - update.meta.ts.should.be.a('string'); - update.version.should.equal(`${this.version}.0`); + return done() + } + ) + return null + }) + }) - return done(); - }); - return null; - }); - }); + describe('adding a doc', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.docUpdate = { + id: DocUpdaterClient.randomId(), + pathname: '/file-path', + docLines: 'a\nb' + } + this.docUpdates = [this.docUpdate] + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.docUpdates, + [], + this.version, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return null + }) - describe("adding a doc", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.docUpdate = { - id: DocUpdaterClient.randomId(), - pathname: '/file-path', - docLines: 'a\nb' - }; - this.docUpdates = [ this.docUpdate ]; - DocUpdaterClient.sendProjectUpdate(this.project_id, this.user_id, this.docUpdates, [], this.version, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - return null; - }); + return it('should push the doc addition to the project history api', function (done) { + rclient_project_history.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } - return it("should push the doc addition to the project history api", function(done) { - rclient_project_history.lrange(ProjectHistoryKeys.projectHistoryOps({project_id: this.project_id}), 0, -1, (error, updates) => { - if (error != null) { throw error; } + const update = JSON.parse(updates[0]) + 
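+          // The queued history op should record the doc id, pathname and
+          // docLines, together with user metadata and a `${version}.0`
+          // subversion, as asserted below.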
+          update.doc.should.equal(this.docUpdate.id)
+          update.pathname.should.equal('/file-path')
+          update.docLines.should.equal('a\nb')
+          update.meta.user_id.should.equal(this.user_id)
+          update.meta.ts.should.be.a('string')
+          update.version.should.equal(`${this.version}.0`)
-        const update = JSON.parse(updates[0]);
-        update.doc.should.equal(this.docUpdate.id);
-        update.pathname.should.equal('/file-path');
-        update.docLines.should.equal('a\nb');
-        update.meta.user_id.should.equal(this.user_id);
-        update.meta.ts.should.be.a('string');
-        update.version.should.equal(`${this.version}.0`);
+          return done()
+        }
+      )
+      return null
+    })
+  })
-        return done();
-      });
-      return null;
-    });
-  });
+  describe('with enough updates to flush to the history service', function () {
+    before(function (done) {
+      this.project_id = DocUpdaterClient.randomId()
+      this.user_id = DocUpdaterClient.randomId()
+      this.version0 = 12345
+      this.version1 = this.version0 + 1
+      const updates = []
+      for (let v = 0; v <= 599; v++) {
+        // Should flush after 500 ops
+        updates.push({
+          id: DocUpdaterClient.randomId(),
+          pathname: '/file-' + v,
+          docLines: 'a\nb'
+        })
+      }
-  describe("with enough updates to flush to the history service", function() {
-    before(function(done) {
-      this.project_id = DocUpdaterClient.randomId();
-      this.user_id = DocUpdaterClient.randomId();
-      this.version0 = 12345;
-      this.version1 = this.version0 + 1;
-      const updates = [];
-      for (let v = 0; v <= 599; v++) { // Should flush after 500 ops
-        updates.push({
-          id: DocUpdaterClient.randomId(),
-          pathname: '/file-' + v,
-          docLines: 'a\nb'
-        });
-      }
+      sinon.spy(MockProjectHistoryApi, 'flushProject')
-      sinon.spy(MockProjectHistoryApi, "flushProject");
+      // Send updates in chunks to cause multiple flushes
+      const projectId = this.project_id
+      const userId = this.user_id
+      DocUpdaterClient.sendProjectUpdate(
+        projectId,
+        userId,
+        updates.slice(0, 250),
+        [],
+        this.version0,
+        function (error) {
+          if (error != null) {
+            throw error
+          }
+          return DocUpdaterClient.sendProjectUpdate(
+            projectId,
+            userId,
+            updates.slice(250),
+            [],
+            this.version1,
+            (error) => {
+              if (error != null) {
+                throw error
+              }
+              return setTimeout(done, 2000)
+            }
+          )
+        }
+      )
+      return null
+    })
-      // Send updates in chunks to causes multiple flushes
-      const projectId = this.project_id;
-      const userId = this.project_id;
-      DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(0, 250), [], this.version0, function(error) {
-        if (error != null) { throw error; }
-        return DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(250), [], this.version1, (error) => {
-          if (error != null) { throw error; }
-          return setTimeout(done, 2000);
-        });
-      });
-      return null;
-    });
+    after(function () {
+      return MockProjectHistoryApi.flushProject.restore()
+    })
-    after(function() { return MockProjectHistoryApi.flushProject.restore(); });
+    return it('should flush project history', function () {
+      return MockProjectHistoryApi.flushProject
+        .calledWith(this.project_id)
+        .should.equal(true)
+    })
+  })
-    return it("should flush project history", function() {
-      return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true);
-    });
-  });
+  return describe('with too few updates to flush to the history service', function () {
+    before(function (done) {
+      this.project_id = DocUpdaterClient.randomId()
+      this.user_id = DocUpdaterClient.randomId()
+      this.version0 = 12345
+      this.version1 = this.version0 + 1
-  return describe("with too few updates to flush to the history service", function() {
-    before(function(done) {
-      this.project_id = DocUpdaterClient.randomId();
-      this.user_id = DocUpdaterClient.randomId();
-      this.version0 = 12345;
-      this.version1 = this.version0 + 1;
+      const updates = []
+      for (let v = 0; v <= 42; v++) {
+        // Only 43 ops, well below the 500-op flush threshold
+        updates.push({
+          id: DocUpdaterClient.randomId(),
+          pathname: '/file-' + v,
+          docLines: 'a\nb'
+        })
+      }
-      const updates = [];
-      for (let v = 0; v <= 42; v++) { // Should flush after 500 ops
-        updates.push({
-          id: DocUpdaterClient.randomId(),
-          pathname: '/file-' + v,
-          docLines: 'a\nb'
-        });
-      }
+      sinon.spy(MockProjectHistoryApi, 'flushProject')
-      sinon.spy(MockProjectHistoryApi, "flushProject");
+      // Send updates in chunks
+      const projectId = this.project_id
+      const userId = this.user_id
+      DocUpdaterClient.sendProjectUpdate(
+        projectId,
+        userId,
+        updates.slice(0, 10),
+        [],
+        this.version0,
+        function (error) {
+          if (error != null) {
+            throw error
+          }
+          return DocUpdaterClient.sendProjectUpdate(
+            projectId,
+            userId,
+            updates.slice(10),
+            [],
+            this.version1,
+            (error) => {
+              if (error != null) {
+                throw error
+              }
+              return setTimeout(done, 2000)
+            }
+          )
+        }
+      )
+      return null
+    })
-      // Send updates in chunks
-      const projectId = this.project_id;
-      const userId = this.project_id;
-      DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(0, 10), [], this.version0, function(error) {
-        if (error != null) { throw error; }
-        return DocUpdaterClient.sendProjectUpdate(projectId, userId, updates.slice(10), [], this.version1, (error) => {
-          if (error != null) { throw error; }
-          return setTimeout(done, 2000);
-        });
-      });
-      return null;
-    });
+    after(function () {
+      return MockProjectHistoryApi.flushProject.restore()
+    })
-    after(function() { return MockProjectHistoryApi.flushProject.restore(); });
-
-    return it("should not flush project history", function() {
-      return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(false);
-    });
-  });
-});
+    return it('should not flush project history', function () {
+      return MockProjectHistoryApi.flushProject
+        .calledWith(this.project_id)
+        .should.equal(false)
+    })
+  })
+})
diff --git a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js
index 73f61646cf..fbb8055aae 100644
--- a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js
+++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js
@@ -10,137 +10,186 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-const sinon = require("sinon");
-const chai = require("chai");
-chai.should();
+const sinon = require('sinon')
+const chai = require('chai')
+chai.should()
-const MockTrackChangesApi = require("./helpers/MockTrackChangesApi");
-const MockProjectHistoryApi = require("./helpers/MockProjectHistoryApi");
-const MockWebApi = require("./helpers/MockWebApi");
-const DocUpdaterClient = require("./helpers/DocUpdaterClient");
-const DocUpdaterApp = require("./helpers/DocUpdaterApp");
+const MockTrackChangesApi = require('./helpers/MockTrackChangesApi')
+const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi')
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
-describe("Deleting a document", function() {
-  before(function(done) {
-    
this.lines = ["one", "two", "three"]; - this.version = 42; - this.update = { - doc: this.doc_id, - op: [{ - i: "one and a half\n", - p: 4 - }], - v: this.version - }; - this.result = ["one", "one and a half", "two", "three"]; +describe('Deleting a document', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.update = { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4 + } + ], + v: this.version + } + this.result = ['one', 'one and a half', 'two', 'three'] - sinon.spy(MockTrackChangesApi, "flushDoc"); - sinon.spy(MockProjectHistoryApi, "flushProject"); - return DocUpdaterApp.ensureRunning(done); - }); + sinon.spy(MockTrackChangesApi, 'flushDoc') + sinon.spy(MockProjectHistoryApi, 'flushProject') + return DocUpdaterApp.ensureRunning(done) + }) - after(function() { - MockTrackChangesApi.flushDoc.restore(); - return MockProjectHistoryApi.flushProject.restore(); - }); + after(function () { + MockTrackChangesApi.flushDoc.restore() + return MockProjectHistoryApi.flushProject.restore() + }) - describe("when the updated doc exists in the doc updater", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - sinon.spy(MockWebApi, "setDocument"); - sinon.spy(MockWebApi, "getDocument"); + describe('when the updated doc exists in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockWebApi, 'getDocument') - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, (error, res, body) => { - this.statusCode = res.statusCode; - return setTimeout(done, 200); - }); - } - , 200); - }); - }); - }); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.deleteDoc( + this.project_id, + this.doc_id, + (error, res, body) => { + this.statusCode = res.statusCode + return setTimeout(done, 200) + } + ) + }, 200) + } + ) + } + ) + }) - after(function() { - MockWebApi.setDocument.restore(); - return MockWebApi.getDocument.restore(); - }); + after(function () { + MockWebApi.setDocument.restore() + return MockWebApi.getDocument.restore() + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - it("should send the updated document and version to the web api", function() { - return MockWebApi.setDocument - .calledWith(this.project_id, this.doc_id, this.result, this.version + 1) - .should.equal(true); - }); + it('should send the updated document and version to the web api', function () { + return 
MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.result, this.version + 1) + .should.equal(true) + }) - it("should need to reload the doc if read again", function(done) { - MockWebApi.getDocument.called.should.equal.false; - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - MockWebApi.getDocument - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - return done(); - }); - }); + it('should need to reload the doc if read again', function (done) { + MockWebApi.getDocument.called.should.equal.false + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + return done() + } + ) + }) - it("should flush track changes", function() { - return MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true); - }); + it('should flush track changes', function () { + return MockTrackChangesApi.flushDoc + .calledWith(this.doc_id) + .should.equal(true) + }) - return it("should flush project history", function() { - return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true); - }); - }); + return it('should flush project history', function () { + return MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) + }) - return describe("when the doc is not in the doc updater", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: this.lines - }); - sinon.spy(MockWebApi, "setDocument"); - sinon.spy(MockWebApi, "getDocument"); - return DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, (error, res, body) => { - this.statusCode = res.statusCode; - return setTimeout(done, 200); - }); - }); + return describe('when the doc is not in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines + }) + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockWebApi, 'getDocument') + return DocUpdaterClient.deleteDoc( + this.project_id, + this.doc_id, + (error, res, body) => { + this.statusCode = res.statusCode + return setTimeout(done, 200) + } + ) + }) - after(function() { - MockWebApi.setDocument.restore(); - return MockWebApi.getDocument.restore(); - }); + after(function () { + MockWebApi.setDocument.restore() + return MockWebApi.getDocument.restore() + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - it("should not need to send the updated document to the web api", function() { return MockWebApi.setDocument.called.should.equal(false); }); + it('should not need to send the updated document to the web api', function () { + return MockWebApi.setDocument.called.should.equal(false) + }) - it("should need to reload the doc if read again", function(done) { - MockWebApi.getDocument.called.should.equal.false; - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - MockWebApi.getDocument - .calledWith(this.project_id, this.doc_id) - .should.equal(true); - return done(); - }); - }); + it('should need to reload the doc if read 
again', function (done) { + MockWebApi.getDocument.called.should.equal.false + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + return done() + } + ) + }) - it("should flush track changes", function() { - return MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true); - }); + it('should flush track changes', function () { + return MockTrackChangesApi.flushDoc + .calledWith(this.doc_id) + .should.equal(true) + }) - return it("should flush project history", function() { - return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true); - }); - }); -}); + return it('should flush project history', function () { + return MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js index 74e1afd0b1..f050ea22e1 100644 --- a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js @@ -11,213 +11,285 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require("sinon"); -const chai = require("chai"); -chai.should(); -const async = require("async"); +const sinon = require('sinon') +const chai = require('chai') +chai.should() +const async = require('async') -const MockTrackChangesApi = require("./helpers/MockTrackChangesApi"); -const MockProjectHistoryApi = require("./helpers/MockProjectHistoryApi"); -const MockWebApi = require("./helpers/MockWebApi"); -const DocUpdaterClient = require("./helpers/DocUpdaterClient"); -const DocUpdaterApp = require("./helpers/DocUpdaterApp"); +const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') +const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') -describe("Deleting a project", function() { - before(function(done) { - let doc_id0, doc_id1; - this.project_id = DocUpdaterClient.randomId(); - this.docs = [{ - id: (doc_id0 = DocUpdaterClient.randomId()), - lines: ["one", "two", "three"], - update: { - doc: doc_id0, - op: [{ - i: "one and a half\n", - p: 4 - }], - v: 0 - }, - updatedLines: ["one", "one and a half", "two", "three"] - }, { - id: (doc_id1 = DocUpdaterClient.randomId()), - lines: ["four", "five", "six"], - update: { - doc: doc_id1, - op: [{ - i: "four and a half\n", - p: 5 - }], - v: 0 - }, - updatedLines: ["four", "four and a half", "five", "six"] - }]; - for (const doc of Array.from(this.docs)) { - MockWebApi.insertDoc(this.project_id, doc.id, { - lines: doc.lines, - version: doc.update.v - }); - } +describe('Deleting a project', function () { + before(function (done) { + let doc_id0, doc_id1 + this.project_id = DocUpdaterClient.randomId() + this.docs = [ + { + id: (doc_id0 = DocUpdaterClient.randomId()), + lines: ['one', 'two', 'three'], + update: { + doc: doc_id0, + op: [ + { + i: 'one and a half\n', + p: 4 + } + ], + v: 0 + }, + updatedLines: ['one', 'one and a half', 'two', 'three'] + }, + { + id: (doc_id1 = DocUpdaterClient.randomId()), + lines: ['four', 'five', 'six'], + update: { + 
doc: doc_id1, + op: [ + { + i: 'four and a half\n', + p: 5 + } + ], + v: 0 + }, + updatedLines: ['four', 'four and a half', 'five', 'six'] + } + ] + for (const doc of Array.from(this.docs)) { + MockWebApi.insertDoc(this.project_id, doc.id, { + lines: doc.lines, + version: doc.update.v + }) + } - return DocUpdaterApp.ensureRunning(done); - }); + return DocUpdaterApp.ensureRunning(done) + }) + describe('with documents which have been updated', function () { + before(function (done) { + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockTrackChangesApi, 'flushDoc') + sinon.spy(MockProjectHistoryApi, 'flushProject') - describe("with documents which have been updated", function() { - before(function(done) { - sinon.spy(MockWebApi, "setDocument"); - sinon.spy(MockTrackChangesApi, "flushDoc"); - sinon.spy(MockProjectHistoryApi, "flushProject"); + return async.series( + this.docs.map((doc) => { + return (callback) => { + return DocUpdaterClient.preloadDoc( + this.project_id, + doc.id, + (error) => { + if (error != null) { + return callback(error) + } + return DocUpdaterClient.sendUpdate( + this.project_id, + doc.id, + doc.update, + (error) => { + return callback(error) + } + ) + } + ) + } + }), + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.deleteProject( + this.project_id, + (error, res, body) => { + this.statusCode = res.statusCode + return done() + } + ) + }, 200) + } + ) + }) - return async.series(this.docs.map(doc => { - return callback => { - return DocUpdaterClient.preloadDoc(this.project_id, doc.id, error => { - if (error != null) { return callback(error); } - return DocUpdaterClient.sendUpdate(this.project_id, doc.id, doc.update, error => { - return callback(error); - }); - }); - }; - }), error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.deleteProject(this.project_id, (error, res, body) => { - this.statusCode = res.statusCode; - return done(); - }); - } - , 200); - }); - }); + after(function () { + MockWebApi.setDocument.restore() + MockTrackChangesApi.flushDoc.restore() + return MockProjectHistoryApi.flushProject.restore() + }) - after(function() { - MockWebApi.setDocument.restore(); - MockTrackChangesApi.flushDoc.restore(); - return MockProjectHistoryApi.flushProject.restore(); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should send each document to the web api', function () { + return Array.from(this.docs).map((doc) => + MockWebApi.setDocument + .calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true) + ) + }) - it("should send each document to the web api", function() { - return Array.from(this.docs).map((doc) => - MockWebApi.setDocument - .calledWith(this.project_id, doc.id, doc.updatedLines) - .should.equal(true)); - }); + it('should need to reload the docs if read again', function (done) { + sinon.spy(MockWebApi, 'getDocument') + return async.series( + this.docs.map((doc) => { + return (callback) => { + MockWebApi.getDocument + .calledWith(this.project_id, doc.id) + .should.equal(false) + return DocUpdaterClient.getDoc( + this.project_id, + doc.id, + (error, res, returnedDoc) => { + MockWebApi.getDocument + .calledWith(this.project_id, doc.id) + .should.equal(true) + return callback() + } + ) + } + }), + () => { + MockWebApi.getDocument.restore() + return done() + } + ) + }) - 
it("should need to reload the docs if read again", function(done) { - sinon.spy(MockWebApi, "getDocument"); - return async.series(this.docs.map(doc => { - return callback => { - MockWebApi.getDocument.calledWith(this.project_id, doc.id).should.equal(false); - return DocUpdaterClient.getDoc(this.project_id, doc.id, (error, res, returnedDoc) => { - MockWebApi.getDocument.calledWith(this.project_id, doc.id).should.equal(true); - return callback(); - }); - }; - }), () => { - MockWebApi.getDocument.restore(); - return done(); - }); - }); + it('should flush each doc in track changes', function () { + return Array.from(this.docs).map((doc) => + MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true) + ) + }) - it("should flush each doc in track changes", function() { - return Array.from(this.docs).map((doc) => - MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)); - }); + return it('should flush each doc in project history', function () { + return MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) + }) - return it("should flush each doc in project history", function() { - return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true); - }); - }); + describe('with the background=true parameter from realtime and no request to flush the queue', function () { + before(function (done) { + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockTrackChangesApi, 'flushDoc') + sinon.spy(MockProjectHistoryApi, 'flushProject') - describe("with the background=true parameter from realtime and no request to flush the queue", function() { - before(function(done) { - sinon.spy(MockWebApi, "setDocument"); - sinon.spy(MockTrackChangesApi, "flushDoc"); - sinon.spy(MockProjectHistoryApi, "flushProject"); + return async.series( + this.docs.map((doc) => { + return (callback) => { + return DocUpdaterClient.preloadDoc( + this.project_id, + doc.id, + callback + ) + } + }), + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.deleteProjectOnShutdown( + this.project_id, + (error, res, body) => { + this.statusCode = res.statusCode + return done() + } + ) + }, 200) + } + ) + }) - return async.series(this.docs.map(doc => { - return callback => { - return DocUpdaterClient.preloadDoc(this.project_id, doc.id, callback); - }; - }), error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.deleteProjectOnShutdown(this.project_id, (error, res, body) => { - this.statusCode = res.statusCode; - return done(); - }); - } - , 200); - }); - }); + after(function () { + MockWebApi.setDocument.restore() + MockTrackChangesApi.flushDoc.restore() + return MockProjectHistoryApi.flushProject.restore() + }) - after(function() { - MockWebApi.setDocument.restore(); - MockTrackChangesApi.flushDoc.restore(); - return MockProjectHistoryApi.flushProject.restore(); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should not send any documents to the web api', function () { + return MockWebApi.setDocument.called.should.equal(false) + }) - it("should not send any documents to the web api", function() { return MockWebApi.setDocument.called.should.equal(false); }); + it('should not flush any docs in track changes', function () { + return MockTrackChangesApi.flushDoc.called.should.equal(false) + }) - 
it("should not flush any docs in track changes", function() { return MockTrackChangesApi.flushDoc.called.should.equal(false); }); + return it('should not flush to project history', function () { + return MockProjectHistoryApi.flushProject.called.should.equal(false) + }) + }) - return it("should not flush to project history", function() { return MockProjectHistoryApi.flushProject.called.should.equal(false); }); - }); + return describe('with the background=true parameter from realtime and a request to flush the queue', function () { + before(function (done) { + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockTrackChangesApi, 'flushDoc') + sinon.spy(MockProjectHistoryApi, 'flushProject') - return describe("with the background=true parameter from realtime and a request to flush the queue", function() { - before(function(done) { - sinon.spy(MockWebApi, "setDocument"); - sinon.spy(MockTrackChangesApi, "flushDoc"); - sinon.spy(MockProjectHistoryApi, "flushProject"); + return async.series( + this.docs.map((doc) => { + return (callback) => { + return DocUpdaterClient.preloadDoc( + this.project_id, + doc.id, + callback + ) + } + }), + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.deleteProjectOnShutdown( + this.project_id, + (error, res, body) => { + this.statusCode = res.statusCode + // after deleting the project and putting it in the queue, flush the queue + return setTimeout( + () => DocUpdaterClient.flushOldProjects(done), + 2000 + ) + } + ) + }, 200) + } + ) + }) - return async.series(this.docs.map(doc => { - return callback => { - return DocUpdaterClient.preloadDoc(this.project_id, doc.id, callback); - }; - }), error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.deleteProjectOnShutdown(this.project_id, (error, res, body) => { - this.statusCode = res.statusCode; - // after deleting the project and putting it in the queue, flush the queue - return setTimeout(() => DocUpdaterClient.flushOldProjects(done) - , 2000); - }); - } - , 200); - }); - }); + after(function () { + MockWebApi.setDocument.restore() + MockTrackChangesApi.flushDoc.restore() + return MockProjectHistoryApi.flushProject.restore() + }) - after(function() { - MockWebApi.setDocument.restore(); - MockTrackChangesApi.flushDoc.restore(); - return MockProjectHistoryApi.flushProject.restore(); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should send each document to the web api', function () { + return Array.from(this.docs).map((doc) => + MockWebApi.setDocument + .calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true) + ) + }) - it("should send each document to the web api", function() { - return Array.from(this.docs).map((doc) => - MockWebApi.setDocument - .calledWith(this.project_id, doc.id, doc.updatedLines) - .should.equal(true)); - }); - - it("should flush each doc in track changes", function() { - return Array.from(this.docs).map((doc) => - MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)); - }); - - return it("should flush to project history", function() { return MockProjectHistoryApi.flushProject.called.should.equal(true); }); - }); -}); + it('should flush each doc in track changes', function () { + return Array.from(this.docs).map((doc) => + MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true) 
+ ) + }) + return it('should flush to project history', function () { + return MockProjectHistoryApi.flushProject.called.should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js index ea45d4af26..c860fce849 100644 --- a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js @@ -11,101 +11,135 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require("sinon"); -const chai = require("chai"); -chai.should(); -const async = require("async"); +const sinon = require('sinon') +const chai = require('chai') +chai.should() +const async = require('async') -const MockWebApi = require("./helpers/MockWebApi"); -const DocUpdaterClient = require("./helpers/DocUpdaterClient"); -const DocUpdaterApp = require("./helpers/DocUpdaterApp"); +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') -describe("Flushing a project", function() { - before(function(done) { - let doc_id0, doc_id1; - this.project_id = DocUpdaterClient.randomId(); - this.docs = [{ - id: (doc_id0 = DocUpdaterClient.randomId()), - lines: ["one", "two", "three"], - update: { - doc: doc_id0, - op: [{ - i: "one and a half\n", - p: 4 - }], - v: 0 - }, - updatedLines: ["one", "one and a half", "two", "three"] - }, { - id: (doc_id1 = DocUpdaterClient.randomId()), - lines: ["four", "five", "six"], - update: { - doc: doc_id1, - op: [{ - i: "four and a half\n", - p: 5 - }], - v: 0 - }, - updatedLines: ["four", "four and a half", "five", "six"] - }]; - for (const doc of Array.from(this.docs)) { - MockWebApi.insertDoc(this.project_id, doc.id, { - lines: doc.lines, - version: doc.update.v - }); - } - return DocUpdaterApp.ensureRunning(done); - }); +describe('Flushing a project', function () { + before(function (done) { + let doc_id0, doc_id1 + this.project_id = DocUpdaterClient.randomId() + this.docs = [ + { + id: (doc_id0 = DocUpdaterClient.randomId()), + lines: ['one', 'two', 'three'], + update: { + doc: doc_id0, + op: [ + { + i: 'one and a half\n', + p: 4 + } + ], + v: 0 + }, + updatedLines: ['one', 'one and a half', 'two', 'three'] + }, + { + id: (doc_id1 = DocUpdaterClient.randomId()), + lines: ['four', 'five', 'six'], + update: { + doc: doc_id1, + op: [ + { + i: 'four and a half\n', + p: 5 + } + ], + v: 0 + }, + updatedLines: ['four', 'four and a half', 'five', 'six'] + } + ] + for (const doc of Array.from(this.docs)) { + MockWebApi.insertDoc(this.project_id, doc.id, { + lines: doc.lines, + version: doc.update.v + }) + } + return DocUpdaterApp.ensureRunning(done) + }) - return describe("with documents which have been updated", function() { - before(function(done) { - sinon.spy(MockWebApi, "setDocument"); + return describe('with documents which have been updated', function () { + before(function (done) { + sinon.spy(MockWebApi, 'setDocument') - return async.series(this.docs.map(doc => { - return callback => { - return DocUpdaterClient.preloadDoc(this.project_id, doc.id, error => { - if (error != null) { return callback(error); } - return DocUpdaterClient.sendUpdate(this.project_id, doc.id, doc.update, error => { - return callback(error); - }); - }); - }; - }), error => { - if (error != null) { throw 
error; } - return setTimeout(() => { - return DocUpdaterClient.flushProject(this.project_id, (error, res, body) => { - this.statusCode = res.statusCode; - return done(); - }); - } - , 200); - }); - }); + return async.series( + this.docs.map((doc) => { + return (callback) => { + return DocUpdaterClient.preloadDoc( + this.project_id, + doc.id, + (error) => { + if (error != null) { + return callback(error) + } + return DocUpdaterClient.sendUpdate( + this.project_id, + doc.id, + doc.update, + (error) => { + return callback(error) + } + ) + } + ) + } + }), + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.flushProject( + this.project_id, + (error, res, body) => { + this.statusCode = res.statusCode + return done() + } + ) + }, 200) + } + ) + }) - after(function() { return MockWebApi.setDocument.restore(); }); + after(function () { + return MockWebApi.setDocument.restore() + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - it("should send each document to the web api", function() { - return Array.from(this.docs).map((doc) => - MockWebApi.setDocument - .calledWith(this.project_id, doc.id, doc.updatedLines) - .should.equal(true)); - }); - - return it("should update the lines in the doc updater", function(done) { - return async.series(this.docs.map(doc => { - return callback => { - return DocUpdaterClient.getDoc(this.project_id, doc.id, (error, res, returnedDoc) => { - returnedDoc.lines.should.deep.equal(doc.updatedLines); - return callback(); - }); - }; - }), done); - }); - }); -}); + it('should send each document to the web api', function () { + return Array.from(this.docs).map((doc) => + MockWebApi.setDocument + .calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true) + ) + }) + return it('should update the lines in the doc updater', function (done) { + return async.series( + this.docs.map((doc) => { + return (callback) => { + return DocUpdaterClient.getDoc( + this.project_id, + doc.id, + (error, res, returnedDoc) => { + returnedDoc.lines.should.deep.equal(doc.updatedLines) + return callback() + } + ) + } + }), + done + ) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js index 5d8c066f20..5556870f8e 100644 --- a/services/document-updater/test/acceptance/js/FlushingDocsTests.js +++ b/services/document-updater/test/acceptance/js/FlushingDocsTests.js @@ -13,108 +13,153 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require("sinon"); -const chai = require("chai"); -chai.should(); -const { - expect -} = chai; -const async = require("async"); +const sinon = require('sinon') +const chai = require('chai') +chai.should() +const { expect } = chai +const async = require('async') -const MockWebApi = require("./helpers/MockWebApi"); -const DocUpdaterClient = require("./helpers/DocUpdaterClient"); -const DocUpdaterApp = require("./helpers/DocUpdaterApp"); +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') -describe("Flushing a doc to Mongo", function() { - before(function(done) { - this.lines = ["one", "two", "three"]; - 
this.version = 42; - this.update = { - doc: this.doc_id, - meta: { user_id: 'last-author-fake-id' }, - op: [{ - i: "one and a half\n", - p: 4 - }], - v: this.version - }; - this.result = ["one", "one and a half", "two", "three"]; - return DocUpdaterApp.ensureRunning(done); - }); +describe('Flushing a doc to Mongo', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.update = { + doc: this.doc_id, + meta: { user_id: 'last-author-fake-id' }, + op: [ + { + i: 'one and a half\n', + p: 4 + } + ], + v: this.version + } + this.result = ['one', 'one and a half', 'two', 'three'] + return DocUpdaterApp.ensureRunning(done) + }) - describe("when the updated doc exists in the doc updater", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - sinon.spy(MockWebApi, "setDocument"); + describe('when the updated doc exists in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + sinon.spy(MockWebApi, 'setDocument') - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - return DocUpdaterClient.sendUpdates(this.project_id, this.doc_id, [this.update], error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done); - } - , 200); - }); - }); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + return DocUpdaterClient.sendUpdates( + this.project_id, + this.doc_id, + [this.update], + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done) + }, 200) + } + ) + }) - after(function() { return MockWebApi.setDocument.restore(); }); + after(function () { + return MockWebApi.setDocument.restore() + }) - it("should flush the updated doc lines and version to the web api", function() { - return MockWebApi.setDocument - .calledWith(this.project_id, this.doc_id, this.result, this.version + 1) - .should.equal(true); - }); + it('should flush the updated doc lines and version to the web api', function () { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.result, this.version + 1) + .should.equal(true) + }) - return it("should flush the last update author and time to the web api", function() { - const lastUpdatedAt = MockWebApi.setDocument.lastCall.args[5]; - parseInt(lastUpdatedAt).should.be.closeTo((new Date()).getTime(), 30000); + return it('should flush the last update author and time to the web api', function () { + const lastUpdatedAt = MockWebApi.setDocument.lastCall.args[5] + parseInt(lastUpdatedAt).should.be.closeTo(new Date().getTime(), 30000) - const lastUpdatedBy = MockWebApi.setDocument.lastCall.args[6]; - return lastUpdatedBy.should.equal('last-author-fake-id'); - }); - }); + const lastUpdatedBy = MockWebApi.setDocument.lastCall.args[6] + return lastUpdatedBy.should.equal('last-author-fake-id') + }) + }) - describe("when the doc does not exist in the doc updater", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: this.lines - }); - sinon.spy(MockWebApi, "setDocument"); - return 
DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done);
-    });
+  describe('when the doc does not exist in the doc updater', function () {
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId()
+      ])
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines
+      })
+      sinon.spy(MockWebApi, 'setDocument')
+      return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done)
+    })
 
-    after(function() { return MockWebApi.setDocument.restore(); });
+    after(function () {
+      return MockWebApi.setDocument.restore()
+    })
 
-    return it("should not flush the doc to the web api", function() { return MockWebApi.setDocument.called.should.equal(false); });
-  });
+    return it('should not flush the doc to the web api', function () {
+      return MockWebApi.setDocument.called.should.equal(false)
+    })
+  })
 
-  return describe("when the web api http request takes a long time on first request", function() {
-    before(function(done) {
-      [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]);
-      MockWebApi.insertDoc(this.project_id, this.doc_id, {
-        lines: this.lines,
-        version: this.version
-      });
-      let t = 30000;
-      sinon.stub(MockWebApi, "setDocument", (project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback) => {
-        if (callback == null) { callback = function(error) {}; }
-        setTimeout(callback, t);
-        return t = 0;
-      });
-      return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, done);
-    });
+  return describe('when the web api http request takes a long time on first request', function () {
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId()
+      ])
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        version: this.version
+      })
+      let t = 30000
+      sinon.stub(
+        MockWebApi,
+        'setDocument',
+        (
+          project_id,
+          doc_id,
+          lines,
+          version,
+          ranges,
+          lastUpdatedAt,
+          lastUpdatedBy,
+          callback
+        ) => {
+          if (callback == null) {
+            callback = function (error) {}
+          }
+          setTimeout(callback, t)
+          return (t = 0)
+        }
+      )
+      return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, done)
+    })
 
-    after(function() { return MockWebApi.setDocument.restore(); });
-
-    return it("should still work", function(done) {
-      const start = Date.now();
-      return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, (error, res, doc) => {
-        res.statusCode.should.equal(204);
-        const delta = Date.now() - start;
-        expect(delta).to.be.below(20000);
-        return done();
-      });
-    });
-  });
-});
+    after(function () {
+      return MockWebApi.setDocument.restore()
+    })
+
+    return it('should still work', function (done) {
+      const start = Date.now()
+      return DocUpdaterClient.flushDoc(
+        this.project_id,
+        this.doc_id,
+        (error, res, doc) => {
+          res.statusCode.should.equal(204)
+          const delta = Date.now() - start
+          expect(delta).to.be.below(20000)
+          return done()
+        }
+      )
+    })
+  })
+})
diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js
index ad51413387..fc97c8d825 100644
--- a/services/document-updater/test/acceptance/js/GettingADocumentTests.js
+++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js
@@ -11,184 +11,278 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-const sinon = require("sinon");
-const chai = require("chai");
-chai.should();
-const {
-  expect
-} = chai;
+const sinon = require('sinon')
+const chai = require('chai')
+chai.should()
+const { expect } = chai
 
-const MockWebApi = require("./helpers/MockWebApi");
-const DocUpdaterClient = require("./helpers/DocUpdaterClient");
-const DocUpdaterApp = require("./helpers/DocUpdaterApp");
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
 
-describe("Getting a document", function() {
-  before(function(done) {
-    this.lines = ["one", "two", "three"];
-    this.version = 42;
-    return DocUpdaterApp.ensureRunning(done);
-  });
+describe('Getting a document', function () {
+  before(function (done) {
+    this.lines = ['one', 'two', 'three']
+    this.version = 42
+    return DocUpdaterApp.ensureRunning(done)
+  })
 
-  describe("when the document is not loaded", function() {
-    before(function(done) {
-      [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]);
-      sinon.spy(MockWebApi, "getDocument");
+  describe('when the document is not loaded', function () {
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId()
+      ])
+      sinon.spy(MockWebApi, 'getDocument')
 
-      MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version});
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        version: this.version
+      })
 
-      return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, returnedDoc) => { this.returnedDoc = returnedDoc; return done(); });
-    });
+      return DocUpdaterClient.getDoc(
+        this.project_id,
+        this.doc_id,
+        (error, res, returnedDoc) => {
+          this.returnedDoc = returnedDoc
+          return done()
+        }
+      )
+    })
 
-    after(function() { return MockWebApi.getDocument.restore(); });
+    after(function () {
+      return MockWebApi.getDocument.restore()
+    })
 
-    it("should load the document from the web API", function() {
-      return MockWebApi.getDocument
-        .calledWith(this.project_id, this.doc_id)
-        .should.equal(true);
-    });
-
-    it("should return the document lines", function() {
-      return this.returnedDoc.lines.should.deep.equal(this.lines);
-    });
+    it('should load the document from the web API', function () {
+      return MockWebApi.getDocument
+        .calledWith(this.project_id, this.doc_id)
+        .should.equal(true)
+    })
 
-    return it("should return the document at its current version", function() {
-      return this.returnedDoc.version.should.equal(this.version);
-    });
-  });
+    it('should return the document lines', function () {
+      return this.returnedDoc.lines.should.deep.equal(this.lines)
+    })
 
-  describe("when the document is already loaded", function() {
-    before(function(done) {
-      [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]);
-
-      MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version});
-      return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
-        if (error != null) { throw error; }
-        sinon.spy(MockWebApi, "getDocument");
-        return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, returnedDoc) => { this.returnedDoc = returnedDoc; return done(); });
-      });
-    });
+    return it('should return the document at its current version', function () {
+      return this.returnedDoc.version.should.equal(this.version)
+    })
+  })
 
-    after(function() { return MockWebApi.getDocument.restore(); });
+  describe('when the document is already loaded', function () {
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId()
+      ])
 
-    it("should not load the document from the web API", function() { return MockWebApi.getDocument.called.should.equal(false); });
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        version: this.version
+      })
+      return DocUpdaterClient.preloadDoc(
+        this.project_id,
+        this.doc_id,
+        (error) => {
+          if (error != null) {
+            throw error
+          }
+          sinon.spy(MockWebApi, 'getDocument')
+          return DocUpdaterClient.getDoc(
+            this.project_id,
+            this.doc_id,
+            (error, res, returnedDoc) => {
+              this.returnedDoc = returnedDoc
+              return done()
+            }
+          )
+        }
+      )
+    })
 
-    return it("should return the document lines", function() {
-      return this.returnedDoc.lines.should.deep.equal(this.lines);
-    });
-  });
+    after(function () {
+      return MockWebApi.getDocument.restore()
+    })
 
-  describe("when the request asks for some recent ops", function() {
-    before(function(done) {
-      [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]);
-      MockWebApi.insertDoc(this.project_id, this.doc_id, {
-        lines: (this.lines = ["one", "two", "three"])
-      });
+    it('should not load the document from the web API', function () {
+      return MockWebApi.getDocument.called.should.equal(false)
+    })
 
-      this.updates = __range__(0, 199, true).map((v) => ({
-        doc_id: this.doc_id,
-        op: [{i: v.toString(), p: 0}],
-        v
-      }));
+    return it('should return the document lines', function () {
+      return this.returnedDoc.lines.should.deep.equal(this.lines)
+    })
+  })
 
-      return DocUpdaterClient.sendUpdates(this.project_id, this.doc_id, this.updates, error => {
-        if (error != null) { throw error; }
-        sinon.spy(MockWebApi, "getDocument");
-        return done();
-      });
-    });
+  describe('when the request asks for some recent ops', function () {
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId()
+      ])
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: (this.lines = ['one', 'two', 'three'])
+      })
 
-    after(function() { return MockWebApi.getDocument.restore(); });
-
-    describe("when the ops are loaded", function() {
-      before(function(done) {
-        return DocUpdaterClient.getDocAndRecentOps(this.project_id, this.doc_id, 190, (error, res, returnedDoc) => { this.returnedDoc = returnedDoc; return done(); });
-      });
+      this.updates = __range__(0, 199, true).map((v) => ({
+        doc_id: this.doc_id,
+        op: [{ i: v.toString(), p: 0 }],
+        v
+      }))
 
-      return it("should return the recent ops", function() {
-        this.returnedDoc.ops.length.should.equal(10);
-        return Array.from(this.updates.slice(190, -1)).map((update, i) =>
-          this.returnedDoc.ops[i].op.should.deep.equal(update.op));
-      });
-    });
-
-    return describe("when the ops are not all loaded", function() {
-      before(function(done) {
-        // We only track 100 ops
-        return DocUpdaterClient.getDocAndRecentOps(this.project_id, this.doc_id, 10, (error, res, returnedDoc) => { this.res = res; this.returnedDoc = returnedDoc; return done(); });
-      });
+      return DocUpdaterClient.sendUpdates(
+        this.project_id,
+        this.doc_id,
+        this.updates,
+        (error) => {
+          if (error != null) {
+            throw error
+          }
+          sinon.spy(MockWebApi, 'getDocument')
+          return done()
+        }
+      )
+    })
 
-      return it("should return UnprocessableEntity", function() {
-        return
this.res.statusCode.should.equal(422); - }); - }); - }); + after(function () { + return MockWebApi.getDocument.restore() + }) - describe("when the document does not exist", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - this.statusCode = res.statusCode; - return done(); - }); - }); + describe('when the ops are loaded', function () { + before(function (done) { + return DocUpdaterClient.getDocAndRecentOps( + this.project_id, + this.doc_id, + 190, + (error, res, returnedDoc) => { + this.returnedDoc = returnedDoc + return done() + } + ) + }) - return it("should return 404", function() { - return this.statusCode.should.equal(404); - }); - }); + return it('should return the recent ops', function () { + this.returnedDoc.ops.length.should.equal(10) + return Array.from(this.updates.slice(190, -1)).map((update, i) => + this.returnedDoc.ops[i].op.should.deep.equal(update.op) + ) + }) + }) - describe("when the web api returns an error", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - sinon.stub(MockWebApi, "getDocument", (project_id, doc_id, callback) => { - if (callback == null) { callback = function(error, doc) {}; } - return callback(new Error("oops")); - }); - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - this.statusCode = res.statusCode; - return done(); - }); - }); + return describe('when the ops are not all loaded', function () { + before(function (done) { + // We only track 100 ops + return DocUpdaterClient.getDocAndRecentOps( + this.project_id, + this.doc_id, + 10, + (error, res, returnedDoc) => { + this.res = res + this.returnedDoc = returnedDoc + return done() + } + ) + }) - after(function() { return MockWebApi.getDocument.restore(); }); + return it('should return UnprocessableEntity', function () { + return this.res.statusCode.should.equal(422) + }) + }) + }) - return it("should return 500", function() { - return this.statusCode.should.equal(500); - }); - }); + describe('when the document does not exist', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + this.statusCode = res.statusCode + return done() + } + ) + }) - return describe("when the web api http request takes a long time", function() { - before(function(done) { - this.timeout = 10000; - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - sinon.stub(MockWebApi, "getDocument", (project_id, doc_id, callback) => { - if (callback == null) { callback = function(error, doc) {}; } - return setTimeout(callback, 30000); - }); - return done(); - }); + return it('should return 404', function () { + return this.statusCode.should.equal(404) + }) + }) - after(function() { return MockWebApi.getDocument.restore(); }); - - return it("should return quickly(ish)", function(done) { - const start = Date.now(); - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - res.statusCode.should.equal(500); - const delta = Date.now() - start; - expect(delta).to.be.below(20000); - return done(); - }); - }); - }); -}); + describe('when the web api returns an 
error', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + sinon.stub(MockWebApi, 'getDocument', (project_id, doc_id, callback) => { + if (callback == null) { + callback = function (error, doc) {} + } + return callback(new Error('oops')) + }) + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + this.statusCode = res.statusCode + return done() + } + ) + }) + after(function () { + return MockWebApi.getDocument.restore() + }) + + return it('should return 500', function () { + return this.statusCode.should.equal(500) + }) + }) + + return describe('when the web api http request takes a long time', function () { + before(function (done) { + this.timeout = 10000 + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + sinon.stub(MockWebApi, 'getDocument', (project_id, doc_id, callback) => { + if (callback == null) { + callback = function (error, doc) {} + } + return setTimeout(callback, 30000) + }) + return done() + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + + return it('should return quickly(ish)', function (done) { + const start = Date.now() + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + res.statusCode.should.equal(500) + const delta = Date.now() - start + expect(delta).to.be.below(20000) + return done() + } + ) + }) + }) +}) function __range__(left, right, inclusive) { - const range = []; - const ascending = left < right; - const end = !inclusive ? right : ascending ? right + 1 : right - 1; + const range = [] + const ascending = left < right + const end = !inclusive ? right : ascending ? right + 1 : right - 1 for (let i = left; ascending ? i < end : i > end; ascending ? 
i++ : i--) { - range.push(i); + range.push(i) } - return range; -} \ No newline at end of file + return range +} diff --git a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js index cd5d359dd5..72a6824562 100644 --- a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js +++ b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js @@ -11,105 +11,163 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require("sinon"); -const chai = require("chai"); -chai.should(); -const { - expect -} = chai; +const sinon = require('sinon') +const chai = require('chai') +chai.should() +const { expect } = chai -const MockWebApi = require("./helpers/MockWebApi"); -const DocUpdaterClient = require("./helpers/DocUpdaterClient"); -const DocUpdaterApp = require("./helpers/DocUpdaterApp"); +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') -describe("Getting documents for project", function() { - before(function(done) { - this.lines = ["one", "two", "three"]; - this.version = 42; - return DocUpdaterApp.ensureRunning(done); - }); +describe('Getting documents for project', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + return DocUpdaterApp.ensureRunning(done) + }) - describe("when project state hash does not match", function() { - before(function(done) { - this.projectStateHash = DocUpdaterClient.randomId(); - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + describe('when project state hash does not match', function () { + before(function (done) { + this.projectStateHash = DocUpdaterClient.randomId() + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res, returnedDocs) => { - this.res = res; - this.returnedDocs = returnedDocs; - return done(); - }); - }); - }); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res, returnedDocs) => { + this.res = res + this.returnedDocs = returnedDocs + return done() + } + ) + } + ) + }) - return it("should return a 409 Conflict response", function() { - return this.res.statusCode.should.equal(409); - }); - }); + return it('should return a 409 Conflict response', function () { + return this.res.statusCode.should.equal(409) + }) + }) + describe('when project state hash matches', function () { + before(function (done) { + this.projectStateHash = DocUpdaterClient.randomId() + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) - describe("when project state hash matches", function() { - before(function(done) { - 
this.projectStateHash = DocUpdaterClient.randomId(); - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res0, returnedDocs0) => { + // set the hash + this.res0 = res0 + this.returnedDocs0 = returnedDocs0 + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res, returnedDocs) => { + // the hash should now match + this.res = res + this.returnedDocs = returnedDocs + return done() + } + ) + } + ) + } + ) + }) - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res0, returnedDocs0) => { - // set the hash - this.res0 = res0; - this.returnedDocs0 = returnedDocs0; - return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res, returnedDocs) => { - // the hash should now match - this.res = res; - this.returnedDocs = returnedDocs; - return done(); - }); - }); - }); - }); + it('should return a 200 response', function () { + return this.res.statusCode.should.equal(200) + }) - it("should return a 200 response", function() { - return this.res.statusCode.should.equal(200); - }); + return it('should return the documents', function () { + return this.returnedDocs.should.deep.equal([ + { _id: this.doc_id, lines: this.lines, v: this.version } + ]) + }) + }) - return it("should return the documents", function() { - return this.returnedDocs.should.deep.equal([ {_id: this.doc_id, lines: this.lines, v: this.version} ]); - }); -}); + return describe('when the doc has been removed', function () { + before(function (done) { + this.projectStateHash = DocUpdaterClient.randomId() + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res0, returnedDocs0) => { + // set the hash + this.res0 = res0 + this.returnedDocs0 = returnedDocs0 + return DocUpdaterClient.deleteDoc( + this.project_id, + this.doc_id, + (error, res, body) => { + // delete the doc + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res1, returnedDocs) => { + // the hash would match, but the doc has been deleted + this.res = res1 + this.returnedDocs = returnedDocs + return done() + } + ) + } + ) + } + ) + } + ) + }) - return describe("when the doc has been removed", function() { - before(function(done) { - this.projectStateHash = DocUpdaterClient.randomId(); - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error 
=> { - if (error != null) { throw error; } - return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res0, returnedDocs0) => { - // set the hash - this.res0 = res0; - this.returnedDocs0 = returnedDocs0; - return DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, (error, res, body) => { - // delete the doc - return DocUpdaterClient.getProjectDocs(this.project_id, this.projectStateHash, (error, res1, returnedDocs) => { - // the hash would match, but the doc has been deleted - this.res = res1; - this.returnedDocs = returnedDocs; - return done(); - }); - }); - }); - }); - }); - - return it("should return a 409 Conflict response", function() { - return this.res.statusCode.should.equal(409); - }); - }); -}); + return it('should return a 409 Conflict response', function () { + return this.res.statusCode.should.equal(409) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js index e5daa62018..df0afe01e5 100644 --- a/services/document-updater/test/acceptance/js/RangesTests.js +++ b/services/document-updater/test/acceptance/js/RangesTests.js @@ -11,463 +11,685 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require("sinon"); -const chai = require("chai"); -chai.should(); -const { - expect -} = chai; -const async = require("async"); +const sinon = require('sinon') +const chai = require('chai') +chai.should() +const { expect } = chai +const async = require('async') -const {db, ObjectId} = require("../../../app/js/mongojs"); -const MockWebApi = require("./helpers/MockWebApi"); -const DocUpdaterClient = require("./helpers/DocUpdaterClient"); -const DocUpdaterApp = require("./helpers/DocUpdaterApp"); +const { db, ObjectId } = require('../../../app/js/mongojs') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') -describe("Ranges", function() { - before(function(done) { return DocUpdaterApp.ensureRunning(done); }); +describe('Ranges', function () { + before(function (done) { + return DocUpdaterApp.ensureRunning(done) + }) - describe("tracking changes from ops", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.user_id = DocUpdaterClient.randomId(); - this.id_seed = "587357bd35e64f6157"; - this.doc = { - id: DocUpdaterClient.randomId(), - lines: ["aaa"] - }; - this.updates = [{ - doc: this.doc.id, - op: [{ i: "123", p: 1 }], - v: 0, - meta: { user_id: this.user_id } - }, { - doc: this.doc.id, - op: [{ i: "456", p: 5 }], - v: 1, - meta: { user_id: this.user_id, tc: this.id_seed } - }, { - doc: this.doc.id, - op: [{ d: "12", p: 1 }], - v: 2, - meta: { user_id: this.user_id } - }]; - MockWebApi.insertDoc(this.project_id, this.doc.id, { - lines: this.doc.lines, - version: 0 - }); - const jobs = []; - for (const update of Array.from(this.updates)) { - (update => { - return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); - })(update); - } - - return DocUpdaterApp.ensureRunning(error => { - if (error != null) { throw error; } - return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { - if (error != null) { throw error; } - return async.series(jobs, (error) => { - if (error != null) { throw error; } - return done(); - }); - }); - }); - 
}); - - it("should update the ranges", function(done) { - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - const { - ranges - } = data; - const change = ranges.changes[0]; - change.op.should.deep.equal({ i: "456", p: 3 }); - change.id.should.equal(this.id_seed + "000001"); - change.metadata.user_id.should.equal(this.user_id); - return done(); - }); - }); - - return describe("Adding comments", function() { - describe("standalone", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.user_id = DocUpdaterClient.randomId(); - this.doc = { - id: DocUpdaterClient.randomId(), - lines: ["foo bar baz"] - }; - this.updates = [{ - doc: this.doc.id, - op: [{ c: "bar", p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], - v: 0 - }]; - MockWebApi.insertDoc(this.project_id, this.doc.id, { - lines: this.doc.lines, - version: 0 - }); - const jobs = []; - for (const update of Array.from(this.updates)) { - (update => { - return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); - })(update); - } - return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { - if (error != null) { throw error; } - return async.series(jobs, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - }); - }); - - return it("should update the ranges", function(done) { - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - const { - ranges - } = data; - const comment = ranges.comments[0]; - comment.op.should.deep.equal({ c: "bar", p: 4, t: this.tid }); - comment.id.should.equal(this.tid); - return done(); - }); - }); - }); + describe('tracking changes from ops', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.id_seed = '587357bd35e64f6157' + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['aaa'] + } + this.updates = [ + { + doc: this.doc.id, + op: [{ i: '123', p: 1 }], + v: 0, + meta: { user_id: this.user_id } + }, + { + doc: this.doc.id, + op: [{ i: '456', p: 5 }], + v: 1, + meta: { user_id: this.user_id, tc: this.id_seed } + }, + { + doc: this.doc.id, + op: [{ d: '12', p: 1 }], + v: 2, + meta: { user_id: this.user_id } + } + ] + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0 + }) + const jobs = [] + for (const update of Array.from(this.updates)) { + ;((update) => { + return jobs.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + update, + callback + ) + ) + })(update) + } - return describe("with conflicting ops needing OT", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.user_id = DocUpdaterClient.randomId(); - this.doc = { - id: DocUpdaterClient.randomId(), - lines: ["foo bar baz"] - }; - this.updates = [{ - doc: this.doc.id, - op: [{ i: "ABC", p: 3 }], - v: 0, - meta: { user_id: this.user_id } - }, { - doc: this.doc.id, - op: [{ c: "bar", p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], - v: 0 - }]; - MockWebApi.insertDoc(this.project_id, this.doc.id, { - lines: this.doc.lines, - version: 0 - }); - const jobs = []; - for (const update of Array.from(this.updates)) { - (update => { - return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); - })(update); - } - return 
DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { - if (error != null) { throw error; } - return async.series(jobs, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - }); - }); - - return it("should update the comments with the OT shifted comment", function(done) { - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - const { - ranges - } = data; - const comment = ranges.comments[0]; - comment.op.should.deep.equal({ c: "bar", p: 7, t: this.tid }); - return done(); - }); - }); - }); - }); - }); + return DocUpdaterApp.ensureRunning((error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc.id, + (error) => { + if (error != null) { + throw error + } + return async.series(jobs, (error) => { + if (error != null) { + throw error + } + return done() + }) + } + ) + }) + }) - describe("Loading ranges from persistence layer", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.user_id = DocUpdaterClient.randomId(); - this.id_seed = "587357bd35e64f6157"; - this.doc = { - id: DocUpdaterClient.randomId(), - lines: ["a123aa"] - }; - this.update = { - doc: this.doc.id, - op: [{ i: "456", p: 5 }], - v: 0, - meta: { user_id: this.user_id, tc: this.id_seed } - }; - MockWebApi.insertDoc(this.project_id, this.doc.id, { - lines: this.doc.lines, - version: 0, - ranges: { - changes: [{ - op: { i: "123", p: 1 }, - metadata: { - user_id: this.user_id, - ts: new Date() - } - }] - } - }); - return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, this.update, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - }); - }); - - it("should have preloaded the existing ranges", function(done) { - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - const {changes} = data.ranges; - changes[0].op.should.deep.equal({ i: "123", p: 1 }); - changes[1].op.should.deep.equal({ i: "456", p: 5 }); - return done(); - }); - }); - - return it("should flush the ranges to the persistence layer again", function(done) { - return DocUpdaterClient.flushDoc(this.project_id, this.doc.id, error => { - if (error != null) { throw error; } - return MockWebApi.getDocument(this.project_id, this.doc.id, (error, doc) => { - const {changes} = doc.ranges; - changes[0].op.should.deep.equal({ i: "123", p: 1 }); - changes[1].op.should.deep.equal({ i: "456", p: 5 }); - return done(); - }); - }); - }); - }); + it('should update the ranges', function (done) { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const change = ranges.changes[0] + change.op.should.deep.equal({ i: '456', p: 3 }) + change.id.should.equal(this.id_seed + '000001') + change.metadata.user_id.should.equal(this.user_id) + return done() + } + ) + }) - describe("accepting a change", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.user_id = DocUpdaterClient.randomId(); - this.id_seed = "587357bd35e64f6157"; - this.doc = { - id: DocUpdaterClient.randomId(), - lines: ["aaa"] - }; - this.update = { - doc: this.doc.id, - op: [{ i: "456", p: 1 }], - v: 0, - meta: { user_id: 
this.user_id, tc: this.id_seed } - }; - MockWebApi.insertDoc(this.project_id, this.doc.id, { - lines: this.doc.lines, - version: 0 - }); - return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, this.update, error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - const { - ranges - } = data; - const change = ranges.changes[0]; - change.op.should.deep.equal({ i: "456", p: 1 }); - change.id.should.equal(this.id_seed + "000001"); - change.metadata.user_id.should.equal(this.user_id); - return done(); - }); - } - , 200); - }); - }); - }); - - return it("should remove the change after accepting", function(done) { - return DocUpdaterClient.acceptChange(this.project_id, this.doc.id, this.id_seed + "000001", error => { - if (error != null) { throw error; } - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - expect(data.ranges.changes).to.be.undefined; - return done(); - }); - }); - }); - }); + return describe('Adding comments', function () { + describe('standalone', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['foo bar baz'] + } + this.updates = [ + { + doc: this.doc.id, + op: [ + { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) } + ], + v: 0 + } + ] + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0 + }) + const jobs = [] + for (const update of Array.from(this.updates)) { + ;((update) => { + return jobs.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + update, + callback + ) + ) + })(update) + } + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc.id, + (error) => { + if (error != null) { + throw error + } + return async.series(jobs, (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + }) + } + ) + }) - describe("deleting a comment range", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.user_id = DocUpdaterClient.randomId(); - this.doc = { - id: DocUpdaterClient.randomId(), - lines: ["foo bar"] - }; - this.update = { - doc: this.doc.id, - op: [{ c: "bar", p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], - v: 0 - }; - MockWebApi.insertDoc(this.project_id, this.doc.id, { - lines: this.doc.lines, - version: 0 - }); - return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, this.update, error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - const { - ranges - } = data; - const change = ranges.comments[0]; - change.op.should.deep.equal({ c: "bar", p: 4, t: this.tid }); - change.id.should.equal(this.tid); - return done(); - }); - } - , 200); - }); - }); - }); - - return it("should remove the comment range", function(done) { - return DocUpdaterClient.removeComment(this.project_id, this.doc.id, this.tid, (error, res) => { - if (error != null) { throw error; } - 
expect(res.statusCode).to.equal(204); - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - expect(data.ranges.comments).to.be.undefined; - return done(); - }); - }); - }); - }); - - describe("tripping range size limit", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.user_id = DocUpdaterClient.randomId(); - this.id_seed = DocUpdaterClient.randomId(); - this.doc = { - id: DocUpdaterClient.randomId(), - lines: ["aaa"] - }; - this.i = new Array(3 * 1024 * 1024).join("a"); - this.updates = [{ - doc: this.doc.id, - op: [{ i: this.i, p: 1 }], - v: 0, - meta: { user_id: this.user_id, tc: this.id_seed } - }]; - MockWebApi.insertDoc(this.project_id, this.doc.id, { - lines: this.doc.lines, - version: 0 - }); - const jobs = []; - for (const update of Array.from(this.updates)) { - (update => { - return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc.id, update, callback)); - })(update); - } - return DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { - if (error != null) { throw error; } - return async.series(jobs, (error) => { - if (error != null) { throw error; } - return setTimeout(done, 200); - }); - }); - }); - - return it("should not update the ranges", function(done) { - return DocUpdaterClient.getDoc(this.project_id, this.doc.id, (error, res, data) => { - if (error != null) { throw error; } - const { - ranges - } = data; - expect(ranges.changes).to.be.undefined; - return done(); - }); - }); - }); + return it('should update the ranges', function (done) { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const comment = ranges.comments[0] + comment.op.should.deep.equal({ c: 'bar', p: 4, t: this.tid }) + comment.id.should.equal(this.tid) + return done() + } + ) + }) + }) - return describe("deleting text surrounding a comment", function() { - before(function(done) { - this.project_id = DocUpdaterClient.randomId(); - this.user_id = DocUpdaterClient.randomId(); - this.doc_id = DocUpdaterClient.randomId(); - MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: ["foo bar baz"], - version: 0, - ranges: { - comments: [{ - op: { c: "a", p: 5, tid: (this.tid = DocUpdaterClient.randomId()) }, - metadata: { - user_id: this.user_id, - ts: new Date() - } - }] - } - }); - this.updates = [{ - doc: this.doc_id, - op: [{ d: "foo ", p: 0 }], - v: 0, - meta: { user_id: this.user_id } - }, { - doc: this.doc_id, - op: [{ d: "bar ", p: 0 }], - v: 1, - meta: { user_id: this.user_id } - }]; - const jobs = []; - for (const update of Array.from(this.updates)) { - (update => { - return jobs.push(callback => DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, update, callback)); - })(update); - } - return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - return async.series(jobs, function(error) { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, data) => { - if (error != null) { throw error; } - return done(); - }); - } - , 200); - }); - }); - }); - - return it("should write a snapshot from before the destructive change", function(done) { - return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, data) => { - if (error != null) { return done(error); } - return 
db.docSnapshots.find({ - project_id: ObjectId(this.project_id), - doc_id: ObjectId(this.doc_id) - }, (error, docSnapshots) => { - if (error != null) { return done(error); } - expect(docSnapshots.length).to.equal(1); - expect(docSnapshots[0].version).to.equal(1); - expect(docSnapshots[0].lines).to.deep.equal(["bar baz"]); - expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal({ - c: "a", - p: 1, - tid: this.tid - }); - return done(); - }); - }); - }); - }); -}); + return describe('with conflicting ops needing OT', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['foo bar baz'] + } + this.updates = [ + { + doc: this.doc.id, + op: [{ i: 'ABC', p: 3 }], + v: 0, + meta: { user_id: this.user_id } + }, + { + doc: this.doc.id, + op: [ + { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) } + ], + v: 0 + } + ] + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0 + }) + const jobs = [] + for (const update of Array.from(this.updates)) { + ;((update) => { + return jobs.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + update, + callback + ) + ) + })(update) + } + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc.id, + (error) => { + if (error != null) { + throw error + } + return async.series(jobs, (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + }) + } + ) + }) + + return it('should update the comments with the OT shifted comment', function (done) { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const comment = ranges.comments[0] + comment.op.should.deep.equal({ c: 'bar', p: 7, t: this.tid }) + return done() + } + ) + }) + }) + }) + }) + + describe('Loading ranges from persistence layer', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.id_seed = '587357bd35e64f6157' + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['a123aa'] + } + this.update = { + doc: this.doc.id, + op: [{ i: '456', p: 5 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed } + } + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + ranges: { + changes: [ + { + op: { i: '123', p: 1 }, + metadata: { + user_id: this.user_id, + ts: new Date() + } + } + ] + } + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc.id, + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + this.update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + } + ) + }) + + it('should have preloaded the existing ranges', function (done) { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { changes } = data.ranges + changes[0].op.should.deep.equal({ i: '123', p: 1 }) + changes[1].op.should.deep.equal({ i: '456', p: 5 }) + return done() + } + ) + }) + + return it('should flush the ranges to the persistence layer again', function (done) { + return DocUpdaterClient.flushDoc( + this.project_id, + this.doc.id, + (error) => { + if (error != null) { + throw error + } + return 
MockWebApi.getDocument( + this.project_id, + this.doc.id, + (error, doc) => { + const { changes } = doc.ranges + changes[0].op.should.deep.equal({ i: '123', p: 1 }) + changes[1].op.should.deep.equal({ i: '456', p: 5 }) + return done() + } + ) + } + ) + }) + }) + + describe('accepting a change', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.id_seed = '587357bd35e64f6157' + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['aaa'] + } + this.update = { + doc: this.doc.id, + op: [{ i: '456', p: 1 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed } + } + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0 + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc.id, + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + this.update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const change = ranges.changes[0] + change.op.should.deep.equal({ i: '456', p: 1 }) + change.id.should.equal(this.id_seed + '000001') + change.metadata.user_id.should.equal(this.user_id) + return done() + } + ) + }, 200) + } + ) + } + ) + }) + + return it('should remove the change after accepting', function (done) { + return DocUpdaterClient.acceptChange( + this.project_id, + this.doc.id, + this.id_seed + '000001', + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + expect(data.ranges.changes).to.be.undefined + return done() + } + ) + } + ) + }) + }) + + describe('deleting a comment range', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['foo bar'] + } + this.update = { + doc: this.doc.id, + op: [{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], + v: 0 + } + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0 + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc.id, + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + this.update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const change = ranges.comments[0] + change.op.should.deep.equal({ c: 'bar', p: 4, t: this.tid }) + change.id.should.equal(this.tid) + return done() + } + ) + }, 200) + } + ) + } + ) + }) + + return it('should remove the comment range', function (done) { + return DocUpdaterClient.removeComment( + this.project_id, + this.doc.id, + this.tid, + (error, res) => { + if (error != null) { + throw error + } + expect(res.statusCode).to.equal(204) + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + expect(data.ranges.comments).to.be.undefined + return done() + } + ) + } + ) + }) + }) + + describe('tripping range 
size limit', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.id_seed = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['aaa'] + } + this.i = new Array(3 * 1024 * 1024).join('a') + this.updates = [ + { + doc: this.doc.id, + op: [{ i: this.i, p: 1 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed } + } + ] + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0 + }) + const jobs = [] + for (const update of Array.from(this.updates)) { + ;((update) => { + return jobs.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + update, + callback + ) + ) + })(update) + } + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc.id, + (error) => { + if (error != null) { + throw error + } + return async.series(jobs, (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + }) + } + ) + }) + + return it('should not update the ranges', function (done) { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + expect(ranges.changes).to.be.undefined + return done() + } + ) + }) + }) + + return describe('deleting text surrounding a comment', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: ['foo bar baz'], + version: 0, + ranges: { + comments: [ + { + op: { + c: 'a', + p: 5, + tid: (this.tid = DocUpdaterClient.randomId()) + }, + metadata: { + user_id: this.user_id, + ts: new Date() + } + } + ] + } + }) + this.updates = [ + { + doc: this.doc_id, + op: [{ d: 'foo ', p: 0 }], + v: 0, + meta: { user_id: this.user_id } + }, + { + doc: this.doc_id, + op: [{ d: 'bar ', p: 0 }], + v: 1, + meta: { user_id: this.user_id } + } + ] + const jobs = [] + for (const update of Array.from(this.updates)) { + ;((update) => { + return jobs.push((callback) => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + (error) => { + if (error != null) { + throw error + } + return async.series(jobs, function (error) { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error != null) { + throw error + } + return done() + } + ) + }, 200) + }) + } + ) + }) + + return it('should write a snapshot from before the destructive change', function (done) { + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error != null) { + return done(error) + } + return db.docSnapshots.find( + { + project_id: ObjectId(this.project_id), + doc_id: ObjectId(this.doc_id) + }, + (error, docSnapshots) => { + if (error != null) { + return done(error) + } + expect(docSnapshots.length).to.equal(1) + expect(docSnapshots[0].version).to.equal(1) + expect(docSnapshots[0].lines).to.deep.equal(['bar baz']) + expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal({ + c: 'a', + p: 1, + tid: this.tid + }) + return done() + } + ) + } + ) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js 
b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 61fba26f17..6c13282ba5 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -12,316 +12,462 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require("sinon"); -const chai = require("chai"); -chai.should(); -const { - expect -} = require("chai"); -const Settings = require('settings-sharelatex'); -const rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater); -const Keys = Settings.redis.documentupdater.key_schema; +const sinon = require('sinon') +const chai = require('chai') +chai.should() +const { expect } = require('chai') +const Settings = require('settings-sharelatex') +const rclient_du = require('redis-sharelatex').createClient( + Settings.redis.documentupdater +) +const Keys = Settings.redis.documentupdater.key_schema -const MockTrackChangesApi = require("./helpers/MockTrackChangesApi"); -const MockProjectHistoryApi = require("./helpers/MockProjectHistoryApi"); -const MockWebApi = require("./helpers/MockWebApi"); -const DocUpdaterClient = require("./helpers/DocUpdaterClient"); -const DocUpdaterApp = require("./helpers/DocUpdaterApp"); +const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') +const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') -describe("Setting a document", function() { - before(function(done) { - this.lines = ["one", "two", "three"]; - this.version = 42; - this.update = { - doc: this.doc_id, - op: [{ - i: "one and a half\n", - p: 4 - }], - v: this.version - }; - this.result = ["one", "one and a half", "two", "three"]; - this.newLines = ["these", "are", "the", "new", "lines"]; - this.source = "dropbox"; - this.user_id = "user-id-123"; +describe('Setting a document', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.update = { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4 + } + ], + v: this.version + } + this.result = ['one', 'one and a half', 'two', 'three'] + this.newLines = ['these', 'are', 'the', 'new', 'lines'] + this.source = 'dropbox' + this.user_id = 'user-id-123' - sinon.spy(MockTrackChangesApi, "flushDoc"); - sinon.spy(MockProjectHistoryApi, "flushProject"); - sinon.spy(MockWebApi, "setDocument"); - return DocUpdaterApp.ensureRunning(done); - }); + sinon.spy(MockTrackChangesApi, 'flushDoc') + sinon.spy(MockProjectHistoryApi, 'flushProject') + sinon.spy(MockWebApi, 'setDocument') + return DocUpdaterApp.ensureRunning(done) + }) - after(function() { - MockTrackChangesApi.flushDoc.restore(); - MockProjectHistoryApi.flushProject.restore(); - return MockWebApi.setDocument.restore(); - }); + after(function () { + MockTrackChangesApi.flushDoc.restore() + MockProjectHistoryApi.flushProject.restore() + return MockWebApi.setDocument.restore() + }) - describe("when the updated doc exists in the doc updater", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - 
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, error => { - if (error != null) { throw error; } - return setTimeout(() => { - return DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.newLines, this.source, this.user_id, false, (error, res, body) => { - this.statusCode = res.statusCode; - return done(); - }); - } - , 200); - }); - }); - return null; - }); + describe('when the updated doc exists in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + (error) => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + this.statusCode = res.statusCode + return done() + } + ) + }, 200) + } + ) + }) + return null + }) - after(function() { - MockTrackChangesApi.flushDoc.reset(); - MockProjectHistoryApi.flushProject.reset(); - return MockWebApi.setDocument.reset(); - }); + after(function () { + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + return MockWebApi.setDocument.reset() + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - it("should send the updated doc lines and version to the web api", function() { - return MockWebApi.setDocument - .calledWith(this.project_id, this.doc_id, this.newLines) - .should.equal(true); - }); + it('should send the updated doc lines and version to the web api', function () { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true) + }) - it("should update the lines in the doc updater", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.lines.should.deep.equal(this.newLines); - return done(); - }); - return null; - }); + it('should update the lines in the doc updater', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.lines.should.deep.equal(this.newLines) + return done() + } + ) + return null + }) - it("should bump the version in the doc updater", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, doc) => { - doc.version.should.equal(this.version + 2); - return done(); - }); - return null; - }); + it('should bump the version in the doc updater', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + doc.version.should.equal(this.version + 2) + return done() + } + ) + return null + }) - return it("should leave the document in redis", function(done) { - rclient_du.get(Keys.docLines({doc_id: this.doc_id}), (error, lines) => { - if (error != null) { throw error; } - expect(JSON.parse(lines)).to.deep.equal(this.newLines); - return done(); - }); - return null; - }); - }); + return 
it('should leave the document in redis', function (done) { + rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => { + if (error != null) { + throw error + } + expect(JSON.parse(lines)).to.deep.equal(this.newLines) + return done() + }) + return null + }) + }) - describe("when the updated doc does not exist in the doc updater", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.newLines, this.source, this.user_id, false, (error, res, body) => { - this.statusCode = res.statusCode; - return setTimeout(done, 200); - }); - return null; - }); + describe('when the updated doc does not exist in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + this.statusCode = res.statusCode + return setTimeout(done, 200) + } + ) + return null + }) - after(function() { - MockTrackChangesApi.flushDoc.reset(); - MockProjectHistoryApi.flushProject.reset(); - return MockWebApi.setDocument.reset(); - }); + after(function () { + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + return MockWebApi.setDocument.reset() + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - it("should send the updated doc lines to the web api", function() { - return MockWebApi.setDocument - .calledWith(this.project_id, this.doc_id, this.newLines) - .should.equal(true); - }); + it('should send the updated doc lines to the web api', function () { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true) + }) - it("should flush track changes", function() { - return MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true); - }); + it('should flush track changes', function () { + return MockTrackChangesApi.flushDoc + .calledWith(this.doc_id) + .should.equal(true) + }) - it("should flush project history", function() { - return MockProjectHistoryApi.flushProject.calledWith(this.project_id).should.equal(true); - }); + it('should flush project history', function () { + return MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) - return it("should remove the document from redis", function(done) { - rclient_du.get(Keys.docLines({doc_id: this.doc_id}), (error, lines) => { - if (error != null) { throw error; } - expect(lines).to.not.exist; - return done(); - }); - return null; - }); - }); + return it('should remove the document from redis', function (done) { + rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => { + if (error != null) { + throw error + } + expect(lines).to.not.exist + return done() + }) + return null + }) + }) - describe("when the updated doc is too large for the body parser", function() { - before(function(done) { - [this.project_id, this.doc_id] = 
Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - this.newLines = []; - while (JSON.stringify(this.newLines).length < (Settings.max_doc_length + (64 * 1024))) { - this.newLines.push("(a long line of text)".repeat(10000)); - } - DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.newLines, this.source, this.user_id, false, (error, res, body) => { - this.statusCode = res.statusCode; - return setTimeout(done, 200); - }); - return null; - }); + describe('when the updated doc is too large for the body parser', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + this.newLines = [] + while ( + JSON.stringify(this.newLines).length < + Settings.max_doc_length + 64 * 1024 + ) { + this.newLines.push('(a long line of text)'.repeat(10000)) + } + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + this.statusCode = res.statusCode + return setTimeout(done, 200) + } + ) + return null + }) - after(function() { - MockTrackChangesApi.flushDoc.reset(); - MockProjectHistoryApi.flushProject.reset(); - return MockWebApi.setDocument.reset(); - }); + after(function () { + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + return MockWebApi.setDocument.reset() + }) - it("should return a 413 status code", function() { - return this.statusCode.should.equal(413); - }); + it('should return a 413 status code', function () { + return this.statusCode.should.equal(413) + }) - it("should not send the updated doc lines to the web api", function() { return MockWebApi.setDocument.called.should.equal(false); }); + it('should not send the updated doc lines to the web api', function () { + return MockWebApi.setDocument.called.should.equal(false) + }) - it("should not flush track changes", function() { return MockTrackChangesApi.flushDoc.called.should.equal(false); }); + it('should not flush track changes', function () { + return MockTrackChangesApi.flushDoc.called.should.equal(false) + }) - return it("should not flush project history", function() { return MockProjectHistoryApi.flushProject.called.should.equal(false); }); - }); + return it('should not flush project history', function () { + return MockProjectHistoryApi.flushProject.called.should.equal(false) + }) + }) - describe("when the updated doc is large but under the bodyParser and HTTPController size limit", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); + describe('when the updated doc is large but under the bodyParser and HTTPController size limit', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) - this.newLines = []; - while (JSON.stringify(this.newLines).length < (2 * 1024 * 1024)) { // limit in HTTPController - this.newLines.push("(a long line of text)".repeat(10000)); - } - this.newLines.pop(); // 
remove the line which took it over the limit - DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.newLines, this.source, this.user_id, false, (error, res, body) => { - this.statusCode = res.statusCode; - return setTimeout(done, 200); - }); - return null; - }); + this.newLines = [] + while (JSON.stringify(this.newLines).length < 2 * 1024 * 1024) { + // limit in HTTPController + this.newLines.push('(a long line of text)'.repeat(10000)) + } + this.newLines.pop() // remove the line which took it over the limit + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + this.statusCode = res.statusCode + return setTimeout(done, 200) + } + ) + return null + }) - after(function() { - MockTrackChangesApi.flushDoc.reset(); - MockProjectHistoryApi.flushProject.reset(); - return MockWebApi.setDocument.reset(); - }); + after(function () { + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + return MockWebApi.setDocument.reset() + }) - it("should return a 204 status code", function() { - return this.statusCode.should.equal(204); - }); + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) - return it("should send the updated doc lines to the web api", function() { - return MockWebApi.setDocument - .calledWith(this.project_id, this.doc_id, this.newLines) - .should.equal(true); - }); - }); + return it('should send the updated doc lines to the web api', function () { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true) + }) + }) - return describe("with track changes", function() { - before(function() { - this.lines = ["one", "one and a half", "two", "three"]; - this.id_seed = "587357bd35e64f6157"; - return this.update = { - doc: this.doc_id, - op: [{ - d: "one and a half\n", - p: 4 - }], - meta: { - tc: this.id_seed, - user_id: this.user_id - }, - v: this.version - }; - }); + return describe('with track changes', function () { + before(function () { + this.lines = ['one', 'one and a half', 'two', 'three'] + this.id_seed = '587357bd35e64f6157' + return (this.update = { + doc: this.doc_id, + op: [ + { + d: 'one and a half\n', + p: 4 + } + ], + meta: { + tc: this.id_seed, + user_id: this.user_id + }, + v: this.version + }) + }) - describe("with the undo flag", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, error => { - if (error != null) { throw error; } - // Go back to old lines, with undo flag - return DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.lines, this.source, this.user_id, true, (error, res, body) => { - this.statusCode = res.statusCode; - return setTimeout(done, 200); - }); - }); - }); - return null; - }); + describe('with the undo flag', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) 
=> { + if (error != null) { + throw error + } + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + (error) => { + if (error != null) { + throw error + } + // Go back to old lines, with undo flag + return DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.lines, + this.source, + this.user_id, + true, + (error, res, body) => { + this.statusCode = res.statusCode + return setTimeout(done, 200) + } + ) + } + ) + }) + return null + }) - after(function() { - MockTrackChangesApi.flushDoc.reset(); - MockProjectHistoryApi.flushProject.reset(); - return MockWebApi.setDocument.reset(); - }); + after(function () { + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + return MockWebApi.setDocument.reset() + }) - return it("should undo the tracked changes", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, data) => { - if (error != null) { throw error; } - const { - ranges - } = data; - expect(ranges.changes).to.be.undefined; - return done(); - }); - return null; - }); - }); + return it('should undo the tracked changes', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + expect(ranges.changes).to.be.undefined + return done() + } + ) + return null + }) + }) - return describe("without the undo flag", function() { - before(function(done) { - [this.project_id, this.doc_id] = Array.from([DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]); - MockWebApi.insertDoc(this.project_id, this.doc_id, {lines: this.lines, version: this.version}); - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { - if (error != null) { throw error; } - return DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, this.update, error => { - if (error != null) { throw error; } - // Go back to old lines, without undo flag - return DocUpdaterClient.setDocLines(this.project_id, this.doc_id, this.lines, this.source, this.user_id, false, (error, res, body) => { - this.statusCode = res.statusCode; - return setTimeout(done, 200); - }); - }); - }); - return null; - }); - - after(function() { - MockTrackChangesApi.flushDoc.reset(); - MockProjectHistoryApi.flushProject.reset(); - return MockWebApi.setDocument.reset(); - }); - - return it("should not undo the tracked changes", function(done) { - DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, data) => { - if (error != null) { throw error; } - const { - ranges - } = data; - expect(ranges.changes.length).to.equal(1); - return done(); - }); - return null; - }); - }); - }); -}); + return describe('without the undo flag', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + (error) => { + if (error != null) { + throw error + } + // Go back to old lines, without undo flag + return DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.lines, + this.source, + this.user_id, + false, + (error, res, body) => { + this.statusCode = res.statusCode + return setTimeout(done, 200) + } + ) + } + ) + }) + 
return null + }) + after(function () { + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + return MockWebApi.setDocument.reset() + }) + return it('should not undo the tracked changes', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + expect(ranges.changes.length).to.equal(1) + return done() + } + ) + return null + }) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js index a08fd82fac..b922032cd8 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js @@ -11,33 +11,37 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const app = require('../../../../app'); -require("logger-sharelatex").logger.level("fatal"); +const app = require('../../../../app') +require('logger-sharelatex').logger.level('fatal') module.exports = { - running: false, - initing: false, - callbacks: [], - ensureRunning(callback) { - if (callback == null) { callback = function(error) {}; } - if (this.running) { - return callback(); - } else if (this.initing) { - return this.callbacks.push(callback); - } else { - this.initing = true; - this.callbacks.push(callback); - return app.listen(3003, "localhost", error => { - if (error != null) { throw error; } - this.running = true; - return (() => { - const result = []; - for (callback of Array.from(this.callbacks)) { - result.push(callback()); - } - return result; - })(); - }); - } - } -}; + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { + callback = function (error) {} + } + if (this.running) { + return callback() + } else if (this.initing) { + return this.callbacks.push(callback) + } else { + this.initing = true + this.callbacks.push(callback) + return app.listen(3003, 'localhost', (error) => { + if (error != null) { + throw error + } + this.running = true + return (() => { + const result = [] + for (callback of Array.from(this.callbacks)) { + result.push(callback()) + } + return result + })() + }) + } + } +} diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 9f55291cd0..53793135eb 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -11,167 +11,268 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let DocUpdaterClient; -const Settings = require('settings-sharelatex'); -const rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater); -const keys = Settings.redis.documentupdater.key_schema; -const request = require("request").defaults({jar: false}); -const async = require("async"); +let DocUpdaterClient +const Settings = require('settings-sharelatex') +const rclient = require('redis-sharelatex').createClient( + Settings.redis.documentupdater +) +const keys = Settings.redis.documentupdater.key_schema +const request = require('request').defaults({ jar: false }) +const async = require('async') -const 
rclient_sub = require("redis-sharelatex").createClient(Settings.redis.pubsub); -rclient_sub.subscribe("applied-ops"); -rclient_sub.setMaxListeners(0); - -module.exports = (DocUpdaterClient = { - randomId() { - const chars = __range__(1, 24, true).map((i) => - Math.random().toString(16)[2]); - return chars.join(""); - }, - - subscribeToAppliedOps(callback) { - if (callback == null) { callback = function(message) {}; } - return rclient_sub.on("message", callback); - }, +const rclient_sub = require('redis-sharelatex').createClient( + Settings.redis.pubsub +) +rclient_sub.subscribe('applied-ops') +rclient_sub.setMaxListeners(0) - sendUpdate(project_id, doc_id, update, callback) { - if (callback == null) { callback = function(error) {}; } - return rclient.rpush(keys.pendingUpdates({doc_id}), JSON.stringify(update), (error) => { - if (error != null) { return callback(error); } - const doc_key = `${project_id}:${doc_id}`; - return rclient.sadd("DocsWithPendingUpdates", doc_key, (error) => { - if (error != null) { return callback(error); } - return rclient.rpush("pending-updates-list", doc_key, callback); - }); - }); - }, +module.exports = DocUpdaterClient = { + randomId() { + const chars = __range__(1, 24, true).map( + (i) => Math.random().toString(16)[2] + ) + return chars.join('') + }, - sendUpdates(project_id, doc_id, updates, callback) { - if (callback == null) { callback = function(error) {}; } - return DocUpdaterClient.preloadDoc(project_id, doc_id, (error) => { - if (error != null) { return callback(error); } - const jobs = []; - for (const update of Array.from(updates)) { - ((update => jobs.push(callback => DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback))))(update); - } - return async.series(jobs, err => DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback)); - }); - }, + subscribeToAppliedOps(callback) { + if (callback == null) { + callback = function (message) {} + } + return rclient_sub.on('message', callback) + }, - waitForPendingUpdates(project_id, doc_id, callback) { - return async.retry({times: 30, interval: 100}, cb => rclient.llen(keys.pendingUpdates({doc_id}), (err, length) => { - if (length > 0) { - return cb(new Error("updates still pending")); - } else { - return cb(); - } + sendUpdate(project_id, doc_id, update, callback) { + if (callback == null) { + callback = function (error) {} + } + return rclient.rpush( + keys.pendingUpdates({ doc_id }), + JSON.stringify(update), + (error) => { + if (error != null) { + return callback(error) + } + const doc_key = `${project_id}:${doc_id}` + return rclient.sadd('DocsWithPendingUpdates', doc_key, (error) => { + if (error != null) { + return callback(error) + } + return rclient.rpush('pending-updates-list', doc_key, callback) }) - , callback); - }, + } + ) + }, - getDoc(project_id, doc_id, callback) { - if (callback == null) { callback = function(error, res, body) {}; } - return request.get(`http://localhost:3003/project/${project_id}/doc/${doc_id}`, (error, res, body) => { - if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { - body = JSON.parse(body); - } - return callback(error, res, body); - }); - }, + sendUpdates(project_id, doc_id, updates, callback) { + if (callback == null) { + callback = function (error) {} + } + return DocUpdaterClient.preloadDoc(project_id, doc_id, (error) => { + if (error != null) { + return callback(error) + } + const jobs = [] + for (const update of Array.from(updates)) { + ;((update) => + jobs.push((callback) => + 
DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback) + ))(update) + } + return async.series(jobs, (err) => + DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback) + ) + }) + }, - getDocAndRecentOps(project_id, doc_id, fromVersion, callback) { - if (callback == null) { callback = function(error, res, body) {}; } - return request.get(`http://localhost:3003/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`, (error, res, body) => { - if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { - body = JSON.parse(body); - } - return callback(error, res, body); - }); - }, + waitForPendingUpdates(project_id, doc_id, callback) { + return async.retry( + { times: 30, interval: 100 }, + (cb) => + rclient.llen(keys.pendingUpdates({ doc_id }), (err, length) => { + if (length > 0) { + return cb(new Error('updates still pending')) + } else { + return cb() + } + }), + callback + ) + }, - preloadDoc(project_id, doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - return DocUpdaterClient.getDoc(project_id, doc_id, callback); - }, + getDoc(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error, res, body) {} + } + return request.get( + `http://localhost:3003/project/${project_id}/doc/${doc_id}`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + return callback(error, res, body) + } + ) + }, - flushDoc(project_id, doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - return request.post(`http://localhost:3003/project/${project_id}/doc/${doc_id}/flush`, (error, res, body) => callback(error, res, body)); - }, + getDocAndRecentOps(project_id, doc_id, fromVersion, callback) { + if (callback == null) { + callback = function (error, res, body) {} + } + return request.get( + `http://localhost:3003/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + return callback(error, res, body) + } + ) + }, - setDocLines(project_id, doc_id, lines, source, user_id, undoing, callback) { - if (callback == null) { callback = function(error) {}; } - return request.post({ - url: `http://localhost:3003/project/${project_id}/doc/${doc_id}`, - json: { - lines, - source, - user_id, - undoing - } - }, (error, res, body) => callback(error, res, body)); - }, + preloadDoc(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return DocUpdaterClient.getDoc(project_id, doc_id, callback) + }, - deleteDoc(project_id, doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - return request.del(`http://localhost:3003/project/${project_id}/doc/${doc_id}`, (error, res, body) => callback(error, res, body)); - }, + flushDoc(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return request.post( + `http://localhost:3003/project/${project_id}/doc/${doc_id}/flush`, + (error, res, body) => callback(error, res, body) + ) + }, - flushProject(project_id, callback) { - if (callback == null) { callback = function() {}; } - return request.post(`http://localhost:3003/project/${project_id}/flush`, callback); - }, + setDocLines(project_id, doc_id, lines, source, user_id, undoing, callback) { + if (callback == null) { + callback = function (error) {} + } + return request.post( + { + url: 
`http://localhost:3003/project/${project_id}/doc/${doc_id}`, + json: { + lines, + source, + user_id, + undoing + } + }, + (error, res, body) => callback(error, res, body) + ) + }, - deleteProject(project_id, callback) { - if (callback == null) { callback = function() {}; } - return request.del(`http://localhost:3003/project/${project_id}`, callback); - }, + deleteDoc(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return request.del( + `http://localhost:3003/project/${project_id}/doc/${doc_id}`, + (error, res, body) => callback(error, res, body) + ) + }, - deleteProjectOnShutdown(project_id, callback) { - if (callback == null) { callback = function() {}; } - return request.del(`http://localhost:3003/project/${project_id}?background=true&shutdown=true`, callback); - }, + flushProject(project_id, callback) { + if (callback == null) { + callback = function () {} + } + return request.post( + `http://localhost:3003/project/${project_id}/flush`, + callback + ) + }, - flushOldProjects(callback) { - if (callback == null) { callback = function() {}; } - return request.get("http://localhost:3003/flush_queued_projects?min_delete_age=1", callback); - }, + deleteProject(project_id, callback) { + if (callback == null) { + callback = function () {} + } + return request.del(`http://localhost:3003/project/${project_id}`, callback) + }, - acceptChange(project_id, doc_id, change_id, callback) { - if (callback == null) { callback = function() {}; } - return request.post(`http://localhost:3003/project/${project_id}/doc/${doc_id}/change/${change_id}/accept`, callback); - }, + deleteProjectOnShutdown(project_id, callback) { + if (callback == null) { + callback = function () {} + } + return request.del( + `http://localhost:3003/project/${project_id}?background=true&shutdown=true`, + callback + ) + }, - removeComment(project_id, doc_id, comment, callback) { - if (callback == null) { callback = function() {}; } - return request.del(`http://localhost:3003/project/${project_id}/doc/${doc_id}/comment/${comment}`, callback); - }, + flushOldProjects(callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + 'http://localhost:3003/flush_queued_projects?min_delete_age=1', + callback + ) + }, - getProjectDocs(project_id, projectStateHash, callback) { - if (callback == null) { callback = function() {}; } - return request.get(`http://localhost:3003/project/${project_id}/doc?state=${projectStateHash}`, (error, res, body) => { - if ((body != null) && (res.statusCode >= 200) && (res.statusCode < 300)) { - body = JSON.parse(body); - } - return callback(error, res, body); - }); - }, + acceptChange(project_id, doc_id, change_id, callback) { + if (callback == null) { + callback = function () {} + } + return request.post( + `http://localhost:3003/project/${project_id}/doc/${doc_id}/change/${change_id}/accept`, + callback + ) + }, - sendProjectUpdate(project_id, userId, docUpdates, fileUpdates, version, callback) { - if (callback == null) { callback = function(error) {}; } - return request.post({ - url: `http://localhost:3003/project/${project_id}`, - json: { userId, docUpdates, fileUpdates, version } - }, (error, res, body) => callback(error, res, body)); - } -}); + removeComment(project_id, doc_id, comment, callback) { + if (callback == null) { + callback = function () {} + } + return request.del( + `http://localhost:3003/project/${project_id}/doc/${doc_id}/comment/${comment}`, + callback + ) + }, + + getProjectDocs(project_id, projectStateHash, 
callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + `http://localhost:3003/project/${project_id}/doc?state=${projectStateHash}`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + return callback(error, res, body) + } + ) + }, + + sendProjectUpdate( + project_id, + userId, + docUpdates, + fileUpdates, + version, + callback + ) { + if (callback == null) { + callback = function (error) {} + } + return request.post( + { + url: `http://localhost:3003/project/${project_id}`, + json: { userId, docUpdates, fileUpdates, version } + }, + (error, res, body) => callback(error, res, body) + ) + } +} function __range__(left, right, inclusive) { - const range = []; - const ascending = left < right; - const end = !inclusive ? right : ascending ? right + 1 : right - 1; + const range = [] + const ascending = left < right + const end = !inclusive ? right : ascending ? right + 1 : right - 1 for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) { - range.push(i); + range.push(i) } - return range; -} \ No newline at end of file + return range +} diff --git a/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js index 84e2d7075c..6d72c77baf 100644 --- a/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js +++ b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js @@ -10,31 +10,35 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let MockProjectHistoryApi; -const express = require("express"); -const app = express(); +let MockProjectHistoryApi +const express = require('express') +const app = express() -module.exports = (MockProjectHistoryApi = { - flushProject(doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - return callback(); - }, +module.exports = MockProjectHistoryApi = { + flushProject(doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return callback() + }, - run() { - app.post("/project/:project_id/flush", (req, res, next) => { - return this.flushProject(req.params.project_id, (error) => { - if (error != null) { - return res.sendStatus(500); - } else { - return res.sendStatus(204); - } - }); - }); + run() { + app.post('/project/:project_id/flush', (req, res, next) => { + return this.flushProject(req.params.project_id, (error) => { + if (error != null) { + return res.sendStatus(500) + } else { + return res.sendStatus(204) + } + }) + }) - return app.listen(3054, (error) => { - if (error != null) { throw error; } - }); - } -}); + return app.listen(3054, (error) => { + if (error != null) { + throw error + } + }) + } +} -MockProjectHistoryApi.run(); +MockProjectHistoryApi.run() diff --git a/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js b/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js index ef14b85beb..319a02b7ca 100644 --- a/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js +++ b/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js @@ -10,35 +10,40 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let MockTrackChangesApi; -const express = require("express"); 
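The mock helpers in this patch series all share one shape: a tiny Express app whose route handlers delegate to an overridable method, so the acceptance tests can swap `flushDoc`/`flushProject` for sinon stubs (hence the `.reset()` and `.calledWith(...)` assertions seen earlier) and observe the calls. A minimal sketch of that pattern; the port, path, and `onFlush` name are illustrative, not values from this patch:

    const express = require('express')

    // Tests replace `onFlush` with a sinon stub and assert it was called.
    function createMockFlushService(port, onFlush) {
      const app = express()
      app.post('/project/:project_id/flush', (req, res) => {
        onFlush(req.params.project_id, (error) => {
          // mirror the helpers above: 500 on error, 204 on success
          res.sendStatus(error != null ? 500 : 204)
        })
      })
      return app.listen(port, (error) => {
        if (error != null) throw error
      })
    }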
-const app = express(); +let MockTrackChangesApi +const express = require('express') +const app = express() -module.exports = (MockTrackChangesApi = { - flushDoc(doc_id, callback) { - if (callback == null) { callback = function(error) {}; } - return callback(); - }, +module.exports = MockTrackChangesApi = { + flushDoc(doc_id, callback) { + if (callback == null) { + callback = function (error) {} + } + return callback() + }, - run() { - app.post("/project/:project_id/doc/:doc_id/flush", (req, res, next) => { - return this.flushDoc(req.params.doc_id, (error) => { - if (error != null) { - return res.sendStatus(500); - } else { - return res.sendStatus(204); - } - }); - }); + run() { + app.post('/project/:project_id/doc/:doc_id/flush', (req, res, next) => { + return this.flushDoc(req.params.doc_id, (error) => { + if (error != null) { + return res.sendStatus(500) + } else { + return res.sendStatus(204) + } + }) + }) - return app.listen(3015, (error) => { - if (error != null) { throw error; } - }).on("error", (error) => { - console.error("error starting MockTrackChangesApi:", error.message); - return process.exit(1); - }); - } -}); - -MockTrackChangesApi.run(); + return app + .listen(3015, (error) => { + if (error != null) { + throw error + } + }) + .on('error', (error) => { + console.error('error starting MockTrackChangesApi:', error.message) + return process.exit(1) + }) + } +} +MockTrackChangesApi.run() diff --git a/services/document-updater/test/acceptance/js/helpers/MockWebApi.js b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js index 653dc3bf2a..fc6bd49e27 100644 --- a/services/document-updater/test/acceptance/js/helpers/MockWebApi.js +++ b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js @@ -11,72 +11,113 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let MockWebApi; -const express = require("express"); -const bodyParser = require("body-parser"); -const app = express(); -const MAX_REQUEST_SIZE = 2*((2*1024*1024) + (64*1024)); +let MockWebApi +const express = require('express') +const bodyParser = require('body-parser') +const app = express() +const MAX_REQUEST_SIZE = 2 * (2 * 1024 * 1024 + 64 * 1024) -module.exports = (MockWebApi = { - docs: {}, +module.exports = MockWebApi = { + docs: {}, - clearDocs() { return this.docs = {}; }, + clearDocs() { + return (this.docs = {}) + }, - insertDoc(project_id, doc_id, doc) { - if (doc.version == null) { doc.version = 0; } - if (doc.lines == null) { doc.lines = []; } - doc.pathname = '/a/b/c.tex'; - return this.docs[`${project_id}:${doc_id}`] = doc; - }, + insertDoc(project_id, doc_id, doc) { + if (doc.version == null) { + doc.version = 0 + } + if (doc.lines == null) { + doc.lines = [] + } + doc.pathname = '/a/b/c.tex' + return (this.docs[`${project_id}:${doc_id}`] = doc) + }, - setDocument(project_id, doc_id, lines, version, ranges, lastUpdatedAt, lastUpdatedBy, callback) { - if (callback == null) { callback = function(error) {}; } - const doc = this.docs[`${project_id}:${doc_id}`] || (this.docs[`${project_id}:${doc_id}`] = {}); - doc.lines = lines; - doc.version = version; - doc.ranges = ranges; - doc.pathname = '/a/b/c.tex'; - doc.lastUpdatedAt = lastUpdatedAt; - doc.lastUpdatedBy = lastUpdatedBy; - return callback(null); - }, + setDocument( + project_id, + doc_id, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy, + callback + ) { + if (callback == null) { + callback = function (error) {} + } + 
const doc = + this.docs[`${project_id}:${doc_id}`] || + (this.docs[`${project_id}:${doc_id}`] = {}) + doc.lines = lines + doc.version = version + doc.ranges = ranges + doc.pathname = '/a/b/c.tex' + doc.lastUpdatedAt = lastUpdatedAt + doc.lastUpdatedBy = lastUpdatedBy + return callback(null) + }, - getDocument(project_id, doc_id, callback) { - if (callback == null) { callback = function(error, doc) {}; } - return callback(null, this.docs[`${project_id}:${doc_id}`]); - }, + getDocument(project_id, doc_id, callback) { + if (callback == null) { + callback = function (error, doc) {} + } + return callback(null, this.docs[`${project_id}:${doc_id}`]) + }, - run() { - app.get("/project/:project_id/doc/:doc_id", (req, res, next) => { - return this.getDocument(req.params.project_id, req.params.doc_id, (error, doc) => { - if (error != null) { - return res.sendStatus(500); - } else if (doc != null) { - return res.send(JSON.stringify(doc)); - } else { - return res.sendStatus(404); - } - }); - }); + run() { + app.get('/project/:project_id/doc/:doc_id', (req, res, next) => { + return this.getDocument( + req.params.project_id, + req.params.doc_id, + (error, doc) => { + if (error != null) { + return res.sendStatus(500) + } else if (doc != null) { + return res.send(JSON.stringify(doc)) + } else { + return res.sendStatus(404) + } + } + ) + }) - app.post("/project/:project_id/doc/:doc_id", bodyParser.json({limit: MAX_REQUEST_SIZE}), (req, res, next) => { - return MockWebApi.setDocument(req.params.project_id, req.params.doc_id, req.body.lines, req.body.version, req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, (error) => { - if (error != null) { - return res.sendStatus(500); - } else { - return res.sendStatus(204); - } - }); - }); + app.post( + '/project/:project_id/doc/:doc_id', + bodyParser.json({ limit: MAX_REQUEST_SIZE }), + (req, res, next) => { + return MockWebApi.setDocument( + req.params.project_id, + req.params.doc_id, + req.body.lines, + req.body.version, + req.body.ranges, + req.body.lastUpdatedAt, + req.body.lastUpdatedBy, + (error) => { + if (error != null) { + return res.sendStatus(500) + } else { + return res.sendStatus(204) + } + } + ) + } + ) - return app.listen(3000, (error) => { - if (error != null) { throw error; } - }).on("error", (error) => { - console.error("error starting MockWebApi:", error.message); - return process.exit(1); - }); - } -}); - -MockWebApi.run(); + return app + .listen(3000, (error) => { + if (error != null) { + throw error + } + }) + .on('error', (error) => { + console.error('error starting MockWebApi:', error.message) + return process.exit(1) + }) + } +} +MockWebApi.run() From b6cc463a1ef3731fc90f948b9cd7131a2926d7e5 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:12:53 +0200 Subject: [PATCH 624/769] decaffeinate: Rename run.coffee from .coffee to .js --- .../document-updater/test/stress/coffee/{run.coffee => run.js} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/test/stress/coffee/{run.coffee => run.js} (100%) diff --git a/services/document-updater/test/stress/coffee/run.coffee b/services/document-updater/test/stress/coffee/run.js similarity index 100% rename from services/document-updater/test/stress/coffee/run.coffee rename to services/document-updater/test/stress/coffee/run.js From 3b6c0d8ca6ede70ee929e2212e3619352773b457 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:12:55 +0200 Subject: [PATCH 625/769] decaffeinate: Convert run.coffee to JS --- 
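This conversion is a mechanical decaffeinate pass rather than a rewrite: CoffeeScript default arguments like `callback = (error) ->` become explicit `== null` guards, and implicit returns become explicit `return` statements. As a representative pair from this very file, the CoffeeScript method `runForNUpdates: (n, callback = (error) ->) ->` comes out as:

    // decaffeinate output style: explicit defaulting, explicit returns
    runForNUpdates(n, callback) {
      if (callback == null) {
        callback = function (error) {}
      }
      this.updateCallback = callback
      this.updateCount = n
      return this.continue()
    }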
.../test/stress/coffee/run.js | 459 ++++++++++-------- 1 file changed, 267 insertions(+), 192 deletions(-) diff --git a/services/document-updater/test/stress/coffee/run.js b/services/document-updater/test/stress/coffee/run.js index 2c48583014..351e726bbc 100644 --- a/services/document-updater/test/stress/coffee/run.js +++ b/services/document-updater/test/stress/coffee/run.js @@ -1,209 +1,284 @@ -DocUpdaterClient = require "../../acceptance/coffee/helpers/DocUpdaterClient" -# MockTrackChangesApi = require "../../acceptance/js/helpers/MockTrackChangesApi" -# MockWebApi = require "../../acceptance/js/helpers/MockWebApi" -assert = require "assert" -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS202: Simplify dynamic range loops + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const DocUpdaterClient = require("../../acceptance/coffee/helpers/DocUpdaterClient"); +// MockTrackChangesApi = require "../../acceptance/js/helpers/MockTrackChangesApi" +// MockWebApi = require "../../acceptance/js/helpers/MockWebApi" +const assert = require("assert"); +const async = require("async"); -insert = (string, pos, content) -> - result = string.slice(0, pos) + content + string.slice(pos) - return result +const insert = function(string, pos, content) { + const result = string.slice(0, pos) + content + string.slice(pos); + return result; +}; -transform = (op1, op2) -> - if op2.p < op1.p +const transform = function(op1, op2) { + if (op2.p < op1.p) { return { - p: op1.p + op2.i.length + p: op1.p + op2.i.length, i: op1.i - } - else - return op1 + }; + } else { + return op1; + } +}; -class StressTestClient - constructor: (@options = {}) -> - @options.updateDelay ?= 200 - @project_id = @options.project_id or DocUpdaterClient.randomId() - @doc_id = @options.doc_id or DocUpdaterClient.randomId() - @pos = @options.pos or 0 - @content = @options.content or "" +class StressTestClient { + constructor(options) { + if (options == null) { options = {}; } + this.options = options; + if (this.options.updateDelay == null) { this.options.updateDelay = 200; } + this.project_id = this.options.project_id || DocUpdaterClient.randomId(); + this.doc_id = this.options.doc_id || DocUpdaterClient.randomId(); + this.pos = this.options.pos || 0; + this.content = this.options.content || ""; - @client_id = DocUpdaterClient.randomId() - @version = @options.version or 0 - @inflight_op = null - @charCode = 0 + this.client_id = DocUpdaterClient.randomId(); + this.version = this.options.version || 0; + this.inflight_op = null; + this.charCode = 0; - @counts = { - conflicts: 0 - local_updates: 0 - remote_updates: 0 + this.counts = { + conflicts: 0, + local_updates: 0, + remote_updates: 0, max_delay: 0 - } + }; - DocUpdaterClient.subscribeToAppliedOps (channel, update) => - update = JSON.parse(update) - if update.error? - console.error new Error("Error from server: '#{update.error}'") - return - if update.doc_id == @doc_id - @processReply(update) - - sendUpdate: () -> - data = String.fromCharCode(65 + @charCode++ % 26) - @content = insert(@content, @pos, data) - @inflight_op = { - i: data - p: @pos++ - } - @resendUpdate() - @inflight_op_sent = Date.now() - - resendUpdate: () -> - assert(@inflight_op?) 
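The `transform` helper converted above is the insert-versus-insert case of operational transformation: when a remote insert lands at an earlier position than the local in-flight insert, the local op's position shifts right by the remote text's length; otherwise it is returned unchanged. A small standalone, runnable example of the behaviour the stress client relies on:

    const transform = (op1, op2) =>
      op2.p < op1.p ? { p: op1.p + op2.i.length, i: op1.i } : op1

    const local = { i: 'X', p: 5 } // our in-flight insert
    const remote = { i: 'abc', p: 2 } // concurrent insert before it
    console.log(transform(local, remote)) // { p: 8, i: 'X' }: shifted past 'abc'
    console.log(transform(remote, local)) // { i: 'abc', p: 2 }: unaffected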
- DocUpdaterClient.sendUpdate( - @project_id, @doc_id - { - doc: @doc_id - op: [@inflight_op] - v: @version - meta: - source: @client_id - dupIfSource: [@client_id] + DocUpdaterClient.subscribeToAppliedOps((channel, update) => { + update = JSON.parse(update); + if (update.error != null) { + console.error(new Error(`Error from server: '${update.error}'`)); + return; } - ) - @update_timer = setTimeout () => - console.log "[#{new Date()}] \t[#{@client_id.slice(0,4)}] WARN: Resending update after 5 seconds" - @resendUpdate() - , 5000 + if (update.doc_id === this.doc_id) { + return this.processReply(update); + } + }); + } - processReply: (update) -> - if update.op.v != @version - if update.op.v < @version - console.log "[#{new Date()}] \t[#{@client_id.slice(0,4)}] WARN: Duplicate ack (already seen version)" - return - else - console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] ERROR: Version jumped ahead (client: #{@version}, op: #{update.op.v})" - @version++ - if update.op.meta.source == @client_id - if @inflight_op? - @counts.local_updates++ - @inflight_op = null - clearTimeout @update_timer - delay = Date.now() - @inflight_op_sent - @counts.max_delay = Math.max(@counts.max_delay, delay) - @continue() - else - console.log "[#{new Date()}] \t[#{@client_id.slice(0,4)}] WARN: Duplicate ack" - else - assert(update.op.op.length == 1) - @counts.remote_updates++ - external_op = update.op.op[0] - if @inflight_op? - @counts.conflicts++ - @inflight_op = transform(@inflight_op, external_op) - external_op = transform(external_op, @inflight_op) - if external_op.p < @pos - @pos += external_op.i.length - @content = insert(@content, external_op.p, external_op.i) + sendUpdate() { + const data = String.fromCharCode(65 + (this.charCode++ % 26)); + this.content = insert(this.content, this.pos, data); + this.inflight_op = { + i: data, + p: this.pos++ + }; + this.resendUpdate(); + return this.inflight_op_sent = Date.now(); + } - continue: () -> - if @updateCount > 0 - @updateCount-- - setTimeout () => - @sendUpdate() - , @options.updateDelay * ( 0.5 + Math.random() ) - else - @updateCallback() - - runForNUpdates: (n, callback = (error) ->) -> - @updateCallback = callback - @updateCount = n - @continue() - - check: (callback = (error) ->) -> - DocUpdaterClient.getDoc @project_id, @doc_id, (error, res, body) => - throw error if error? - if !body.lines? - return console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] ERROR: Invalid response from get doc (#{doc_id})", body - content = body.lines.join("\n") - version = body.version - if content != @content - if version == @version - console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] Error: Client content does not match server." - console.error "Server: #{content.split('a')}" - console.error "Client: #{@content.split('a')}" - else - console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] Error: Version mismatch (Server: '#{version}', Client: '#{@version}')" - - if !@isContentValid(@content) - for chunk, i in @content.split("") - if chunk? and chunk != "a" - console.log chunk, i - throw new Error("bad content") - callback() - - isChunkValid: (chunk) -> - char = 0 - for letter, i in chunk - if letter.charCodeAt(0) != 65 + i % 26 - console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] Invalid Chunk:", chunk - return false - return true - - isContentValid: (content) -> - for chunk in content.split('a') - if chunk? 
and chunk != "" - if !@isChunkValid(chunk) - - console.error "[#{new Date()}] \t[#{@client_id.slice(0,4)}] Invalid content", content - return false - return true - - -checkDocument = (project_id, doc_id, clients, callback = (error) ->) -> - jobs = clients.map (client) -> - (cb) -> client.check cb - async.parallel jobs, callback - -printSummary = (doc_id, clients) -> - slot = require('cluster-key-slot') - now = new Date() - console.log "[#{now}] [#{doc_id.slice(0,4)} (slot: #{slot(doc_id)})] #{clients.length} clients..." - for client in clients - console.log "[#{now}] \t[#{client.client_id.slice(0,4)}] { local: #{client.counts.local_updates }, remote: #{client.counts.remote_updates}, conflicts: #{client.counts.conflicts}, max_delay: #{client.counts.max_delay} }" - client.counts = { - local_updates: 0 - remote_updates: 0 - conflicts: 0 - max_delay: 0 + resendUpdate() { + assert(this.inflight_op != null); + DocUpdaterClient.sendUpdate( + this.project_id, this.doc_id, + { + doc: this.doc_id, + op: [this.inflight_op], + v: this.version, + meta: { + source: this.client_id + }, + dupIfSource: [this.client_id] + } + ); + return this.update_timer = setTimeout(() => { + console.log(`[${new Date()}] \t[${this.client_id.slice(0,4)}] WARN: Resending update after 5 seconds`); + return this.resendUpdate(); } + , 5000); + } + + processReply(update) { + if (update.op.v !== this.version) { + if (update.op.v < this.version) { + console.log(`[${new Date()}] \t[${this.client_id.slice(0,4)}] WARN: Duplicate ack (already seen version)`); + return; + } else { + console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] ERROR: Version jumped ahead (client: ${this.version}, op: ${update.op.v})`); + } + } + this.version++; + if (update.op.meta.source === this.client_id) { + if (this.inflight_op != null) { + this.counts.local_updates++; + this.inflight_op = null; + clearTimeout(this.update_timer); + const delay = Date.now() - this.inflight_op_sent; + this.counts.max_delay = Math.max(this.counts.max_delay, delay); + return this.continue(); + } else { + return console.log(`[${new Date()}] \t[${this.client_id.slice(0,4)}] WARN: Duplicate ack`); + } + } else { + assert(update.op.op.length === 1); + this.counts.remote_updates++; + let external_op = update.op.op[0]; + if (this.inflight_op != null) { + this.counts.conflicts++; + this.inflight_op = transform(this.inflight_op, external_op); + external_op = transform(external_op, this.inflight_op); + } + if (external_op.p < this.pos) { + this.pos += external_op.i.length; + } + return this.content = insert(this.content, external_op.p, external_op.i); + } + } + + continue() { + if (this.updateCount > 0) { + this.updateCount--; + return setTimeout(() => { + return this.sendUpdate(); + } + , this.options.updateDelay * ( 0.5 + Math.random() )); + } else { + return this.updateCallback(); + } + } + + runForNUpdates(n, callback) { + if (callback == null) { callback = function(error) {}; } + this.updateCallback = callback; + this.updateCount = n; + return this.continue(); + } + + check(callback) { + if (callback == null) { callback = function(error) {}; } + return DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, body) => { + if (error != null) { throw error; } + if ((body.lines == null)) { + return console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] ERROR: Invalid response from get doc (${doc_id})`, body); + } + const content = body.lines.join("\n"); + const { + version + } = body; + if (content !== this.content) { + if (version === this.version) { + 
console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] Error: Client content does not match server.`); + console.error(`Server: ${content.split('a')}`); + console.error(`Client: ${this.content.split('a')}`); + } else { + console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] Error: Version mismatch (Server: '${version}', Client: '${this.version}')`); + } + } -CLIENT_COUNT = parseInt(process.argv[2], 10) -UPDATE_DELAY = parseInt(process.argv[3], 10) -SAMPLE_INTERVAL = parseInt(process.argv[4], 10) + if (!this.isContentValid(this.content)) { + const iterable = this.content.split(""); + for (let i = 0; i < iterable.length; i++) { + const chunk = iterable[i]; + if ((chunk != null) && (chunk !== "a")) { + console.log(chunk, i); + } + } + throw new Error("bad content"); + } + return callback(); + }); + } -for doc_and_project_id in process.argv.slice(5) - do (doc_and_project_id) -> - [project_id, doc_id] = doc_and_project_id.split(":") - console.log {project_id, doc_id} - DocUpdaterClient.setDocLines project_id, doc_id, [(new Array(CLIENT_COUNT + 2)).join('a')], null, null, (error) -> - throw error if error? - DocUpdaterClient.getDoc project_id, doc_id, (error, res, body) => - throw error if error? - if !body.lines? - return console.error "[#{new Date()}] ERROR: Invalid response from get doc (#{doc_id})", body - content = body.lines.join("\n") - version = body.version + isChunkValid(chunk) { + const char = 0; + for (let i = 0; i < chunk.length; i++) { + const letter = chunk[i]; + if (letter.charCodeAt(0) !== (65 + (i % 26))) { + console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] Invalid Chunk:`, chunk); + return false; + } + } + return true; + } + + isContentValid(content) { + for (let chunk of Array.from(content.split('a'))) { + if ((chunk != null) && (chunk !== "")) { + if (!this.isChunkValid(chunk)) { + + console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] Invalid content`, content); + return false; + } + } + } + return true; + } +} + + +const checkDocument = function(project_id, doc_id, clients, callback) { + if (callback == null) { callback = function(error) {}; } + const jobs = clients.map(client => cb => client.check(cb)); + return async.parallel(jobs, callback); +}; + +const printSummary = function(doc_id, clients) { + const slot = require('cluster-key-slot'); + const now = new Date(); + console.log(`[${now}] [${doc_id.slice(0,4)} (slot: ${slot(doc_id)})] ${clients.length} clients...`); + return (() => { + const result = []; + for (let client of Array.from(clients)) { + console.log(`[${now}] \t[${client.client_id.slice(0,4)}] { local: ${client.counts.local_updates }, remote: ${client.counts.remote_updates}, conflicts: ${client.counts.conflicts}, max_delay: ${client.counts.max_delay} }`); + result.push(client.counts = { + local_updates: 0, + remote_updates: 0, + conflicts: 0, + max_delay: 0 + }); + } + return result; + })(); +}; + +const CLIENT_COUNT = parseInt(process.argv[2], 10); +const UPDATE_DELAY = parseInt(process.argv[3], 10); +const SAMPLE_INTERVAL = parseInt(process.argv[4], 10); + +for (let doc_and_project_id of Array.from(process.argv.slice(5))) { + (function(doc_and_project_id) { + const [project_id, doc_id] = Array.from(doc_and_project_id.split(":")); + console.log({project_id, doc_id}); + return DocUpdaterClient.setDocLines(project_id, doc_id, [(new Array(CLIENT_COUNT + 2)).join('a')], null, null, function(error) { + if (error != null) { throw error; } + return DocUpdaterClient.getDoc(project_id, doc_id, (error, res, body) => { + 
let runBatch; + if (error != null) { throw error; } + if ((body.lines == null)) { + return console.error(`[${new Date()}] ERROR: Invalid response from get doc (${doc_id})`, body); + } + const content = body.lines.join("\n"); + const { + version + } = body; - clients = [] - for pos in [1..CLIENT_COUNT] - do (pos) -> - client = new StressTestClient({doc_id, project_id, content, pos: pos, version: version, updateDelay: UPDATE_DELAY}) - clients.push client + const clients = []; + for (let pos = 1, end = CLIENT_COUNT, asc = 1 <= end; asc ? pos <= end : pos >= end; asc ? pos++ : pos--) { + (function(pos) { + const client = new StressTestClient({doc_id, project_id, content, pos, version, updateDelay: UPDATE_DELAY}); + return clients.push(client); + })(pos); + } - do runBatch = () -> - jobs = clients.map (client) -> - (cb) -> client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb) - async.parallel jobs, (error) -> - throw error if error? - printSummary(doc_id, clients) - checkDocument project_id, doc_id, clients, (error) -> - throw error if error? - runBatch() + return (runBatch = function() { + const jobs = clients.map(client => cb => client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)); + return async.parallel(jobs, function(error) { + if (error != null) { throw error; } + printSummary(doc_id, clients); + return checkDocument(project_id, doc_id, clients, function(error) { + if (error != null) { throw error; } + return runBatch(); + }); + }); + })(); + }); + }); + })(doc_and_project_id); +} From 86a3b0842a4014d3198a0fb453755fb7c2d6a3f4 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 6 May 2020 12:12:57 +0200 Subject: [PATCH 626/769] decaffeinate: Run post-processing cleanups on run.coffee --- .../test/stress/coffee/run.js | 23 +++++++++++++------ 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/services/document-updater/test/stress/coffee/run.js b/services/document-updater/test/stress/coffee/run.js index 351e726bbc..da569f17d5 100644 --- a/services/document-updater/test/stress/coffee/run.js +++ b/services/document-updater/test/stress/coffee/run.js @@ -1,3 +1,12 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
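Each rule in the lint header above corresponds to a pattern bulk-decaffeinate emits rather than a hand-written choice; `no-return-assign`, for instance, fires on forms like `return (this.inflight_op_sent = Date.now())` later in this file. A hand-cleaned equivalent, shown for contrast only and not part of this patch, splits the assignment from the return:

    // decaffeinate form (triggers no-return-assign):
    //   return (state.lastSentAt = Date.now())
    // equivalent after manual cleanup, same behaviour:
    function markSent(state) {
      state.lastSentAt = Date.now()
      return state.lastSentAt
    }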
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -199,7 +208,7 @@ class StressTestClient { } isContentValid(content) { - for (let chunk of Array.from(content.split('a'))) { + for (const chunk of Array.from(content.split('a'))) { if ((chunk != null) && (chunk !== "")) { if (!this.isChunkValid(chunk)) { @@ -225,7 +234,7 @@ const printSummary = function(doc_id, clients) { console.log(`[${now}] [${doc_id.slice(0,4)} (slot: ${slot(doc_id)})] ${clients.length} clients...`); return (() => { const result = []; - for (let client of Array.from(clients)) { + for (const client of Array.from(clients)) { console.log(`[${now}] \t[${client.client_id.slice(0,4)}] { local: ${client.counts.local_updates }, remote: ${client.counts.remote_updates}, conflicts: ${client.counts.conflicts}, max_delay: ${client.counts.max_delay} }`); result.push(client.counts = { local_updates: 0, @@ -242,11 +251,11 @@ const CLIENT_COUNT = parseInt(process.argv[2], 10); const UPDATE_DELAY = parseInt(process.argv[3], 10); const SAMPLE_INTERVAL = parseInt(process.argv[4], 10); -for (let doc_and_project_id of Array.from(process.argv.slice(5))) { +for (const doc_and_project_id of Array.from(process.argv.slice(5))) { (function(doc_and_project_id) { const [project_id, doc_id] = Array.from(doc_and_project_id.split(":")); console.log({project_id, doc_id}); - return DocUpdaterClient.setDocLines(project_id, doc_id, [(new Array(CLIENT_COUNT + 2)).join('a')], null, null, function(error) { + return DocUpdaterClient.setDocLines(project_id, doc_id, [(new Array(CLIENT_COUNT + 2)).join('a')], null, null, (error) => { if (error != null) { throw error; } return DocUpdaterClient.getDoc(project_id, doc_id, (error, res, body) => { let runBatch; @@ -260,7 +269,7 @@ for (let doc_and_project_id of Array.from(process.argv.slice(5))) { } = body; const clients = []; - for (let pos = 1, end = CLIENT_COUNT, asc = 1 <= end; asc ? pos <= end : pos >= end; asc ? pos++ : pos--) { + for (let pos = 1, end = CLIENT_COUNT, asc = end >= 1; asc ? pos <= end : pos >= end; asc ? 
pos++ : pos--) { (function(pos) { const client = new StressTestClient({doc_id, project_id, content, pos, version, updateDelay: UPDATE_DELAY}); return clients.push(client); @@ -269,10 +278,10 @@ for (let doc_and_project_id of Array.from(process.argv.slice(5))) { return (runBatch = function() { const jobs = clients.map(client => cb => client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)); - return async.parallel(jobs, function(error) { + return async.parallel(jobs, (error) => { if (error != null) { throw error; } printSummary(doc_id, clients); - return checkDocument(project_id, doc_id, clients, function(error) { + return checkDocument(project_id, doc_id, clients, (error) => { if (error != null) { throw error; } return runBatch(); }); From 02b29bfaa1448d291fa50761a88182af6328929a Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:13:03 +0200 Subject: [PATCH 627/769] decaffeinate: rename test/stress/coffee to test/stress/js --- services/document-updater/test/stress/{coffee => js}/run.js | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/test/stress/{coffee => js}/run.js (100%) diff --git a/services/document-updater/test/stress/coffee/run.js b/services/document-updater/test/stress/js/run.js similarity index 100% rename from services/document-updater/test/stress/coffee/run.js rename to services/document-updater/test/stress/js/run.js From 0bae747b3e9d32e61da5a6c230d7ff3e4845e3ec Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:13:07 +0200 Subject: [PATCH 628/769] prettier: convert test/stress decaffeinated files to Prettier format --- .../document-updater/test/stress/js/run.js | 614 ++++++++++-------- 1 file changed, 356 insertions(+), 258 deletions(-) diff --git a/services/document-updater/test/stress/js/run.js b/services/document-updater/test/stress/js/run.js index da569f17d5..3ce482b19b 100644 --- a/services/document-updater/test/stress/js/run.js +++ b/services/document-updater/test/stress/js/run.js @@ -16,278 +16,376 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const DocUpdaterClient = require("../../acceptance/coffee/helpers/DocUpdaterClient"); +const DocUpdaterClient = require('../../acceptance/coffee/helpers/DocUpdaterClient') // MockTrackChangesApi = require "../../acceptance/js/helpers/MockTrackChangesApi" // MockWebApi = require "../../acceptance/js/helpers/MockWebApi" -const assert = require("assert"); -const async = require("async"); +const assert = require('assert') +const async = require('async') -const insert = function(string, pos, content) { - const result = string.slice(0, pos) + content + string.slice(pos); - return result; -}; +const insert = function (string, pos, content) { + const result = string.slice(0, pos) + content + string.slice(pos) + return result +} -const transform = function(op1, op2) { - if (op2.p < op1.p) { - return { - p: op1.p + op2.i.length, - i: op1.i - }; - } else { - return op1; - } -}; +const transform = function (op1, op2) { + if (op2.p < op1.p) { + return { + p: op1.p + op2.i.length, + i: op1.i + } + } else { + return op1 + } +} class StressTestClient { - constructor(options) { - if (options == null) { options = {}; } - this.options = options; - if (this.options.updateDelay == null) { this.options.updateDelay = 200; } - this.project_id = this.options.project_id || DocUpdaterClient.randomId(); - this.doc_id = this.options.doc_id || DocUpdaterClient.randomId(); - this.pos = 
this.options.pos || 0; - this.content = this.options.content || ""; + constructor(options) { + if (options == null) { + options = {} + } + this.options = options + if (this.options.updateDelay == null) { + this.options.updateDelay = 200 + } + this.project_id = this.options.project_id || DocUpdaterClient.randomId() + this.doc_id = this.options.doc_id || DocUpdaterClient.randomId() + this.pos = this.options.pos || 0 + this.content = this.options.content || '' - this.client_id = DocUpdaterClient.randomId(); - this.version = this.options.version || 0; - this.inflight_op = null; - this.charCode = 0; - - this.counts = { - conflicts: 0, - local_updates: 0, - remote_updates: 0, - max_delay: 0 - }; - - DocUpdaterClient.subscribeToAppliedOps((channel, update) => { - update = JSON.parse(update); - if (update.error != null) { - console.error(new Error(`Error from server: '${update.error}'`)); - return; - } - if (update.doc_id === this.doc_id) { - return this.processReply(update); - } - }); - } - - sendUpdate() { - const data = String.fromCharCode(65 + (this.charCode++ % 26)); - this.content = insert(this.content, this.pos, data); - this.inflight_op = { - i: data, - p: this.pos++ - }; - this.resendUpdate(); - return this.inflight_op_sent = Date.now(); - } - - resendUpdate() { - assert(this.inflight_op != null); - DocUpdaterClient.sendUpdate( - this.project_id, this.doc_id, - { - doc: this.doc_id, - op: [this.inflight_op], - v: this.version, - meta: { - source: this.client_id - }, - dupIfSource: [this.client_id] - } - ); - return this.update_timer = setTimeout(() => { - console.log(`[${new Date()}] \t[${this.client_id.slice(0,4)}] WARN: Resending update after 5 seconds`); - return this.resendUpdate(); - } - , 5000); - } - - processReply(update) { - if (update.op.v !== this.version) { - if (update.op.v < this.version) { - console.log(`[${new Date()}] \t[${this.client_id.slice(0,4)}] WARN: Duplicate ack (already seen version)`); - return; - } else { - console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] ERROR: Version jumped ahead (client: ${this.version}, op: ${update.op.v})`); - } - } - this.version++; - if (update.op.meta.source === this.client_id) { - if (this.inflight_op != null) { - this.counts.local_updates++; - this.inflight_op = null; - clearTimeout(this.update_timer); - const delay = Date.now() - this.inflight_op_sent; - this.counts.max_delay = Math.max(this.counts.max_delay, delay); - return this.continue(); - } else { - return console.log(`[${new Date()}] \t[${this.client_id.slice(0,4)}] WARN: Duplicate ack`); - } - } else { - assert(update.op.op.length === 1); - this.counts.remote_updates++; - let external_op = update.op.op[0]; - if (this.inflight_op != null) { - this.counts.conflicts++; - this.inflight_op = transform(this.inflight_op, external_op); - external_op = transform(external_op, this.inflight_op); - } - if (external_op.p < this.pos) { - this.pos += external_op.i.length; - } - return this.content = insert(this.content, external_op.p, external_op.i); - } - } - - continue() { - if (this.updateCount > 0) { - this.updateCount--; - return setTimeout(() => { - return this.sendUpdate(); - } - , this.options.updateDelay * ( 0.5 + Math.random() )); - } else { - return this.updateCallback(); - } - } - - runForNUpdates(n, callback) { - if (callback == null) { callback = function(error) {}; } - this.updateCallback = callback; - this.updateCount = n; - return this.continue(); - } - - check(callback) { - if (callback == null) { callback = function(error) {}; } - return 
DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res, body) => { - if (error != null) { throw error; } - if ((body.lines == null)) { - return console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] ERROR: Invalid response from get doc (${doc_id})`, body); - } - const content = body.lines.join("\n"); - const { - version - } = body; - if (content !== this.content) { - if (version === this.version) { - console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] Error: Client content does not match server.`); - console.error(`Server: ${content.split('a')}`); - console.error(`Client: ${this.content.split('a')}`); - } else { - console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] Error: Version mismatch (Server: '${version}', Client: '${this.version}')`); - } - } + this.client_id = DocUpdaterClient.randomId() + this.version = this.options.version || 0 + this.inflight_op = null + this.charCode = 0 - if (!this.isContentValid(this.content)) { - const iterable = this.content.split(""); - for (let i = 0; i < iterable.length; i++) { - const chunk = iterable[i]; - if ((chunk != null) && (chunk !== "a")) { - console.log(chunk, i); - } - } - throw new Error("bad content"); - } - return callback(); - }); - } + this.counts = { + conflicts: 0, + local_updates: 0, + remote_updates: 0, + max_delay: 0 + } - isChunkValid(chunk) { - const char = 0; - for (let i = 0; i < chunk.length; i++) { - const letter = chunk[i]; - if (letter.charCodeAt(0) !== (65 + (i % 26))) { - console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] Invalid Chunk:`, chunk); - return false; - } - } - return true; - } + DocUpdaterClient.subscribeToAppliedOps((channel, update) => { + update = JSON.parse(update) + if (update.error != null) { + console.error(new Error(`Error from server: '${update.error}'`)) + return + } + if (update.doc_id === this.doc_id) { + return this.processReply(update) + } + }) + } - isContentValid(content) { - for (const chunk of Array.from(content.split('a'))) { - if ((chunk != null) && (chunk !== "")) { - if (!this.isChunkValid(chunk)) { - - console.error(`[${new Date()}] \t[${this.client_id.slice(0,4)}] Invalid content`, content); - return false; - } - } - } - return true; - } + sendUpdate() { + const data = String.fromCharCode(65 + (this.charCode++ % 26)) + this.content = insert(this.content, this.pos, data) + this.inflight_op = { + i: data, + p: this.pos++ + } + this.resendUpdate() + return (this.inflight_op_sent = Date.now()) + } + + resendUpdate() { + assert(this.inflight_op != null) + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, { + doc: this.doc_id, + op: [this.inflight_op], + v: this.version, + meta: { + source: this.client_id + }, + dupIfSource: [this.client_id] + }) + return (this.update_timer = setTimeout(() => { + console.log( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] WARN: Resending update after 5 seconds` + ) + return this.resendUpdate() + }, 5000)) + } + + processReply(update) { + if (update.op.v !== this.version) { + if (update.op.v < this.version) { + console.log( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] WARN: Duplicate ack (already seen version)` + ) + return + } else { + console.error( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] ERROR: Version jumped ahead (client: ${this.version}, op: ${ + update.op.v + })` + ) + } + } + this.version++ + if (update.op.meta.source === this.client_id) { + if (this.inflight_op != null) { + this.counts.local_updates++ + this.inflight_op = null + 
clearTimeout(this.update_timer)
+        const delay = Date.now() - this.inflight_op_sent
+        this.counts.max_delay = Math.max(this.counts.max_delay, delay)
+        return this.continue()
+      } else {
+        return console.log(
+          `[${new Date()}] \t[${this.client_id.slice(
+            0,
+            4
+          )}] WARN: Duplicate ack`
+        )
+      }
+    } else {
+      assert(update.op.op.length === 1)
+      this.counts.remote_updates++
+      let external_op = update.op.op[0]
+      if (this.inflight_op != null) {
+        this.counts.conflicts++
+        this.inflight_op = transform(this.inflight_op, external_op)
+        external_op = transform(external_op, this.inflight_op)
+      }
+      if (external_op.p < this.pos) {
+        this.pos += external_op.i.length
+      }
+      return (this.content = insert(this.content, external_op.p, external_op.i))
+    }
+  }
+
+  continue() {
+    if (this.updateCount > 0) {
+      this.updateCount--
+      return setTimeout(() => {
+        return this.sendUpdate()
+      }, this.options.updateDelay * (0.5 + Math.random()))
+    } else {
+      return this.updateCallback()
+    }
+  }
+
+  runForNUpdates(n, callback) {
+    if (callback == null) {
+      callback = function (error) {}
+    }
+    this.updateCallback = callback
+    this.updateCount = n
+    return this.continue()
+  }
+
+  check(callback) {
+    if (callback == null) {
+      callback = function (error) {}
+    }
+    return DocUpdaterClient.getDoc(
+      this.project_id,
+      this.doc_id,
+      (error, res, body) => {
+        if (error != null) {
+          throw error
+        }
+        if (body.lines == null) {
+          return console.error(
+            `[${new Date()}] \t[${this.client_id.slice(
+              0,
+              4
+            )}] ERROR: Invalid response from get doc (${this.doc_id})`,
+            body
+          )
+        }
+        const content = body.lines.join('\n')
+        const { version } = body
+        if (content !== this.content) {
+          if (version === this.version) {
+            console.error(
+              `[${new Date()}] \t[${this.client_id.slice(
+                0,
+                4
+              )}] Error: Client content does not match server.`
+            )
+            console.error(`Server: ${content.split('a')}`)
+            console.error(`Client: ${this.content.split('a')}`)
+          } else {
+            console.error(
+              `[${new Date()}] \t[${this.client_id.slice(
+                0,
+                4
+              )}] Error: Version mismatch (Server: '${version}', Client: '${
+                this.version
+              }')`
+            )
+          }
+        }
+
+        if (!this.isContentValid(this.content)) {
+          const iterable = this.content.split('')
+          for (let i = 0; i < iterable.length; i++) {
+            const chunk = iterable[i]
+            if (chunk != null && chunk !== 'a') {
+              console.log(chunk, i)
+            }
+          }
+          throw new Error('bad content')
+        }
+        return callback()
+      }
+    )
+  }
+
+  isChunkValid(chunk) {
+    const char = 0
+    for (let i = 0; i < chunk.length; i++) {
+      const letter = chunk[i]
+      if (letter.charCodeAt(0) !== 65 + (i % 26)) {
+        console.error(
+          `[${new Date()}] \t[${this.client_id.slice(0, 4)}] Invalid Chunk:`,
+          chunk
+        )
+        return false
+      }
+    }
+    return true
+  }
+
+  isContentValid(content) {
+    for (const chunk of Array.from(content.split('a'))) {
+      if (chunk != null && chunk !== '') {
+        if (!this.isChunkValid(chunk)) {
+          console.error(
+            `[${new Date()}] \t[${this.client_id.slice(0, 4)}] Invalid content`,
+            content
+          )
+          return false
+        }
+      }
+    }
+    return true
+  }
}

+const checkDocument = function (project_id, doc_id, clients, callback) {
+  if (callback == null) {
+    callback = function (error) {}
+  }
+  const jobs = clients.map((client) => (cb) => client.check(cb))
+  return async.parallel(jobs, callback)
+}

-const checkDocument = function(project_id, doc_id, clients, callback) {
-  if (callback == null) { callback = function(error) {}; }
-  const jobs = clients.map(client => cb => client.check(cb));
-  return async.parallel(jobs, callback);
-};

+const printSummary = function (doc_id, clients) {
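// The `slot(...)` lookup just below comes from cluster-key-slot, which maps
// a Redis key to its cluster hash slot: CRC16(key) mod 16384, hashing only
// the `{...}` hash-tag portion when one is present. That is also why the
// doc-updater key schema wraps ids in braces. A minimal sketch, with a
// made-up id:
//
//   const keySlot = require('cluster-key-slot')
//   keySlot('doclines:{507f1f77bcf86cd799439011}') ===
//     keySlot('507f1f77bcf86cd799439011') // true: same hash tag
//
// Keys sharing a tag land in the same slot (and so on the same cluster
// node), so the summary line can report which node a given doc_id hits.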
+ const slot = require('cluster-key-slot') + const now = new Date() + console.log( + `[${now}] [${doc_id.slice(0, 4)} (slot: ${slot(doc_id)})] ${ + clients.length + } clients...` + ) + return (() => { + const result = [] + for (const client of Array.from(clients)) { + console.log( + `[${now}] \t[${client.client_id.slice(0, 4)}] { local: ${ + client.counts.local_updates + }, remote: ${client.counts.remote_updates}, conflicts: ${ + client.counts.conflicts + }, max_delay: ${client.counts.max_delay} }` + ) + result.push( + (client.counts = { + local_updates: 0, + remote_updates: 0, + conflicts: 0, + max_delay: 0 + }) + ) + } + return result + })() +} -const printSummary = function(doc_id, clients) { - const slot = require('cluster-key-slot'); - const now = new Date(); - console.log(`[${now}] [${doc_id.slice(0,4)} (slot: ${slot(doc_id)})] ${clients.length} clients...`); - return (() => { - const result = []; - for (const client of Array.from(clients)) { - console.log(`[${now}] \t[${client.client_id.slice(0,4)}] { local: ${client.counts.local_updates }, remote: ${client.counts.remote_updates}, conflicts: ${client.counts.conflicts}, max_delay: ${client.counts.max_delay} }`); - result.push(client.counts = { - local_updates: 0, - remote_updates: 0, - conflicts: 0, - max_delay: 0 - }); - } - return result; - })(); -}; - -const CLIENT_COUNT = parseInt(process.argv[2], 10); -const UPDATE_DELAY = parseInt(process.argv[3], 10); -const SAMPLE_INTERVAL = parseInt(process.argv[4], 10); +const CLIENT_COUNT = parseInt(process.argv[2], 10) +const UPDATE_DELAY = parseInt(process.argv[3], 10) +const SAMPLE_INTERVAL = parseInt(process.argv[4], 10) for (const doc_and_project_id of Array.from(process.argv.slice(5))) { - (function(doc_and_project_id) { - const [project_id, doc_id] = Array.from(doc_and_project_id.split(":")); - console.log({project_id, doc_id}); - return DocUpdaterClient.setDocLines(project_id, doc_id, [(new Array(CLIENT_COUNT + 2)).join('a')], null, null, (error) => { - if (error != null) { throw error; } - return DocUpdaterClient.getDoc(project_id, doc_id, (error, res, body) => { - let runBatch; - if (error != null) { throw error; } - if ((body.lines == null)) { - return console.error(`[${new Date()}] ERROR: Invalid response from get doc (${doc_id})`, body); - } - const content = body.lines.join("\n"); - const { - version - } = body; - - const clients = []; - for (let pos = 1, end = CLIENT_COUNT, asc = end >= 1; asc ? pos <= end : pos >= end; asc ? 
pos++ : pos--) { - (function(pos) { - const client = new StressTestClient({doc_id, project_id, content, pos, version, updateDelay: UPDATE_DELAY}); - return clients.push(client); - })(pos); - } - - return (runBatch = function() { - const jobs = clients.map(client => cb => client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)); - return async.parallel(jobs, (error) => { - if (error != null) { throw error; } - printSummary(doc_id, clients); - return checkDocument(project_id, doc_id, clients, (error) => { - if (error != null) { throw error; } - return runBatch(); - }); - }); - })(); - }); - }); - })(doc_and_project_id); + ;(function (doc_and_project_id) { + const [project_id, doc_id] = Array.from(doc_and_project_id.split(':')) + console.log({ project_id, doc_id }) + return DocUpdaterClient.setDocLines( + project_id, + doc_id, + [new Array(CLIENT_COUNT + 2).join('a')], + null, + null, + (error) => { + if (error != null) { + throw error + } + return DocUpdaterClient.getDoc( + project_id, + doc_id, + (error, res, body) => { + let runBatch + if (error != null) { + throw error + } + if (body.lines == null) { + return console.error( + `[${new Date()}] ERROR: Invalid response from get doc (${doc_id})`, + body + ) + } + const content = body.lines.join('\n') + const { version } = body + + const clients = [] + for ( + let pos = 1, end = CLIENT_COUNT, asc = end >= 1; + asc ? pos <= end : pos >= end; + asc ? pos++ : pos-- + ) { + ;(function (pos) { + const client = new StressTestClient({ + doc_id, + project_id, + content, + pos, + version, + updateDelay: UPDATE_DELAY + }) + return clients.push(client) + })(pos) + } + + return (runBatch = function () { + const jobs = clients.map((client) => (cb) => + client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb) + ) + return async.parallel(jobs, (error) => { + if (error != null) { + throw error + } + printSummary(doc_id, clients) + return checkDocument(project_id, doc_id, clients, (error) => { + if (error != null) { + throw error + } + return runBatch() + }) + }) + })() + } + ) + } + ) + })(doc_and_project_id) } From 86c6c38a6e55f25540e54cc5282503ce1402f873 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:13:07 +0200 Subject: [PATCH 629/769] decaffeinate: rename individual coffee files to js files --- services/document-updater/{app.coffee => app.js} | 0 .../config/{settings.defaults.coffee => settings.defaults.js} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/{app.coffee => app.js} (100%) rename services/document-updater/config/{settings.defaults.coffee => settings.defaults.js} (100%) diff --git a/services/document-updater/app.coffee b/services/document-updater/app.js similarity index 100% rename from services/document-updater/app.coffee rename to services/document-updater/app.js diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.js similarity index 100% rename from services/document-updater/config/settings.defaults.coffee rename to services/document-updater/config/settings.defaults.js From 877bae34b36152daa70e8dffa97102c6ee76d9eb Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:13:10 +0200 Subject: [PATCH 630/769] decaffeinate: convert individual files to js --- services/document-updater/app.js | 306 ++++++++++-------- .../config/settings.defaults.js | 177 +++++----- 2 files changed, 267 insertions(+), 216 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 
3ca7ef2b6f..602f9be9d0 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -1,164 +1,196 @@ -Metrics = require "metrics-sharelatex" -Metrics.initialize("doc-updater") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Metrics = require("metrics-sharelatex"); +Metrics.initialize("doc-updater"); -express = require('express') -http = require("http") -Settings = require('settings-sharelatex') -logger = require('logger-sharelatex') -logger.initialize("document-updater") +const express = require('express'); +const http = require("http"); +const Settings = require('settings-sharelatex'); +const logger = require('logger-sharelatex'); +logger.initialize("document-updater"); -logger.logger.addSerializers(require("./app/js/LoggerSerializers")) +logger.logger.addSerializers(require("./app/js/LoggerSerializers")); -if Settings.sentry?.dsn? - logger.initializeErrorReporting(Settings.sentry.dsn) +if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) { + logger.initializeErrorReporting(Settings.sentry.dsn); +} -RedisManager = require('./app/js/RedisManager') -DispatchManager = require('./app/js/DispatchManager') -DeleteQueueManager = require('./app/js/DeleteQueueManager') -Errors = require "./app/js/Errors" -HttpController = require "./app/js/HttpController" -mongojs = require "./app/js/mongojs" -async = require "async" +const RedisManager = require('./app/js/RedisManager'); +const DispatchManager = require('./app/js/DispatchManager'); +const DeleteQueueManager = require('./app/js/DeleteQueueManager'); +const Errors = require("./app/js/Errors"); +const HttpController = require("./app/js/HttpController"); +const mongojs = require("./app/js/mongojs"); +const async = require("async"); -Path = require "path" -bodyParser = require "body-parser" +const Path = require("path"); +const bodyParser = require("body-parser"); -Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger) -Metrics.event_loop.monitor(logger, 100) +Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger); +Metrics.event_loop.monitor(logger, 100); -app = express() +const app = express(); app.use(Metrics.http.monitor(logger)); -app.use bodyParser.json({limit: (Settings.max_doc_length + 64 * 1024)}) -Metrics.injectMetricsRoute(app) +app.use(bodyParser.json({limit: (Settings.max_doc_length + (64 * 1024))})); +Metrics.injectMetricsRoute(app); -DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) +DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10); -app.param 'project_id', (req, res, next, project_id) -> - if project_id?.match /^[0-9a-f]{24}$/ - next() - else - next new Error("invalid project id") +app.param('project_id', function(req, res, next, project_id) { + if ((project_id != null ? project_id.match(/^[0-9a-f]{24}$/) : undefined)) { + return next(); + } else { + return next(new Error("invalid project id")); + } +}); -app.param 'doc_id', (req, res, next, doc_id) -> - if doc_id?.match /^[0-9a-f]{24}$/ - next() - else - next new Error("invalid doc id") +app.param('doc_id', function(req, res, next, doc_id) { + if ((doc_id != null ? 
doc_id.match(/^[0-9a-f]{24}$/) : undefined)) { + return next(); + } else { + return next(new Error("invalid doc id")); + } +}); -app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc -# temporarily keep the GET method for backwards compatibility -app.get '/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld -# will migrate to the POST method of get_and_flush_if_old instead -app.post '/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld -app.post '/project/:project_id/clearState', HttpController.clearProjectState -app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc -app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded -app.delete '/project/:project_id/doc/:doc_id', HttpController.deleteDoc -app.delete '/project/:project_id', HttpController.deleteProject -app.delete '/project', HttpController.deleteMultipleProjects -app.post '/project/:project_id', HttpController.updateProject -app.post '/project/:project_id/history/resync', HttpController.resyncProjectHistory -app.post '/project/:project_id/flush', HttpController.flushProject -app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges -app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges -app.delete '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment +app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc); +// temporarily keep the GET method for backwards compatibility +app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld); +// will migrate to the POST method of get_and_flush_if_old instead +app.post('/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld); +app.post('/project/:project_id/clearState', HttpController.clearProjectState); +app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc); +app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded); +app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc); +app.delete('/project/:project_id', HttpController.deleteProject); +app.delete('/project', HttpController.deleteMultipleProjects); +app.post('/project/:project_id', HttpController.updateProject); +app.post('/project/:project_id/history/resync', HttpController.resyncProjectHistory); +app.post('/project/:project_id/flush', HttpController.flushProject); +app.post('/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges); +app.post('/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges); +app.delete('/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment); -app.get '/flush_all_projects', HttpController.flushAllProjects -app.get '/flush_queued_projects', HttpController.flushQueuedProjects +app.get('/flush_all_projects', HttpController.flushAllProjects); +app.get('/flush_queued_projects', HttpController.flushQueuedProjects); -app.get '/total', (req, res)-> - timer = new Metrics.Timer("http.allDocList") - RedisManager.getCountOfDocsInMemory (err, count)-> - timer.done() - res.send {total:count} +app.get('/total', function(req, res){ + const timer = new Metrics.Timer("http.allDocList"); + return RedisManager.getCountOfDocsInMemory(function(err, count){ + timer.done(); + return res.send({total:count});}); +}); -app.get '/status', (req, res)-> - if Settings.shuttingDown - res.sendStatus 503 # Service unavailable - else - 
res.send('document updater is alive') +app.get('/status', function(req, res){ + if (Settings.shuttingDown) { + return res.sendStatus(503); // Service unavailable + } else { + return res.send('document updater is alive'); + } +}); -pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub) -app.get "/health_check/redis", (req, res, next) -> - pubsubClient.healthCheck (error) -> - if error? - logger.err {err: error}, "failed redis health check" - res.sendStatus 500 - else - res.sendStatus 200 +const pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub); +app.get("/health_check/redis", (req, res, next) => pubsubClient.healthCheck(function(error) { + if (error != null) { + logger.err({err: error}, "failed redis health check"); + return res.sendStatus(500); + } else { + return res.sendStatus(200); + } +})); -docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) -app.get "/health_check/redis_cluster", (req, res, next) -> - docUpdaterRedisClient.healthCheck (error) -> - if error? - logger.err {err: error}, "failed redis cluster health check" - res.sendStatus 500 - else - res.sendStatus 200 +const docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater); +app.get("/health_check/redis_cluster", (req, res, next) => docUpdaterRedisClient.healthCheck(function(error) { + if (error != null) { + logger.err({err: error}, "failed redis cluster health check"); + return res.sendStatus(500); + } else { + return res.sendStatus(200); + } +})); -app.get "/health_check", (req, res, next) -> - async.series [ - (cb) -> - pubsubClient.healthCheck (error) -> - if error? - logger.err {err: error}, "failed redis health check" - cb(error) - (cb) -> - docUpdaterRedisClient.healthCheck (error) -> - if error? - logger.err {err: error}, "failed redis cluster health check" - cb(error) - (cb) -> - mongojs.healthCheck (error) -> - if error? - logger.err {err: error}, "failed mongo health check" - cb(error) - ] , (error) -> - if error? 
- res.sendStatus 500 - else - res.sendStatus 200 +app.get("/health_check", (req, res, next) => async.series([ + cb => pubsubClient.healthCheck(function(error) { + if (error != null) { + logger.err({err: error}, "failed redis health check"); + } + return cb(error); + }), + cb => docUpdaterRedisClient.healthCheck(function(error) { + if (error != null) { + logger.err({err: error}, "failed redis cluster health check"); + } + return cb(error); + }), + cb => mongojs.healthCheck(function(error) { + if (error != null) { + logger.err({err: error}, "failed mongo health check"); + } + return cb(error); + }) +] , function(error) { + if (error != null) { + return res.sendStatus(500); + } else { + return res.sendStatus(200); + } +})); -app.use (error, req, res, next) -> - if error instanceof Errors.NotFoundError - res.sendStatus 404 - else if error instanceof Errors.OpRangeNotAvailableError - res.sendStatus 422 # Unprocessable Entity - else if error.statusCode is 413 - res.status(413).send("request entity too large") - else - logger.error err: error, req: req, "request errored" - res.status(500).send("Oops, something went wrong") +app.use(function(error, req, res, next) { + if (error instanceof Errors.NotFoundError) { + return res.sendStatus(404); + } else if (error instanceof Errors.OpRangeNotAvailableError) { + return res.sendStatus(422); // Unprocessable Entity + } else if (error.statusCode === 413) { + return res.status(413).send("request entity too large"); + } else { + logger.error({err: error, req}, "request errored"); + return res.status(500).send("Oops, something went wrong"); + } +}); -shutdownCleanly = (signal) -> - return () -> - logger.log signal: signal, "received interrupt, cleaning up" - Settings.shuttingDown = true - setTimeout () -> - logger.log signal: signal, "shutting down" - process.exit() - , 10000 +const shutdownCleanly = signal => (function() { + logger.log({signal}, "received interrupt, cleaning up"); + Settings.shuttingDown = true; + return setTimeout(function() { + logger.log({signal}, "shutting down"); + return process.exit(); + } + , 10000); +}); -watchForEvent = (eventName)-> - docUpdaterRedisClient.on eventName, (e)-> - console.log "redis event: #{eventName} #{e}" +const watchForEvent = eventName => docUpdaterRedisClient.on(eventName, e => console.log(`redis event: ${eventName} ${e}`)); -events = ["connect", "ready", "error", "close", "reconnecting", "end"] -for eventName in events - watchForEvent(eventName) +const events = ["connect", "ready", "error", "close", "reconnecting", "end"]; +for (let eventName of Array.from(events)) { + watchForEvent(eventName); +} -port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 3003 -host = Settings.internal.documentupdater.host or "localhost" -if !module.parent # Called directly - app.listen port, host, -> - logger.info "Document-updater starting up, listening on #{host}:#{port}" - if Settings.continuousBackgroundFlush - logger.info "Starting continuous background flush" - DeleteQueueManager.startBackgroundFlush() +const port = __guard__(Settings.internal != null ? Settings.internal.documentupdater : undefined, x => x.port) || __guard__(Settings.apis != null ? 
Settings.apis.documentupdater : undefined, x1 => x1.port) || 3003; +const host = Settings.internal.documentupdater.host || "localhost"; +if (!module.parent) { // Called directly + app.listen(port, host, function() { + logger.info(`Document-updater starting up, listening on ${host}:${port}`); + if (Settings.continuousBackgroundFlush) { + logger.info("Starting continuous background flush"); + return DeleteQueueManager.startBackgroundFlush(); + } + }); +} -module.exports = app +module.exports = app; -for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT'] - process.on signal, shutdownCleanly(signal) +for (let signal of ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']) { + process.on(signal, shutdownCleanly(signal)); +} + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index 0ced9eeedd..190fbff56f 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -1,97 +1,116 @@ -Path = require('path') -http = require('http') -http.globalAgent.maxSockets = 300 +const Path = require('path'); +const http = require('http'); +http.globalAgent.maxSockets = 300; -module.exports = - internal: - documentupdater: - host: process.env["LISTEN_ADDRESS"] or "localhost" +module.exports = { + internal: { + documentupdater: { + host: process.env["LISTEN_ADDRESS"] || "localhost", port: 3003 + } + }, - apis: - web: - url: "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}" - user: process.env['WEB_API_USER'] or "sharelatex" - pass: process.env['WEB_API_PASSWORD'] or "password" - trackchanges: - url: "http://#{process.env["TRACK_CHANGES_HOST"] or "localhost"}:3015" - project_history: - enabled: true - url: "http://#{process.env["PROJECT_HISTORY_HOST"] or "localhost"}:3054" + apis: { + web: { + url: `http://${process.env['WEB_API_HOST'] || process.env['WEB_HOST'] || "localhost"}:${process.env['WEB_API_PORT'] || process.env['WEB_PORT'] || 3000}`, + user: process.env['WEB_API_USER'] || "sharelatex", + pass: process.env['WEB_API_PASSWORD'] || "password" + }, + trackchanges: { + url: `http://${process.env["TRACK_CHANGES_HOST"] || "localhost"}:3015` + }, + project_history: { + enabled: true, + url: `http://${process.env["PROJECT_HISTORY_HOST"] || "localhost"}:3054` + } + }, - redis: + redis: { - pubsub: - host: process.env['PUBSUB_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost" - port: process.env['PUBSUB_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379" - password: process.env["PUBSUB_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") + pubsub: { + host: process.env['PUBSUB_REDIS_HOST'] || process.env['REDIS_HOST'] || "localhost", + port: process.env['PUBSUB_REDIS_PORT'] || process.env['REDIS_PORT'] || "6379", + password: process.env["PUBSUB_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20") + }, - history: - port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - 
password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" - docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" + history: { + port: process.env["HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379", + host: process.env["HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost", + password: process.env["HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"), + key_schema: { + uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:{${doc_id}}`; }, + docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:{${project_id}}`; } + } + }, - project_history: - port: process.env["NEW_HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["NEW_HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" - projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" + project_history: { + port: process.env["NEW_HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379", + host: process.env["NEW_HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost", + password: process.env["NEW_HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"), + key_schema: { + projectHistoryOps({project_id}) { return `ProjectHistory:Ops:{${project_id}}`; }, + projectHistoryFirstOpTimestamp({project_id}) { return `ProjectHistory:FirstOpTimestamp:{${project_id}}`; } + } + }, - lock: - port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["LOCK_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["LOCK_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + lock: { + port: process.env["LOCK_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379", + host: process.env["LOCK_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost", + password: process.env["LOCK_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"), + key_schema: { + blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; } + } + }, - documentupdater: - port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" - docHash: ({doc_id}) -> "DocHash:{#{doc_id}}" - projectKey: 
({doc_id}) -> "ProjectId:{#{doc_id}}" - docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" - ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" - unflushedTime: ({doc_id}) -> "UnflushedTime:{#{doc_id}}" - pathname: ({doc_id}) -> "Pathname:{#{doc_id}}" - projectHistoryId: ({doc_id}) -> "ProjectHistoryId:{#{doc_id}}" - projectHistoryType: ({doc_id}) -> "ProjectHistoryType:{#{doc_id}}" - projectState: ({project_id}) -> "ProjectState:{#{project_id}}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" - lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:{#{doc_id}}" - lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:{#{doc_id}}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" - flushAndDeleteQueue: () -> "DocUpdaterFlushAndDeleteQueue" + documentupdater: { + port: process.env["DOC_UPDATER_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379", + host: process.env["DOC_UPDATER_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost", + password: process.env["DOC_UPDATER_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"), + key_schema: { + blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; }, + docLines({doc_id}) { return `doclines:{${doc_id}}`; }, + docOps({doc_id}) { return `DocOps:{${doc_id}}`; }, + docVersion({doc_id}) { return `DocVersion:{${doc_id}}`; }, + docHash({doc_id}) { return `DocHash:{${doc_id}}`; }, + projectKey({doc_id}) { return `ProjectId:{${doc_id}}`; }, + docsInProject({project_id}) { return `DocsIn:{${project_id}}`; }, + ranges({doc_id}) { return `Ranges:{${doc_id}}`; }, + unflushedTime({doc_id}) { return `UnflushedTime:{${doc_id}}`; }, + pathname({doc_id}) { return `Pathname:{${doc_id}}`; }, + projectHistoryId({doc_id}) { return `ProjectHistoryId:{${doc_id}}`; }, + projectHistoryType({doc_id}) { return `ProjectHistoryType:{${doc_id}}`; }, + projectState({project_id}) { return `ProjectState:{${project_id}}`; }, + pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; }, + lastUpdatedBy({doc_id}) { return `lastUpdatedBy:{${doc_id}}`; }, + lastUpdatedAt({doc_id}) { return `lastUpdatedAt:{${doc_id}}`; }, + pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; }, + flushAndDeleteQueue() { return "DocUpdaterFlushAndDeleteQueue"; } + } + } + }, - max_doc_length: 2 * 1024 * 1024 # 2mb + max_doc_length: 2 * 1024 * 1024, // 2mb - dispatcherCount: process.env["DISPATCHER_COUNT"] + dispatcherCount: process.env["DISPATCHER_COUNT"], - mongo: - url : process.env['MONGO_CONNECTION_STRING'] || "mongodb://#{process.env['MONGO_HOST'] or '127.0.0.1'}/sharelatex" + mongo: { + url : process.env['MONGO_CONNECTION_STRING'] || `mongodb://${process.env['MONGO_HOST'] || '127.0.0.1'}/sharelatex` + }, - sentry: + sentry: { dsn: process.env.SENTRY_DSN + }, - publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] or false + publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] || false, - continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false + continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] || false, - smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds + smoothingOffset: process.env['SMOOTHING_OFFSET'] || 1000, // milliseconds - disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] or false # don't flush track-changes for projects using project-history + disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] || false // don't flush track-changes for projects using 
project-history +}; From 29d5e8f8b96cd0349f2b0cdaa52c1da6b147875c Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Wed, 6 May 2020 12:13:11 +0200 Subject: [PATCH 631/769] prettier: convert individual decaffeinated files to Prettier format --- services/document-updater/app.js | 355 ++++++++++-------- .../config/settings.defaults.js | 302 ++++++++++----- 2 files changed, 409 insertions(+), 248 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 602f9be9d0..abd3370c72 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -6,191 +6,250 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Metrics = require("metrics-sharelatex"); -Metrics.initialize("doc-updater"); +const Metrics = require('metrics-sharelatex') +Metrics.initialize('doc-updater') -const express = require('express'); -const http = require("http"); -const Settings = require('settings-sharelatex'); -const logger = require('logger-sharelatex'); -logger.initialize("document-updater"); +const express = require('express') +const http = require('http') +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +logger.initialize('document-updater') -logger.logger.addSerializers(require("./app/js/LoggerSerializers")); +logger.logger.addSerializers(require('./app/js/LoggerSerializers')) if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) { - logger.initializeErrorReporting(Settings.sentry.dsn); + logger.initializeErrorReporting(Settings.sentry.dsn) } -const RedisManager = require('./app/js/RedisManager'); -const DispatchManager = require('./app/js/DispatchManager'); -const DeleteQueueManager = require('./app/js/DeleteQueueManager'); -const Errors = require("./app/js/Errors"); -const HttpController = require("./app/js/HttpController"); -const mongojs = require("./app/js/mongojs"); -const async = require("async"); +const RedisManager = require('./app/js/RedisManager') +const DispatchManager = require('./app/js/DispatchManager') +const DeleteQueueManager = require('./app/js/DeleteQueueManager') +const Errors = require('./app/js/Errors') +const HttpController = require('./app/js/HttpController') +const mongojs = require('./app/js/mongojs') +const async = require('async') -const Path = require("path"); -const bodyParser = require("body-parser"); +const Path = require('path') +const bodyParser = require('body-parser') -Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger); -Metrics.event_loop.monitor(logger, 100); +Metrics.mongodb.monitor( + Path.resolve(__dirname + '/node_modules/mongojs/node_modules/mongodb'), + logger +) +Metrics.event_loop.monitor(logger, 100) -const app = express(); -app.use(Metrics.http.monitor(logger)); -app.use(bodyParser.json({limit: (Settings.max_doc_length + (64 * 1024))})); -Metrics.injectMetricsRoute(app); +const app = express() +app.use(Metrics.http.monitor(logger)) +app.use(bodyParser.json({ limit: Settings.max_doc_length + 64 * 1024 })) +Metrics.injectMetricsRoute(app) -DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10); +DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) -app.param('project_id', function(req, res, next, project_id) { - if ((project_id != null ? 
project_id.match(/^[0-9a-f]{24}$/) : undefined)) { - return next(); - } else { - return next(new Error("invalid project id")); - } -}); +app.param('project_id', function (req, res, next, project_id) { + if (project_id != null ? project_id.match(/^[0-9a-f]{24}$/) : undefined) { + return next() + } else { + return next(new Error('invalid project id')) + } +}) -app.param('doc_id', function(req, res, next, doc_id) { - if ((doc_id != null ? doc_id.match(/^[0-9a-f]{24}$/) : undefined)) { - return next(); - } else { - return next(new Error("invalid doc id")); - } -}); +app.param('doc_id', function (req, res, next, doc_id) { + if (doc_id != null ? doc_id.match(/^[0-9a-f]{24}$/) : undefined) { + return next() + } else { + return next(new Error('invalid doc id')) + } +}) -app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc); +app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc) // temporarily keep the GET method for backwards compatibility -app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld); +app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld) // will migrate to the POST method of get_and_flush_if_old instead -app.post('/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld); -app.post('/project/:project_id/clearState', HttpController.clearProjectState); -app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc); -app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded); -app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc); -app.delete('/project/:project_id', HttpController.deleteProject); -app.delete('/project', HttpController.deleteMultipleProjects); -app.post('/project/:project_id', HttpController.updateProject); -app.post('/project/:project_id/history/resync', HttpController.resyncProjectHistory); -app.post('/project/:project_id/flush', HttpController.flushProject); -app.post('/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges); -app.post('/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges); -app.delete('/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment); +app.post( + '/project/:project_id/get_and_flush_if_old', + HttpController.getProjectDocsAndFlushIfOld +) +app.post('/project/:project_id/clearState', HttpController.clearProjectState) +app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc) +app.post( + '/project/:project_id/doc/:doc_id/flush', + HttpController.flushDocIfLoaded +) +app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc) +app.delete('/project/:project_id', HttpController.deleteProject) +app.delete('/project', HttpController.deleteMultipleProjects) +app.post('/project/:project_id', HttpController.updateProject) +app.post( + '/project/:project_id/history/resync', + HttpController.resyncProjectHistory +) +app.post('/project/:project_id/flush', HttpController.flushProject) +app.post( + '/project/:project_id/doc/:doc_id/change/:change_id/accept', + HttpController.acceptChanges +) +app.post( + '/project/:project_id/doc/:doc_id/change/accept', + HttpController.acceptChanges +) +app.delete( + '/project/:project_id/doc/:doc_id/comment/:comment_id', + HttpController.deleteComment +) -app.get('/flush_all_projects', HttpController.flushAllProjects); -app.get('/flush_queued_projects', HttpController.flushQueuedProjects); +app.get('/flush_all_projects', HttpController.flushAllProjects) 
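// Every :project_id and :doc_id segment in the routes above is validated by
// the two app.param() hooks near the top of this file, which accept only
// 24-character lowercase hex strings (the BSON ObjectId text form). Express
// runs a matching app.param() callback once per request before the handler,
// so a malformed id never reaches HttpController; it is passed to
// next(new Error(...)) and surfaces as a 500 from the error middleware
// further down. A minimal sketch of the same check in isolation (ids made
// up):
//
//   const OBJECT_ID = /^[0-9a-f]{24}$/
//   OBJECT_ID.test('507f1f77bcf86cd799439011') // true
//   OBJECT_ID.test('not-an-object-id') // false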
+app.get('/flush_queued_projects', HttpController.flushQueuedProjects) -app.get('/total', function(req, res){ - const timer = new Metrics.Timer("http.allDocList"); - return RedisManager.getCountOfDocsInMemory(function(err, count){ - timer.done(); - return res.send({total:count});}); -}); +app.get('/total', function (req, res) { + const timer = new Metrics.Timer('http.allDocList') + return RedisManager.getCountOfDocsInMemory(function (err, count) { + timer.done() + return res.send({ total: count }) + }) +}) -app.get('/status', function(req, res){ - if (Settings.shuttingDown) { - return res.sendStatus(503); // Service unavailable - } else { - return res.send('document updater is alive'); - } -}); +app.get('/status', function (req, res) { + if (Settings.shuttingDown) { + return res.sendStatus(503) // Service unavailable + } else { + return res.send('document updater is alive') + } +}) -const pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub); -app.get("/health_check/redis", (req, res, next) => pubsubClient.healthCheck(function(error) { +const pubsubClient = require('redis-sharelatex').createClient( + Settings.redis.pubsub +) +app.get('/health_check/redis', (req, res, next) => + pubsubClient.healthCheck(function (error) { if (error != null) { - logger.err({err: error}, "failed redis health check"); - return res.sendStatus(500); + logger.err({ err: error }, 'failed redis health check') + return res.sendStatus(500) } else { - return res.sendStatus(200); + return res.sendStatus(200) } -})); + }) +) -const docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater); -app.get("/health_check/redis_cluster", (req, res, next) => docUpdaterRedisClient.healthCheck(function(error) { +const docUpdaterRedisClient = require('redis-sharelatex').createClient( + Settings.redis.documentupdater +) +app.get('/health_check/redis_cluster', (req, res, next) => + docUpdaterRedisClient.healthCheck(function (error) { if (error != null) { - logger.err({err: error}, "failed redis cluster health check"); - return res.sendStatus(500); + logger.err({ err: error }, 'failed redis cluster health check') + return res.sendStatus(500) } else { - return res.sendStatus(200); + return res.sendStatus(200) } -})); + }) +) -app.get("/health_check", (req, res, next) => async.series([ - cb => pubsubClient.healthCheck(function(error) { - if (error != null) { - logger.err({err: error}, "failed redis health check"); - } - return cb(error); - }), - cb => docUpdaterRedisClient.healthCheck(function(error) { - if (error != null) { - logger.err({err: error}, "failed redis cluster health check"); - } - return cb(error); - }), - cb => mongojs.healthCheck(function(error) { - if (error != null) { - logger.err({err: error}, "failed mongo health check"); - } - return cb(error); - }) -] , function(error) { - if (error != null) { - return res.sendStatus(500); - } else { - return res.sendStatus(200); +app.get('/health_check', (req, res, next) => + async.series( + [ + (cb) => + pubsubClient.healthCheck(function (error) { + if (error != null) { + logger.err({ err: error }, 'failed redis health check') + } + return cb(error) + }), + (cb) => + docUpdaterRedisClient.healthCheck(function (error) { + if (error != null) { + logger.err({ err: error }, 'failed redis cluster health check') + } + return cb(error) + }), + (cb) => + mongojs.healthCheck(function (error) { + if (error != null) { + logger.err({ err: error }, 'failed mongo health check') + } + return cb(error) + }) + ], + function (error) { 
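// async.series runs the three probes above in order and stops at the first
// failure, so this final callback receives either null (everything healthy)
// or the first error; one failing backend flips the endpoint to 500. A
// quick manual probe, assuming the default port 3003 from
// settings.defaults.js:
//
//   curl -i http://localhost:3003/health_check
//   // 200 OK when redis, redis cluster and mongo all respond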
+ if (error != null) { + return res.sendStatus(500) + } else { + return res.sendStatus(200) + } } -})); + ) +) -app.use(function(error, req, res, next) { - if (error instanceof Errors.NotFoundError) { - return res.sendStatus(404); - } else if (error instanceof Errors.OpRangeNotAvailableError) { - return res.sendStatus(422); // Unprocessable Entity - } else if (error.statusCode === 413) { - return res.status(413).send("request entity too large"); - } else { - logger.error({err: error, req}, "request errored"); - return res.status(500).send("Oops, something went wrong"); - } -}); +app.use(function (error, req, res, next) { + if (error instanceof Errors.NotFoundError) { + return res.sendStatus(404) + } else if (error instanceof Errors.OpRangeNotAvailableError) { + return res.sendStatus(422) // Unprocessable Entity + } else if (error.statusCode === 413) { + return res.status(413).send('request entity too large') + } else { + logger.error({ err: error, req }, 'request errored') + return res.status(500).send('Oops, something went wrong') + } +}) -const shutdownCleanly = signal => (function() { - logger.log({signal}, "received interrupt, cleaning up"); - Settings.shuttingDown = true; - return setTimeout(function() { - logger.log({signal}, "shutting down"); - return process.exit(); - } - , 10000); -}); +const shutdownCleanly = (signal) => + function () { + logger.log({ signal }, 'received interrupt, cleaning up') + Settings.shuttingDown = true + return setTimeout(function () { + logger.log({ signal }, 'shutting down') + return process.exit() + }, 10000) + } -const watchForEvent = eventName => docUpdaterRedisClient.on(eventName, e => console.log(`redis event: ${eventName} ${e}`)); +const watchForEvent = (eventName) => + docUpdaterRedisClient.on(eventName, (e) => + console.log(`redis event: ${eventName} ${e}`) + ) -const events = ["connect", "ready", "error", "close", "reconnecting", "end"]; -for (let eventName of Array.from(events)) { - watchForEvent(eventName); +const events = ['connect', 'ready', 'error', 'close', 'reconnecting', 'end'] +for (const eventName of Array.from(events)) { + watchForEvent(eventName) } - -const port = __guard__(Settings.internal != null ? Settings.internal.documentupdater : undefined, x => x.port) || __guard__(Settings.apis != null ? Settings.apis.documentupdater : undefined, x1 => x1.port) || 3003; -const host = Settings.internal.documentupdater.host || "localhost"; -if (!module.parent) { // Called directly - app.listen(port, host, function() { - logger.info(`Document-updater starting up, listening on ${host}:${port}`); - if (Settings.continuousBackgroundFlush) { - logger.info("Starting continuous background flush"); - return DeleteQueueManager.startBackgroundFlush(); - } - }); +const port = + __guard__( + Settings.internal != null ? Settings.internal.documentupdater : undefined, + (x) => x.port + ) || + __guard__( + Settings.apis != null ? 
Settings.apis.documentupdater : undefined,
+    (x1) => x1.port
+  ) ||
+  3003
+const host = Settings.internal.documentupdater.host || 'localhost'
+if (!module.parent) {
+  // Called directly
+  app.listen(port, host, function () {
+    logger.info(`Document-updater starting up, listening on ${host}:${port}`)
+    if (Settings.continuousBackgroundFlush) {
+      logger.info('Starting continuous background flush')
+      return DeleteQueueManager.startBackgroundFlush()
+    }
+  })
 }
 
-module.exports = app;
+module.exports = app
 
-for (let signal of ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']) {
-  process.on(signal, shutdownCleanly(signal));
+for (const signal of [
+  'SIGINT',
+  'SIGHUP',
+  'SIGQUIT',
+  'SIGUSR1',
+  'SIGUSR2',
+  'SIGTERM',
+  'SIGABRT'
+]) {
+  process.on(signal, shutdownCleanly(signal))
 }
-
 function __guard__(value, transform) {
-  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
-}
\ No newline at end of file
+  return typeof value !== 'undefined' && value !== null
+    ? transform(value)
+    : undefined
+}
diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js
index 190fbff56f..6f51d912bc 100755
--- a/services/document-updater/config/settings.defaults.js
+++ b/services/document-updater/config/settings.defaults.js
@@ -1,116 +1,218 @@
-const Path = require('path');
-const http = require('http');
-http.globalAgent.maxSockets = 300;
+const Path = require('path')
+const http = require('http')
+http.globalAgent.maxSockets = 300
 
 module.exports = {
-  internal: {
-    documentupdater: {
-      host: process.env["LISTEN_ADDRESS"] || "localhost",
-      port: 3003
-    }
-  },
+  internal: {
+    documentupdater: {
+      host: process.env.LISTEN_ADDRESS || 'localhost',
+      port: 3003
+    }
+  },
 
-  apis: {
-    web: {
-      url: `http://${process.env['WEB_API_HOST'] || process.env['WEB_HOST'] || "localhost"}:${process.env['WEB_API_PORT'] || process.env['WEB_PORT'] || 3000}`,
-      user: process.env['WEB_API_USER'] || "sharelatex",
-      pass: process.env['WEB_API_PASSWORD'] || "password"
-    },
-    trackchanges: {
-      url: `http://${process.env["TRACK_CHANGES_HOST"] || "localhost"}:3015`
-    },
-    project_history: {
-      enabled: true,
-      url: `http://${process.env["PROJECT_HISTORY_HOST"] || "localhost"}:3054`
-    }
-  },
+  apis: {
+    web: {
+      url: `http://${
+        process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
+      }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
+      user: process.env.WEB_API_USER || 'sharelatex',
+      pass: process.env.WEB_API_PASSWORD || 'password'
+    },
+    trackchanges: {
+      url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`
+    },
+    project_history: {
+      enabled: true,
+      url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`
+    }
+  },
 
-  redis: {
+  redis: {
+    pubsub: {
+      host:
+        process.env.PUBSUB_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      port:
+        process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379',
+      password:
+        process.env.PUBSUB_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      )
+    },
 
-    pubsub: {
-      host: process.env['PUBSUB_REDIS_HOST'] || process.env['REDIS_HOST'] || "localhost",
-      port: process.env['PUBSUB_REDIS_PORT'] || process.env['REDIS_PORT'] || "6379",
-      password: process.env["PUBSUB_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20")
-    },
+    history: {
+      port:
+        process.env.HISTORY_REDIS_PORT ||
+        process.env.REDIS_PORT ||
+        '6379',
+      host:
+        process.env.HISTORY_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      password:
+        process.env.HISTORY_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      ),
+      key_schema: {
+        uncompressedHistoryOps({ doc_id }) {
+          return `UncompressedHistoryOps:{${doc_id}}`
+        },
+        docsWithHistoryOps({ project_id }) {
+          return `DocsWithHistoryOps:{${project_id}}`
+        }
+      }
+    },
 
-    history: {
-      port: process.env["HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema: {
-        uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:{${doc_id}}`; },
-        docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:{${project_id}}`; }
-      }
-    },
+    project_history: {
+      port:
+        process.env.NEW_HISTORY_REDIS_PORT ||
+        process.env.REDIS_PORT ||
+        '6379',
+      host:
+        process.env.NEW_HISTORY_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      password:
+        process.env.NEW_HISTORY_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      ),
+      key_schema: {
+        projectHistoryOps({ project_id }) {
+          return `ProjectHistory:Ops:{${project_id}}`
+        },
+        projectHistoryFirstOpTimestamp({ project_id }) {
+          return `ProjectHistory:FirstOpTimestamp:{${project_id}}`
+        }
+      }
+    },
 
-    project_history: {
-      port: process.env["NEW_HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["NEW_HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["NEW_HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema: {
-        projectHistoryOps({project_id}) { return `ProjectHistory:Ops:{${project_id}}`; },
-        projectHistoryFirstOpTimestamp({project_id}) { return `ProjectHistory:FirstOpTimestamp:{${project_id}}`; }
-      }
-    },
+    lock: {
+      port:
+        process.env.LOCK_REDIS_PORT || process.env.REDIS_PORT || '6379',
+      host:
+        process.env.LOCK_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      password:
+        process.env.LOCK_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      ),
+      key_schema: {
+        blockingKey({ doc_id }) {
+          return `Blocking:{${doc_id}}`
+        }
+      }
+    },
 
-    lock: {
-      port: process.env["LOCK_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["LOCK_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["LOCK_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema: {
-        blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; }
-      }
-    },
+    documentupdater: {
+      port:
+        process.env.DOC_UPDATER_REDIS_PORT ||
+        process.env.REDIS_PORT ||
+        '6379',
+      host:
+        process.env.DOC_UPDATER_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      password:
+        process.env.DOC_UPDATER_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      ),
+      key_schema: {
+        blockingKey({ doc_id }) {
+          return `Blocking:{${doc_id}}`
+        },
+        docLines({ doc_id }) {
+          return `doclines:{${doc_id}}`
+        },
+        docOps({ doc_id }) {
+          return `DocOps:{${doc_id}}`
+        },
+        docVersion({ doc_id }) {
+          return `DocVersion:{${doc_id}}`
+        },
+        docHash({ doc_id }) {
+          return `DocHash:{${doc_id}}`
+        },
+        projectKey({ doc_id }) {
+          return `ProjectId:{${doc_id}}`
+        },
+        docsInProject({ project_id }) {
+          return `DocsIn:{${project_id}}`
+        },
+        ranges({ doc_id }) {
+          return `Ranges:{${doc_id}}`
+        },
+        unflushedTime({ doc_id }) {
+          return `UnflushedTime:{${doc_id}}`
+        },
+        pathname({ doc_id }) {
+          return `Pathname:{${doc_id}}`
+        },
+        projectHistoryId({ doc_id }) {
+          return `ProjectHistoryId:{${doc_id}}`
+        },
+        projectHistoryType({ doc_id }) {
+          return `ProjectHistoryType:{${doc_id}}`
+        },
+        projectState({ project_id }) {
+          return `ProjectState:{${project_id}}`
+        },
+        pendingUpdates({ doc_id }) {
+          return `PendingUpdates:{${doc_id}}`
+        },
+        lastUpdatedBy({ doc_id }) {
+          return `lastUpdatedBy:{${doc_id}}`
+        },
+        lastUpdatedAt({ doc_id }) {
+          return `lastUpdatedAt:{${doc_id}}`
+        },
+        pendingUpdates({ doc_id }) {
+          return `PendingUpdates:{${doc_id}}`
+        },
+        flushAndDeleteQueue() {
+          return 'DocUpdaterFlushAndDeleteQueue'
+        }
+      }
+    }
+  },
 
-    documentupdater: {
-      port: process.env["DOC_UPDATER_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["DOC_UPDATER_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["DOC_UPDATER_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema: {
-        blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; },
-        docLines({doc_id}) { return `doclines:{${doc_id}}`; },
-        docOps({doc_id}) { return `DocOps:{${doc_id}}`; },
-        docVersion({doc_id}) { return `DocVersion:{${doc_id}}`; },
-        docHash({doc_id}) { return `DocHash:{${doc_id}}`; },
-        projectKey({doc_id}) { return `ProjectId:{${doc_id}}`; },
-        docsInProject({project_id}) { return `DocsIn:{${project_id}}`; },
-        ranges({doc_id}) { return `Ranges:{${doc_id}}`; },
-        unflushedTime({doc_id}) { return `UnflushedTime:{${doc_id}}`; },
-        pathname({doc_id}) { return `Pathname:{${doc_id}}`; },
-        projectHistoryId({doc_id}) { return `ProjectHistoryId:{${doc_id}}`; },
-        projectHistoryType({doc_id}) { return `ProjectHistoryType:{${doc_id}}`; },
-        projectState({project_id}) { return `ProjectState:{${project_id}}`; },
-        pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; },
-        lastUpdatedBy({doc_id}) { return `lastUpdatedBy:{${doc_id}}`; },
-        lastUpdatedAt({doc_id}) { return `lastUpdatedAt:{${doc_id}}`; },
-        pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; },
-        flushAndDeleteQueue() { return "DocUpdaterFlushAndDeleteQueue"; }
-      }
-    }
-  },
+  max_doc_length: 2 * 1024 * 1024, // 2mb
 
-  max_doc_length: 2 * 1024 * 1024, // 2mb
+  dispatcherCount: process.env.DISPATCHER_COUNT,
 
-  dispatcherCount: process.env["DISPATCHER_COUNT"],
+  mongo: {
+    url:
+      process.env.MONGO_CONNECTION_STRING ||
+      `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`
+  },
 
-  mongo: {
-    url : process.env['MONGO_CONNECTION_STRING'] || `mongodb://${process.env['MONGO_HOST'] || '127.0.0.1'}/sharelatex`
-  },
+  sentry: {
+    dsn: process.env.SENTRY_DSN
+  },
 
-  sentry: {
-    dsn: process.env.SENTRY_DSN
-  },
+  publishOnIndividualChannels:
+    process.env.PUBLISH_ON_INDIVIDUAL_CHANNELS || false,
 
-  publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] || false,
+  continuousBackgroundFlush:
+    process.env.CONTINUOUS_BACKGROUND_FLUSH || false,
 
-  continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] || false,
+  smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds
 
-  smoothingOffset: process.env['SMOOTHING_OFFSET'] || 1000, // milliseconds
-
-  disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] || false // don't flush track-changes for projects using project-history
-};
+  disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false // don't flush track-changes for projects using project-history
+}
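A note on the key_schema blocks above: each entry maps an id to a Redis key name, and the braces around the interpolated id are Redis Cluster hash tags, which pin every key belonging to one doc to the same hash slot so they can be read and written together. A minimal usage sketch follows; it is not code from this repository, and the rclient connection and the doc id are illustrative assumptions — only the key_schema object itself comes from the settings file above.

    // Sketch only: rclient is assumed to be an ioredis-style client and
    // 'doc-id-123' a stand-in doc id.
    const keys = Settings.redis.documentupdater.key_schema
    const docLinesKey = keys.docLines({ doc_id: 'doc-id-123' })
    // docLinesKey === 'doclines:{doc-id-123}'; the {...} hash tag keeps
    // all of this doc's keys in a single Redis Cluster hash slot.
    rclient.get(docLinesKey, function (error, docLines) {
      if (error) throw error
      console.log(JSON.parse(docLines)) // doc lines are stored as a JSON array
    })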
From 6fdc094b5cab936e604e7d3be5f961787ca11544 Mon Sep 17 00:00:00 2001
From: Tim Alby
Date: Wed, 6 May 2020 12:13:24 +0200
Subject: [PATCH 632/769] Uninstall coffee-script

---
 services/document-updater/package-lock.json | 8 --------
 services/document-updater/package.json      | 1 -
 2 files changed, 9 deletions(-)

diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json
index f83a21dbc8..e3d34fc959 100644
--- a/services/document-updater/package-lock.json
+++ b/services/document-updater/package-lock.json
@@ -1535,14 +1535,6 @@
       "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz",
       "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw=="
     },
-    "coffee-script": {
-      "version": "1.7.1",
-      "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz",
-      "integrity": "sha512-W3s+SROY73OmrSGtPTTW/2wp2rmW5vuh0/tUuCK1NvTuyzLOVPccIP9whmhZ4cYWcr2NJPNENZIFaAMkTD5G3w==",
-      "requires": {
-        "mkdirp": "~0.3.5"
-      }
-    },
     "color-convert": {
       "version": "1.9.3",
      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
diff --git a/services/document-updater/package.json b/services/document-updater/package.json
index 81e27d4c3c..af5f3ff6ac 100644
--- a/services/document-updater/package.json
+++ b/services/document-updater/package.json
@@ -21,7 +21,6 @@
     "async": "^2.5.0",
     "body-parser": "^1.19.0",
     "bunyan": "~0.22.1",
-    "coffee-script": "~1.7.0",
     "express": "4.17.1",
     "lodash": "^4.17.13",
     "logger-sharelatex": "^1.9.1",

From 96e7a668b7350068eee208015ff80b322581cf67 Mon Sep 17 00:00:00 2001
From: Tim Alby
Date: Tue, 28 Apr 2020 11:33:07 +0200
Subject: [PATCH 633/769] disable linting for lib/diff_match_patch.js

---
 services/document-updater/app/lib/diff_match_patch.js | 1 +
 1 file changed, 1 insertion(+)

diff --git a/services/document-updater/app/lib/diff_match_patch.js b/services/document-updater/app/lib/diff_match_patch.js
index 112130e097..4312010232 100644
--- a/services/document-updater/app/lib/diff_match_patch.js
+++ b/services/document-updater/app/lib/diff_match_patch.js
@@ -1,3 +1,4 @@
+/* eslint-disable */
 /**
  * Diff Match and Patch
  *
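The next patch clears camelcase lint errors in settings.defaults.js without changing the settings API: callers keep passing snake_case ids, and only the local bindings are renamed via an ES2015 destructuring rename. The pattern in isolation, with illustrative names and values:

    // Destructure the doc_id property into a camelCase local binding;
    // the object shape seen by callers is unchanged.
    function docLines({ doc_id: docId }) {
      return `doclines:{${docId}}`
    }

    console.log(docLines({ doc_id: 'doc-id-123' })) // doclines:{doc-id-123}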
From 41849760997e2ffb80fd58838e02695e52700d8c Mon Sep 17 00:00:00 2001
From: Tim Alby
Date: Tue, 28 Apr 2020 11:36:57 +0200
Subject: [PATCH 634/769] fix lint errors in config/settings.defaults.js

---
 .../config/settings.defaults.js | 76 +++++++++----------
 1 file changed, 36 insertions(+), 40 deletions(-)

diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js
index 6f51d912bc..899302c64e 100755
--- a/services/document-updater/config/settings.defaults.js
+++ b/services/document-updater/config/settings.defaults.js
@@ -1,4 +1,3 @@
-const Path = require('path')
 const http = require('http')
 http.globalAgent.maxSockets = 300
 
@@ -61,11 +60,11 @@ module.exports = {
         process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
       ),
       key_schema: {
-        uncompressedHistoryOps({ doc_id }) {
-          return `UncompressedHistoryOps:{${doc_id}}`
+        uncompressedHistoryOps({ doc_id: docId }) {
+          return `UncompressedHistoryOps:{${docId}}`
         },
-        docsWithHistoryOps({ project_id }) {
-          return `DocsWithHistoryOps:{${project_id}}`
+        docsWithHistoryOps({ project_id: projectId }) {
+          return `DocsWithHistoryOps:{${projectId}}`
         }
       }
     },
@@ -134,56 +133,53 @@ module.exports = {
         process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
       ),
       key_schema: {
-        blockingKey({ doc_id }) {
-          return `Blocking:{${doc_id}}`
+        blockingKey({ doc_id: docId }) {
+          return `Blocking:{${docId}}`
         },
-        docLines({ doc_id }) {
-          return `doclines:{${doc_id}}`
+        docLines({ doc_id: docId }) {
+          return `doclines:{${docId}}`
         },
-        docOps({ doc_id }) {
-          return `DocOps:{${doc_id}}`
+        docOps({ doc_id: docId }) {
+          return `DocOps:{${docId}}`
         },
-        docVersion({ doc_id }) {
-          return `DocVersion:{${doc_id}}`
+        docVersion({ doc_id: docId }) {
+          return `DocVersion:{${docId}}`
        },
-        docHash({ doc_id }) {
-          return `DocHash:{${doc_id}}`
+        docHash({ doc_id: docId }) {
+          return `DocHash:{${docId}}`
         },
-        projectKey({ doc_id }) {
-          return `ProjectId:{${doc_id}}`
+        projectKey({ doc_id: docId }) {
+          return `ProjectId:{${docId}}`
         },
-        docsInProject({ project_id }) {
-          return `DocsIn:{${project_id}}`
+        docsInProject({ project_id: projectId }) {
+          return `DocsIn:{${projectId}}`
         },
-        ranges({ doc_id }) {
-          return `Ranges:{${doc_id}}`
+        ranges({ doc_id: docId }) {
+          return `Ranges:{${docId}}`
         },
-        unflushedTime({ doc_id }) {
-          return `UnflushedTime:{${doc_id}}`
+        unflushedTime({ doc_id: docId }) {
+          return `UnflushedTime:{${docId}}`
         },
-        pathname({ doc_id }) {
-          return `Pathname:{${doc_id}}`
+        pathname({ doc_id: docId }) {
+          return `Pathname:{${docId}}`
         },
-        projectHistoryId({ doc_id }) {
-          return `ProjectHistoryId:{${doc_id}}`
+        projectHistoryId({ doc_id: docId }) {
+          return `ProjectHistoryId:{${docId}}`
        },
-        projectHistoryType({ doc_id }) {
-          return `ProjectHistoryType:{${doc_id}}`
+        projectHistoryType({ doc_id: docId }) {
+          return `ProjectHistoryType:{${docId}}`
         },
-        projectState({ project_id }) {
-          return `ProjectState:{${project_id}}`
+        projectState({ project_id: projectId }) {
+          return `ProjectState:{${projectId}}`
         },
-        pendingUpdates({ doc_id }) {
-          return `PendingUpdates:{${doc_id}}`
+        pendingUpdates({ doc_id: docId }) {
+          return `PendingUpdates:{${docId}}`
         },
-        lastUpdatedBy({ doc_id }) {
-          return `lastUpdatedBy:{${doc_id}}`
+        lastUpdatedBy({ doc_id: docId }) {
+          return `lastUpdatedBy:{${docId}}`
         },
-        lastUpdatedAt({ doc_id }) {
-          return `lastUpdatedAt:{${doc_id}}`
-        },
-        pendingUpdates({ doc_id }) {
-          return `PendingUpdates:{${doc_id}}`
+        lastUpdatedAt({ doc_id: docId }) {
+          return `lastUpdatedAt:{${docId}}`
         },
         flushAndDeleteQueue() {
           return 'DocUpdaterFlushAndDeleteQueue'

From e089cfc93cd78f50034604271efcbc4dca870f72 Mon Sep 17 00:00:00 2001
From: Tim Alby
Date: Tue, 28 Apr 2020 11:39:45 +0200
Subject: [PATCH 635/769] format config/settings.defaults.js &
 lib/diff_match_patch.js

---
 .../app/lib/diff_match_patch.js | 2191 +++++++++++++++++----------
 .../config/settings.defaults.js |   58 +-
 2 files changed, 1187 insertions(+), 1062 deletions(-)

diff --git a/services/document-updater/app/lib/diff_match_patch.js b/services/document-updater/app/lib/diff_match_patch.js
index 4312010232..f90ecdd8b0 100644
--- a/services/document-updater/app/lib/diff_match_patch.js
+++ b/services/document-updater/app/lib/diff_match_patch.js
@@ -29,48 +29,44 @@
  * @constructor
  */
function diff_match_patch() { - // Defaults. // Redefine these in your program to override the defaults. // Number of seconds to map a diff before giving up (0 for infinity). - this.Diff_Timeout = 1.0; + this.Diff_Timeout = 1.0 // Cost of an empty edit operation in terms of edit characters. - this.Diff_EditCost = 4; + this.Diff_EditCost = 4 // At what point is no match declared (0.0 = perfection, 1.0 = very loose). - this.Match_Threshold = 0.5; + this.Match_Threshold = 0.5 // How far to search for a match (0 = exact location, 1000+ = broad match). // A match this many characters away from the expected location will add // 1.0 to the score (0.0 is a perfect match). - this.Match_Distance = 1000; + this.Match_Distance = 1000 // When deleting a large block of text (over ~64 characters), how close do // the contents have to be to match the expected contents. (0.0 = perfection, // 1.0 = very loose). Note that Match_Threshold controls how closely the // end points of a delete need to match. - this.Patch_DeleteThreshold = 0.5; + this.Patch_DeleteThreshold = 0.5 // Chunk size for context length. - this.Patch_Margin = 4; + this.Patch_Margin = 4 // The number of bits in an int. - this.Match_MaxBits = 32; + this.Match_MaxBits = 32 } - // DIFF FUNCTIONS - /** * The data structure representing a diff is an array of tuples: * [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']] * which means: delete 'Hello', add 'Goodbye' and keep ' world.' */ -var DIFF_DELETE = -1; -var DIFF_INSERT = 1; -var DIFF_EQUAL = 0; +var DIFF_DELETE = -1 +var DIFF_INSERT = 1 +var DIFF_EQUAL = 0 /** @typedef {{0: number, 1: string}} */ -diff_match_patch.Diff; - +diff_match_patch.Diff /** * Find the differences between two texts. Simplifies the problem by stripping @@ -85,62 +81,65 @@ diff_match_patch.Diff; * instead. * @return {!Array.} Array of diff tuples. */ -diff_match_patch.prototype.diff_main = function(text1, text2, opt_checklines, - opt_deadline) { +diff_match_patch.prototype.diff_main = function ( + text1, + text2, + opt_checklines, + opt_deadline +) { // Set a deadline by which time the diff must be complete. if (typeof opt_deadline == 'undefined') { if (this.Diff_Timeout <= 0) { - opt_deadline = Number.MAX_VALUE; + opt_deadline = Number.MAX_VALUE } else { - opt_deadline = (new Date).getTime() + this.Diff_Timeout * 1000; + opt_deadline = new Date().getTime() + this.Diff_Timeout * 1000 } } - var deadline = opt_deadline; + var deadline = opt_deadline // Check for null inputs. if (text1 == null || text2 == null) { - throw new Error('Null input. (diff_main)'); + throw new Error('Null input. (diff_main)') } // Check for equality (speedup). if (text1 == text2) { if (text1) { - return [[DIFF_EQUAL, text1]]; + return [[DIFF_EQUAL, text1]] } - return []; + return [] } if (typeof opt_checklines == 'undefined') { - opt_checklines = true; + opt_checklines = true } - var checklines = opt_checklines; + var checklines = opt_checklines // Trim off common prefix (speedup). - var commonlength = this.diff_commonPrefix(text1, text2); - var commonprefix = text1.substring(0, commonlength); - text1 = text1.substring(commonlength); - text2 = text2.substring(commonlength); + var commonlength = this.diff_commonPrefix(text1, text2) + var commonprefix = text1.substring(0, commonlength) + text1 = text1.substring(commonlength) + text2 = text2.substring(commonlength) // Trim off common suffix (speedup). 
- commonlength = this.diff_commonSuffix(text1, text2); - var commonsuffix = text1.substring(text1.length - commonlength); - text1 = text1.substring(0, text1.length - commonlength); - text2 = text2.substring(0, text2.length - commonlength); + commonlength = this.diff_commonSuffix(text1, text2) + var commonsuffix = text1.substring(text1.length - commonlength) + text1 = text1.substring(0, text1.length - commonlength) + text2 = text2.substring(0, text2.length - commonlength) // Compute the diff on the middle block. - var diffs = this.diff_compute_(text1, text2, checklines, deadline); + var diffs = this.diff_compute_(text1, text2, checklines, deadline) // Restore the prefix and suffix. if (commonprefix) { - diffs.unshift([DIFF_EQUAL, commonprefix]); + diffs.unshift([DIFF_EQUAL, commonprefix]) } if (commonsuffix) { - diffs.push([DIFF_EQUAL, commonsuffix]); + diffs.push([DIFF_EQUAL, commonsuffix]) } - this.diff_cleanupMerge(diffs); - return diffs; -}; - + this.diff_cleanupMerge(diffs) + return diffs +} /** * Find the differences between two texts. Assumes that the texts do not @@ -154,64 +153,72 @@ diff_match_patch.prototype.diff_main = function(text1, text2, opt_checklines, * @return {!Array.} Array of diff tuples. * @private */ -diff_match_patch.prototype.diff_compute_ = function(text1, text2, checklines, - deadline) { - var diffs; +diff_match_patch.prototype.diff_compute_ = function ( + text1, + text2, + checklines, + deadline +) { + var diffs if (!text1) { // Just add some text (speedup). - return [[DIFF_INSERT, text2]]; + return [[DIFF_INSERT, text2]] } if (!text2) { // Just delete some text (speedup). - return [[DIFF_DELETE, text1]]; + return [[DIFF_DELETE, text1]] } - var longtext = text1.length > text2.length ? text1 : text2; - var shorttext = text1.length > text2.length ? text2 : text1; - var i = longtext.indexOf(shorttext); + var longtext = text1.length > text2.length ? text1 : text2 + var shorttext = text1.length > text2.length ? text2 : text1 + var i = longtext.indexOf(shorttext) if (i != -1) { // Shorter text is inside the longer text (speedup). - diffs = [[DIFF_INSERT, longtext.substring(0, i)], - [DIFF_EQUAL, shorttext], - [DIFF_INSERT, longtext.substring(i + shorttext.length)]]; + diffs = [ + [DIFF_INSERT, longtext.substring(0, i)], + [DIFF_EQUAL, shorttext], + [DIFF_INSERT, longtext.substring(i + shorttext.length)] + ] // Swap insertions for deletions if diff is reversed. if (text1.length > text2.length) { - diffs[0][0] = diffs[2][0] = DIFF_DELETE; + diffs[0][0] = diffs[2][0] = DIFF_DELETE } - return diffs; + return diffs } if (shorttext.length == 1) { // Single character string. // After the previous speedup, the character can't be an equality. - return [[DIFF_DELETE, text1], [DIFF_INSERT, text2]]; + return [ + [DIFF_DELETE, text1], + [DIFF_INSERT, text2] + ] } // Check to see if the problem can be split in two. - var hm = this.diff_halfMatch_(text1, text2); + var hm = this.diff_halfMatch_(text1, text2) if (hm) { // A half-match was found, sort out the return data. - var text1_a = hm[0]; - var text1_b = hm[1]; - var text2_a = hm[2]; - var text2_b = hm[3]; - var mid_common = hm[4]; + var text1_a = hm[0] + var text1_b = hm[1] + var text2_a = hm[2] + var text2_b = hm[3] + var mid_common = hm[4] // Send both pairs off for separate processing. 
- var diffs_a = this.diff_main(text1_a, text2_a, checklines, deadline); - var diffs_b = this.diff_main(text1_b, text2_b, checklines, deadline); + var diffs_a = this.diff_main(text1_a, text2_a, checklines, deadline) + var diffs_b = this.diff_main(text1_b, text2_b, checklines, deadline) // Merge the results. - return diffs_a.concat([[DIFF_EQUAL, mid_common]], diffs_b); + return diffs_a.concat([[DIFF_EQUAL, mid_common]], diffs_b) } if (checklines && text1.length > 100 && text2.length > 100) { - return this.diff_lineMode_(text1, text2, deadline); + return this.diff_lineMode_(text1, text2, deadline) } - return this.diff_bisect_(text1, text2, deadline); -}; - + return this.diff_bisect_(text1, text2, deadline) +} /** * Do a quick line-level diff on both strings, then rediff the parts for @@ -223,64 +230,65 @@ diff_match_patch.prototype.diff_compute_ = function(text1, text2, checklines, * @return {!Array.} Array of diff tuples. * @private */ -diff_match_patch.prototype.diff_lineMode_ = function(text1, text2, deadline) { +diff_match_patch.prototype.diff_lineMode_ = function (text1, text2, deadline) { // Scan the text on a line-by-line basis first. - var a = this.diff_linesToChars_(text1, text2); - text1 = a.chars1; - text2 = a.chars2; - var linearray = a.lineArray; + var a = this.diff_linesToChars_(text1, text2) + text1 = a.chars1 + text2 = a.chars2 + var linearray = a.lineArray - var diffs = this.diff_main(text1, text2, false, deadline); + var diffs = this.diff_main(text1, text2, false, deadline) // Convert the diff back to original text. - this.diff_charsToLines_(diffs, linearray); + this.diff_charsToLines_(diffs, linearray) // Eliminate freak matches (e.g. blank lines) - this.diff_cleanupSemantic(diffs); + this.diff_cleanupSemantic(diffs) // Rediff any replacement blocks, this time character-by-character. // Add a dummy entry at the end. - diffs.push([DIFF_EQUAL, '']); - var pointer = 0; - var count_delete = 0; - var count_insert = 0; - var text_delete = ''; - var text_insert = ''; + diffs.push([DIFF_EQUAL, '']) + var pointer = 0 + var count_delete = 0 + var count_insert = 0 + var text_delete = '' + var text_insert = '' while (pointer < diffs.length) { switch (diffs[pointer][0]) { case DIFF_INSERT: - count_insert++; - text_insert += diffs[pointer][1]; - break; + count_insert++ + text_insert += diffs[pointer][1] + break case DIFF_DELETE: - count_delete++; - text_delete += diffs[pointer][1]; - break; + count_delete++ + text_delete += diffs[pointer][1] + break case DIFF_EQUAL: // Upon reaching an equality, check for prior redundancies. if (count_delete >= 1 && count_insert >= 1) { // Delete the offending records and add the merged ones. - diffs.splice(pointer - count_delete - count_insert, - count_delete + count_insert); - pointer = pointer - count_delete - count_insert; - var a = this.diff_main(text_delete, text_insert, false, deadline); + diffs.splice( + pointer - count_delete - count_insert, + count_delete + count_insert + ) + pointer = pointer - count_delete - count_insert + var a = this.diff_main(text_delete, text_insert, false, deadline) for (var j = a.length - 1; j >= 0; j--) { - diffs.splice(pointer, 0, a[j]); + diffs.splice(pointer, 0, a[j]) } - pointer = pointer + a.length; + pointer = pointer + a.length } - count_insert = 0; - count_delete = 0; - text_delete = ''; - text_insert = ''; - break; + count_insert = 0 + count_delete = 0 + text_delete = '' + text_insert = '' + break } - pointer++; + pointer++ } - diffs.pop(); // Remove the dummy entry at the end. 
- - return diffs; -}; + diffs.pop() // Remove the dummy entry at the end. + return diffs +} /** * Find the 'middle snake' of a diff, split the problem in two @@ -292,69 +300,72 @@ diff_match_patch.prototype.diff_lineMode_ = function(text1, text2, deadline) { * @return {!Array.} Array of diff tuples. * @private */ -diff_match_patch.prototype.diff_bisect_ = function(text1, text2, deadline) { +diff_match_patch.prototype.diff_bisect_ = function (text1, text2, deadline) { // Cache the text lengths to prevent multiple calls. - var text1_length = text1.length; - var text2_length = text2.length; - var max_d = Math.ceil((text1_length + text2_length) / 2); - var v_offset = max_d; - var v_length = 2 * max_d; - var v1 = new Array(v_length); - var v2 = new Array(v_length); + var text1_length = text1.length + var text2_length = text2.length + var max_d = Math.ceil((text1_length + text2_length) / 2) + var v_offset = max_d + var v_length = 2 * max_d + var v1 = new Array(v_length) + var v2 = new Array(v_length) // Setting all elements to -1 is faster in Chrome & Firefox than mixing // integers and undefined. for (var x = 0; x < v_length; x++) { - v1[x] = -1; - v2[x] = -1; + v1[x] = -1 + v2[x] = -1 } - v1[v_offset + 1] = 0; - v2[v_offset + 1] = 0; - var delta = text1_length - text2_length; + v1[v_offset + 1] = 0 + v2[v_offset + 1] = 0 + var delta = text1_length - text2_length // If the total number of characters is odd, then the front path will collide // with the reverse path. - var front = (delta % 2 != 0); + var front = delta % 2 != 0 // Offsets for start and end of k loop. // Prevents mapping of space beyond the grid. - var k1start = 0; - var k1end = 0; - var k2start = 0; - var k2end = 0; + var k1start = 0 + var k1end = 0 + var k2start = 0 + var k2end = 0 for (var d = 0; d < max_d; d++) { // Bail out if deadline is reached. - if ((new Date()).getTime() > deadline) { - break; + if (new Date().getTime() > deadline) { + break } // Walk the front path one step. for (var k1 = -d + k1start; k1 <= d - k1end; k1 += 2) { - var k1_offset = v_offset + k1; - var x1; + var k1_offset = v_offset + k1 + var x1 if (k1 == -d || (k1 != d && v1[k1_offset - 1] < v1[k1_offset + 1])) { - x1 = v1[k1_offset + 1]; + x1 = v1[k1_offset + 1] } else { - x1 = v1[k1_offset - 1] + 1; + x1 = v1[k1_offset - 1] + 1 } - var y1 = x1 - k1; - while (x1 < text1_length && y1 < text2_length && - text1.charAt(x1) == text2.charAt(y1)) { - x1++; - y1++; + var y1 = x1 - k1 + while ( + x1 < text1_length && + y1 < text2_length && + text1.charAt(x1) == text2.charAt(y1) + ) { + x1++ + y1++ } - v1[k1_offset] = x1; + v1[k1_offset] = x1 if (x1 > text1_length) { // Ran off the right of the graph. - k1end += 2; + k1end += 2 } else if (y1 > text2_length) { // Ran off the bottom of the graph. - k1start += 2; + k1start += 2 } else if (front) { - var k2_offset = v_offset + delta - k1; + var k2_offset = v_offset + delta - k1 if (k2_offset >= 0 && k2_offset < v_length && v2[k2_offset] != -1) { // Mirror x2 onto top-left coordinate system. - var x2 = text1_length - v2[k2_offset]; + var x2 = text1_length - v2[k2_offset] if (x1 >= x2) { // Overlap detected. - return this.diff_bisectSplit_(text1, text2, x1, y1, deadline); + return this.diff_bisectSplit_(text1, text2, x1, y1, deadline) } } } @@ -362,37 +373,40 @@ diff_match_patch.prototype.diff_bisect_ = function(text1, text2, deadline) { // Walk the reverse path one step. 
for (var k2 = -d + k2start; k2 <= d - k2end; k2 += 2) { - var k2_offset = v_offset + k2; - var x2; + var k2_offset = v_offset + k2 + var x2 if (k2 == -d || (k2 != d && v2[k2_offset - 1] < v2[k2_offset + 1])) { - x2 = v2[k2_offset + 1]; + x2 = v2[k2_offset + 1] } else { - x2 = v2[k2_offset - 1] + 1; + x2 = v2[k2_offset - 1] + 1 } - var y2 = x2 - k2; - while (x2 < text1_length && y2 < text2_length && - text1.charAt(text1_length - x2 - 1) == - text2.charAt(text2_length - y2 - 1)) { - x2++; - y2++; + var y2 = x2 - k2 + while ( + x2 < text1_length && + y2 < text2_length && + text1.charAt(text1_length - x2 - 1) == + text2.charAt(text2_length - y2 - 1) + ) { + x2++ + y2++ } - v2[k2_offset] = x2; + v2[k2_offset] = x2 if (x2 > text1_length) { // Ran off the left of the graph. - k2end += 2; + k2end += 2 } else if (y2 > text2_length) { // Ran off the top of the graph. - k2start += 2; + k2start += 2 } else if (!front) { - var k1_offset = v_offset + delta - k2; + var k1_offset = v_offset + delta - k2 if (k1_offset >= 0 && k1_offset < v_length && v1[k1_offset] != -1) { - var x1 = v1[k1_offset]; - var y1 = v_offset + x1 - k1_offset; + var x1 = v1[k1_offset] + var y1 = v_offset + x1 - k1_offset // Mirror x2 onto top-left coordinate system. - x2 = text1_length - x2; + x2 = text1_length - x2 if (x1 >= x2) { // Overlap detected. - return this.diff_bisectSplit_(text1, text2, x1, y1, deadline); + return this.diff_bisectSplit_(text1, text2, x1, y1, deadline) } } } @@ -400,9 +414,11 @@ diff_match_patch.prototype.diff_bisect_ = function(text1, text2, deadline) { } // Diff took too long and hit the deadline or // number of diffs equals number of characters, no commonality at all. - return [[DIFF_DELETE, text1], [DIFF_INSERT, text2]]; -}; - + return [ + [DIFF_DELETE, text1], + [DIFF_INSERT, text2] + ] +} /** * Given the location of the 'middle snake', split the diff in two parts @@ -415,20 +431,24 @@ diff_match_patch.prototype.diff_bisect_ = function(text1, text2, deadline) { * @return {!Array.} Array of diff tuples. * @private */ -diff_match_patch.prototype.diff_bisectSplit_ = function(text1, text2, x, y, - deadline) { - var text1a = text1.substring(0, x); - var text2a = text2.substring(0, y); - var text1b = text1.substring(x); - var text2b = text2.substring(y); +diff_match_patch.prototype.diff_bisectSplit_ = function ( + text1, + text2, + x, + y, + deadline +) { + var text1a = text1.substring(0, x) + var text2a = text2.substring(0, y) + var text1b = text1.substring(x) + var text2b = text2.substring(y) // Compute both diffs serially. - var diffs = this.diff_main(text1a, text2a, false, deadline); - var diffsb = this.diff_main(text1b, text2b, false, deadline); - - return diffs.concat(diffsb); -}; + var diffs = this.diff_main(text1a, text2a, false, deadline) + var diffsb = this.diff_main(text1b, text2b, false, deadline) + return diffs.concat(diffsb) +} /** * Split two texts into an array of strings. Reduce the texts to a string of @@ -441,13 +461,13 @@ diff_match_patch.prototype.diff_bisectSplit_ = function(text1, text2, x, y, * The zeroth element of the array of unique strings is intentionally blank. * @private */ -diff_match_patch.prototype.diff_linesToChars_ = function(text1, text2) { - var lineArray = []; // e.g. lineArray[4] == 'Hello\n' - var lineHash = {}; // e.g. lineHash['Hello\n'] == 4 +diff_match_patch.prototype.diff_linesToChars_ = function (text1, text2) { + var lineArray = [] // e.g. lineArray[4] == 'Hello\n' + var lineHash = {} // e.g. 
lineHash['Hello\n'] == 4 // '\x00' is a valid character, but various debuggers don't like it. // So we'll insert a junk entry to avoid generating a null character. - lineArray[0] = ''; + lineArray[0] = '' /** * Split a text into an array of strings. Reduce the texts to a string of @@ -458,39 +478,41 @@ diff_match_patch.prototype.diff_linesToChars_ = function(text1, text2) { * @private */ function diff_linesToCharsMunge_(text) { - var chars = ''; + var chars = '' // Walk the text, pulling out a substring for each line. // text.split('\n') would would temporarily double our memory footprint. // Modifying text would create many large strings to garbage collect. - var lineStart = 0; - var lineEnd = -1; + var lineStart = 0 + var lineEnd = -1 // Keeping our own length variable is faster than looking it up. - var lineArrayLength = lineArray.length; + var lineArrayLength = lineArray.length while (lineEnd < text.length - 1) { - lineEnd = text.indexOf('\n', lineStart); + lineEnd = text.indexOf('\n', lineStart) if (lineEnd == -1) { - lineEnd = text.length - 1; + lineEnd = text.length - 1 } - var line = text.substring(lineStart, lineEnd + 1); - lineStart = lineEnd + 1; + var line = text.substring(lineStart, lineEnd + 1) + lineStart = lineEnd + 1 - if (lineHash.hasOwnProperty ? lineHash.hasOwnProperty(line) : - (lineHash[line] !== undefined)) { - chars += String.fromCharCode(lineHash[line]); + if ( + lineHash.hasOwnProperty + ? lineHash.hasOwnProperty(line) + : lineHash[line] !== undefined + ) { + chars += String.fromCharCode(lineHash[line]) } else { - chars += String.fromCharCode(lineArrayLength); - lineHash[line] = lineArrayLength; - lineArray[lineArrayLength++] = line; + chars += String.fromCharCode(lineArrayLength) + lineHash[line] = lineArrayLength + lineArray[lineArrayLength++] = line } } - return chars; + return chars } - var chars1 = diff_linesToCharsMunge_(text1); - var chars2 = diff_linesToCharsMunge_(text2); - return {chars1: chars1, chars2: chars2, lineArray: lineArray}; -}; - + var chars1 = diff_linesToCharsMunge_(text1) + var chars2 = diff_linesToCharsMunge_(text2) + return { chars1: chars1, chars2: chars2, lineArray: lineArray } +} /** * Rehydrate the text in a diff from a string of line hashes to real lines of @@ -499,17 +521,16 @@ diff_match_patch.prototype.diff_linesToChars_ = function(text1, text2) { * @param {!Array.} lineArray Array of unique strings. * @private */ -diff_match_patch.prototype.diff_charsToLines_ = function(diffs, lineArray) { +diff_match_patch.prototype.diff_charsToLines_ = function (diffs, lineArray) { for (var x = 0; x < diffs.length; x++) { - var chars = diffs[x][1]; - var text = []; + var chars = diffs[x][1] + var text = [] for (var y = 0; y < chars.length; y++) { - text[y] = lineArray[chars.charCodeAt(y)]; + text[y] = lineArray[chars.charCodeAt(y)] } - diffs[x][1] = text.join(''); + diffs[x][1] = text.join('') } -}; - +} /** * Determine the common prefix of two strings. @@ -518,30 +539,31 @@ diff_match_patch.prototype.diff_charsToLines_ = function(diffs, lineArray) { * @return {number} The number of characters common to the start of each * string. */ -diff_match_patch.prototype.diff_commonPrefix = function(text1, text2) { +diff_match_patch.prototype.diff_commonPrefix = function (text1, text2) { // Quick check for common null cases. if (!text1 || !text2 || text1.charAt(0) != text2.charAt(0)) { - return 0; + return 0 } // Binary search. 
// Performance analysis: http://neil.fraser.name/news/2007/10/09/ - var pointermin = 0; - var pointermax = Math.min(text1.length, text2.length); - var pointermid = pointermax; - var pointerstart = 0; + var pointermin = 0 + var pointermax = Math.min(text1.length, text2.length) + var pointermid = pointermax + var pointerstart = 0 while (pointermin < pointermid) { - if (text1.substring(pointerstart, pointermid) == - text2.substring(pointerstart, pointermid)) { - pointermin = pointermid; - pointerstart = pointermin; + if ( + text1.substring(pointerstart, pointermid) == + text2.substring(pointerstart, pointermid) + ) { + pointermin = pointermid + pointerstart = pointermin } else { - pointermax = pointermid; + pointermax = pointermid } - pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin); + pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin) } - return pointermid; -}; - + return pointermid +} /** * Determine the common suffix of two strings. @@ -549,31 +571,35 @@ diff_match_patch.prototype.diff_commonPrefix = function(text1, text2) { * @param {string} text2 Second string. * @return {number} The number of characters common to the end of each string. */ -diff_match_patch.prototype.diff_commonSuffix = function(text1, text2) { +diff_match_patch.prototype.diff_commonSuffix = function (text1, text2) { // Quick check for common null cases. - if (!text1 || !text2 || - text1.charAt(text1.length - 1) != text2.charAt(text2.length - 1)) { - return 0; + if ( + !text1 || + !text2 || + text1.charAt(text1.length - 1) != text2.charAt(text2.length - 1) + ) { + return 0 } // Binary search. // Performance analysis: http://neil.fraser.name/news/2007/10/09/ - var pointermin = 0; - var pointermax = Math.min(text1.length, text2.length); - var pointermid = pointermax; - var pointerend = 0; + var pointermin = 0 + var pointermax = Math.min(text1.length, text2.length) + var pointermid = pointermax + var pointerend = 0 while (pointermin < pointermid) { - if (text1.substring(text1.length - pointermid, text1.length - pointerend) == - text2.substring(text2.length - pointermid, text2.length - pointerend)) { - pointermin = pointermid; - pointerend = pointermin; + if ( + text1.substring(text1.length - pointermid, text1.length - pointerend) == + text2.substring(text2.length - pointermid, text2.length - pointerend) + ) { + pointermin = pointermid + pointerend = pointermin } else { - pointermax = pointermid; + pointermax = pointermid } - pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin); + pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin) } - return pointermid; -}; - + return pointermid +} /** * Determine if the suffix of one string is the prefix of another. @@ -583,46 +609,47 @@ diff_match_patch.prototype.diff_commonSuffix = function(text1, text2) { * string and the start of the second string. * @private */ -diff_match_patch.prototype.diff_commonOverlap_ = function(text1, text2) { +diff_match_patch.prototype.diff_commonOverlap_ = function (text1, text2) { // Cache the text lengths to prevent multiple calls. - var text1_length = text1.length; - var text2_length = text2.length; + var text1_length = text1.length + var text2_length = text2.length // Eliminate the null case. if (text1_length == 0 || text2_length == 0) { - return 0; + return 0 } // Truncate the longer string. 
if (text1_length > text2_length) { - text1 = text1.substring(text1_length - text2_length); + text1 = text1.substring(text1_length - text2_length) } else if (text1_length < text2_length) { - text2 = text2.substring(0, text1_length); + text2 = text2.substring(0, text1_length) } - var text_length = Math.min(text1_length, text2_length); + var text_length = Math.min(text1_length, text2_length) // Quick check for the worst case. if (text1 == text2) { - return text_length; + return text_length } // Start by looking for a single character match // and increase length until no match is found. // Performance analysis: http://neil.fraser.name/news/2010/11/04/ - var best = 0; - var length = 1; + var best = 0 + var length = 1 while (true) { - var pattern = text1.substring(text_length - length); - var found = text2.indexOf(pattern); + var pattern = text1.substring(text_length - length) + var found = text2.indexOf(pattern) if (found == -1) { - return best; + return best } - length += found; - if (found == 0 || text1.substring(text_length - length) == - text2.substring(0, length)) { - best = length; - length++; + length += found + if ( + found == 0 || + text1.substring(text_length - length) == text2.substring(0, length) + ) { + best = length + length++ } } -}; - +} /** * Do the two texts share a substring which is at least half the length of the @@ -635,17 +662,17 @@ diff_match_patch.prototype.diff_commonOverlap_ = function(text1, text2) { * text2 and the common middle. Or null if there was no match. * @private */ -diff_match_patch.prototype.diff_halfMatch_ = function(text1, text2) { +diff_match_patch.prototype.diff_halfMatch_ = function (text1, text2) { if (this.Diff_Timeout <= 0) { // Don't risk returning a non-optimal diff if we have unlimited time. - return null; + return null } - var longtext = text1.length > text2.length ? text1 : text2; - var shorttext = text1.length > text2.length ? text2 : text1; + var longtext = text1.length > text2.length ? text1 : text2 + var shorttext = text1.length > text2.length ? text2 : text1 if (longtext.length < 4 || shorttext.length * 2 < longtext.length) { - return null; // Pointless. + return null // Pointless. } - var dmp = this; // 'this' becomes 'window' in a closure. + var dmp = this // 'this' becomes 'window' in a closure. /** * Does a substring of shorttext exist within longtext such that the substring @@ -661,132 +688,153 @@ diff_match_patch.prototype.diff_halfMatch_ = function(text1, text2) { */ function diff_halfMatchI_(longtext, shorttext, i) { // Start with a 1/4 length substring at position i as a seed. 
- var seed = longtext.substring(i, i + Math.floor(longtext.length / 4)); - var j = -1; - var best_common = ''; - var best_longtext_a, best_longtext_b, best_shorttext_a, best_shorttext_b; + var seed = longtext.substring(i, i + Math.floor(longtext.length / 4)) + var j = -1 + var best_common = '' + var best_longtext_a, best_longtext_b, best_shorttext_a, best_shorttext_b while ((j = shorttext.indexOf(seed, j + 1)) != -1) { - var prefixLength = dmp.diff_commonPrefix(longtext.substring(i), - shorttext.substring(j)); - var suffixLength = dmp.diff_commonSuffix(longtext.substring(0, i), - shorttext.substring(0, j)); + var prefixLength = dmp.diff_commonPrefix( + longtext.substring(i), + shorttext.substring(j) + ) + var suffixLength = dmp.diff_commonSuffix( + longtext.substring(0, i), + shorttext.substring(0, j) + ) if (best_common.length < suffixLength + prefixLength) { - best_common = shorttext.substring(j - suffixLength, j) + - shorttext.substring(j, j + prefixLength); - best_longtext_a = longtext.substring(0, i - suffixLength); - best_longtext_b = longtext.substring(i + prefixLength); - best_shorttext_a = shorttext.substring(0, j - suffixLength); - best_shorttext_b = shorttext.substring(j + prefixLength); + best_common = + shorttext.substring(j - suffixLength, j) + + shorttext.substring(j, j + prefixLength) + best_longtext_a = longtext.substring(0, i - suffixLength) + best_longtext_b = longtext.substring(i + prefixLength) + best_shorttext_a = shorttext.substring(0, j - suffixLength) + best_shorttext_b = shorttext.substring(j + prefixLength) } } if (best_common.length * 2 >= longtext.length) { - return [best_longtext_a, best_longtext_b, - best_shorttext_a, best_shorttext_b, best_common]; + return [ + best_longtext_a, + best_longtext_b, + best_shorttext_a, + best_shorttext_b, + best_common + ] } else { - return null; + return null } } // First check if the second quarter is the seed for a half-match. - var hm1 = diff_halfMatchI_(longtext, shorttext, - Math.ceil(longtext.length / 4)); + var hm1 = diff_halfMatchI_( + longtext, + shorttext, + Math.ceil(longtext.length / 4) + ) // Check again based on the third quarter. - var hm2 = diff_halfMatchI_(longtext, shorttext, - Math.ceil(longtext.length / 2)); - var hm; + var hm2 = diff_halfMatchI_( + longtext, + shorttext, + Math.ceil(longtext.length / 2) + ) + var hm if (!hm1 && !hm2) { - return null; + return null } else if (!hm2) { - hm = hm1; + hm = hm1 } else if (!hm1) { - hm = hm2; + hm = hm2 } else { // Both matched. Select the longest. - hm = hm1[4].length > hm2[4].length ? hm1 : hm2; + hm = hm1[4].length > hm2[4].length ? hm1 : hm2 } // A half-match was found, sort out the return data. - var text1_a, text1_b, text2_a, text2_b; + var text1_a, text1_b, text2_a, text2_b if (text1.length > text2.length) { - text1_a = hm[0]; - text1_b = hm[1]; - text2_a = hm[2]; - text2_b = hm[3]; + text1_a = hm[0] + text1_b = hm[1] + text2_a = hm[2] + text2_b = hm[3] } else { - text2_a = hm[0]; - text2_b = hm[1]; - text1_a = hm[2]; - text1_b = hm[3]; + text2_a = hm[0] + text2_b = hm[1] + text1_a = hm[2] + text1_b = hm[3] } - var mid_common = hm[4]; - return [text1_a, text1_b, text2_a, text2_b, mid_common]; -}; - + var mid_common = hm[4] + return [text1_a, text1_b, text2_a, text2_b, mid_common] +} /** * Reduce the number of edits by eliminating semantically trivial equalities. * @param {!Array.} diffs Array of diff tuples. 
*/ -diff_match_patch.prototype.diff_cleanupSemantic = function(diffs) { - var changes = false; - var equalities = []; // Stack of indices where equalities are found. - var equalitiesLength = 0; // Keeping our own length var is faster in JS. +diff_match_patch.prototype.diff_cleanupSemantic = function (diffs) { + var changes = false + var equalities = [] // Stack of indices where equalities are found. + var equalitiesLength = 0 // Keeping our own length var is faster in JS. /** @type {?string} */ - var lastequality = null; + var lastequality = null // Always equal to diffs[equalities[equalitiesLength - 1]][1] - var pointer = 0; // Index of current position. + var pointer = 0 // Index of current position. // Number of characters that changed prior to the equality. - var length_insertions1 = 0; - var length_deletions1 = 0; + var length_insertions1 = 0 + var length_deletions1 = 0 // Number of characters that changed after the equality. - var length_insertions2 = 0; - var length_deletions2 = 0; + var length_insertions2 = 0 + var length_deletions2 = 0 while (pointer < diffs.length) { - if (diffs[pointer][0] == DIFF_EQUAL) { // Equality found. - equalities[equalitiesLength++] = pointer; - length_insertions1 = length_insertions2; - length_deletions1 = length_deletions2; - length_insertions2 = 0; - length_deletions2 = 0; - lastequality = diffs[pointer][1]; - } else { // An insertion or deletion. + if (diffs[pointer][0] == DIFF_EQUAL) { + // Equality found. + equalities[equalitiesLength++] = pointer + length_insertions1 = length_insertions2 + length_deletions1 = length_deletions2 + length_insertions2 = 0 + length_deletions2 = 0 + lastequality = diffs[pointer][1] + } else { + // An insertion or deletion. if (diffs[pointer][0] == DIFF_INSERT) { - length_insertions2 += diffs[pointer][1].length; + length_insertions2 += diffs[pointer][1].length } else { - length_deletions2 += diffs[pointer][1].length; + length_deletions2 += diffs[pointer][1].length } // Eliminate an equality that is smaller or equal to the edits on both // sides of it. - if (lastequality && (lastequality.length <= - Math.max(length_insertions1, length_deletions1)) && - (lastequality.length <= Math.max(length_insertions2, - length_deletions2))) { + if ( + lastequality && + lastequality.length <= + Math.max(length_insertions1, length_deletions1) && + lastequality.length <= Math.max(length_insertions2, length_deletions2) + ) { // Duplicate record. - diffs.splice(equalities[equalitiesLength - 1], 0, - [DIFF_DELETE, lastequality]); + diffs.splice(equalities[equalitiesLength - 1], 0, [ + DIFF_DELETE, + lastequality + ]) // Change second copy to insert. - diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT; + diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT // Throw away the equality we just deleted. - equalitiesLength--; + equalitiesLength-- // Throw away the previous equality (it needs to be reevaluated). - equalitiesLength--; - pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1; - length_insertions1 = 0; // Reset the counters. - length_deletions1 = 0; - length_insertions2 = 0; - length_deletions2 = 0; - lastequality = null; - changes = true; + equalitiesLength-- + pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1 + length_insertions1 = 0 // Reset the counters. + length_deletions1 = 0 + length_insertions2 = 0 + length_deletions2 = 0 + lastequality = null + changes = true } } - pointer++; + pointer++ } // Normalize the diff. 
if (changes) { - this.diff_cleanupMerge(diffs); + this.diff_cleanupMerge(diffs) } - this.diff_cleanupSemanticLossless(diffs); + this.diff_cleanupSemanticLossless(diffs) // Find any overlaps between deletions and insertions. // e.g: abcxxxxxxdef @@ -794,47 +842,59 @@ diff_match_patch.prototype.diff_cleanupSemantic = function(diffs) { // e.g: xxxabcdefxxx // -> defxxxabc // Only extract an overlap if it is as big as the edit ahead or behind it. - pointer = 1; + pointer = 1 while (pointer < diffs.length) { - if (diffs[pointer - 1][0] == DIFF_DELETE && - diffs[pointer][0] == DIFF_INSERT) { - var deletion = diffs[pointer - 1][1]; - var insertion = diffs[pointer][1]; - var overlap_length1 = this.diff_commonOverlap_(deletion, insertion); - var overlap_length2 = this.diff_commonOverlap_(insertion, deletion); + if ( + diffs[pointer - 1][0] == DIFF_DELETE && + diffs[pointer][0] == DIFF_INSERT + ) { + var deletion = diffs[pointer - 1][1] + var insertion = diffs[pointer][1] + var overlap_length1 = this.diff_commonOverlap_(deletion, insertion) + var overlap_length2 = this.diff_commonOverlap_(insertion, deletion) if (overlap_length1 >= overlap_length2) { - if (overlap_length1 >= deletion.length / 2 || - overlap_length1 >= insertion.length / 2) { + if ( + overlap_length1 >= deletion.length / 2 || + overlap_length1 >= insertion.length / 2 + ) { // Overlap found. Insert an equality and trim the surrounding edits. - diffs.splice(pointer, 0, - [DIFF_EQUAL, insertion.substring(0, overlap_length1)]); - diffs[pointer - 1][1] = - deletion.substring(0, deletion.length - overlap_length1); - diffs[pointer + 1][1] = insertion.substring(overlap_length1); - pointer++; + diffs.splice(pointer, 0, [ + DIFF_EQUAL, + insertion.substring(0, overlap_length1) + ]) + diffs[pointer - 1][1] = deletion.substring( + 0, + deletion.length - overlap_length1 + ) + diffs[pointer + 1][1] = insertion.substring(overlap_length1) + pointer++ } } else { - if (overlap_length2 >= deletion.length / 2 || - overlap_length2 >= insertion.length / 2) { + if ( + overlap_length2 >= deletion.length / 2 || + overlap_length2 >= insertion.length / 2 + ) { // Reverse overlap found. // Insert an equality and swap and trim the surrounding edits. - diffs.splice(pointer, 0, - [DIFF_EQUAL, deletion.substring(0, overlap_length2)]); - diffs[pointer - 1][0] = DIFF_INSERT; - diffs[pointer - 1][1] = - insertion.substring(0, insertion.length - overlap_length2); - diffs[pointer + 1][0] = DIFF_DELETE; - diffs[pointer + 1][1] = - deletion.substring(overlap_length2); - pointer++; + diffs.splice(pointer, 0, [ + DIFF_EQUAL, + deletion.substring(0, overlap_length2) + ]) + diffs[pointer - 1][0] = DIFF_INSERT + diffs[pointer - 1][1] = insertion.substring( + 0, + insertion.length - overlap_length2 + ) + diffs[pointer + 1][0] = DIFF_DELETE + diffs[pointer + 1][1] = deletion.substring(overlap_length2) + pointer++ } } - pointer++; + pointer++ } - pointer++; + pointer++ } -}; - +} /** * Look for single edits surrounded on both sides by equalities @@ -842,7 +902,7 @@ diff_match_patch.prototype.diff_cleanupSemantic = function(diffs) { * e.g: The cat came. -> The cat came. * @param {!Array.} diffs Array of diff tuples. */ -diff_match_patch.prototype.diff_cleanupSemanticLossless = function(diffs) { +diff_match_patch.prototype.diff_cleanupSemanticLossless = function (diffs) { /** * Given two strings, compute a score representing whether the internal * boundary falls on logical boundaries. 
@@ -856,7 +916,7 @@ diff_match_patch.prototype.diff_cleanupSemanticLossless = function(diffs) { function diff_cleanupSemanticScore_(one, two) { if (!one || !two) { // Edges are the best. - return 6; + return 6 } // Each port of this function behaves slightly differently due to @@ -864,150 +924,158 @@ diff_match_patch.prototype.diff_cleanupSemanticLossless = function(diffs) { // 'whitespace'. Since this function's purpose is largely cosmetic, // the choice has been made to use each language's native features // rather than force total conformity. - var char1 = one.charAt(one.length - 1); - var char2 = two.charAt(0); - var nonAlphaNumeric1 = char1.match(diff_match_patch.nonAlphaNumericRegex_); - var nonAlphaNumeric2 = char2.match(diff_match_patch.nonAlphaNumericRegex_); - var whitespace1 = nonAlphaNumeric1 && - char1.match(diff_match_patch.whitespaceRegex_); - var whitespace2 = nonAlphaNumeric2 && - char2.match(diff_match_patch.whitespaceRegex_); - var lineBreak1 = whitespace1 && - char1.match(diff_match_patch.linebreakRegex_); - var lineBreak2 = whitespace2 && - char2.match(diff_match_patch.linebreakRegex_); - var blankLine1 = lineBreak1 && - one.match(diff_match_patch.blanklineEndRegex_); - var blankLine2 = lineBreak2 && - two.match(diff_match_patch.blanklineStartRegex_); + var char1 = one.charAt(one.length - 1) + var char2 = two.charAt(0) + var nonAlphaNumeric1 = char1.match(diff_match_patch.nonAlphaNumericRegex_) + var nonAlphaNumeric2 = char2.match(diff_match_patch.nonAlphaNumericRegex_) + var whitespace1 = + nonAlphaNumeric1 && char1.match(diff_match_patch.whitespaceRegex_) + var whitespace2 = + nonAlphaNumeric2 && char2.match(diff_match_patch.whitespaceRegex_) + var lineBreak1 = + whitespace1 && char1.match(diff_match_patch.linebreakRegex_) + var lineBreak2 = + whitespace2 && char2.match(diff_match_patch.linebreakRegex_) + var blankLine1 = + lineBreak1 && one.match(diff_match_patch.blanklineEndRegex_) + var blankLine2 = + lineBreak2 && two.match(diff_match_patch.blanklineStartRegex_) if (blankLine1 || blankLine2) { // Five points for blank lines. - return 5; + return 5 } else if (lineBreak1 || lineBreak2) { // Four points for line breaks. - return 4; + return 4 } else if (nonAlphaNumeric1 && !whitespace1 && whitespace2) { // Three points for end of sentences. - return 3; + return 3 } else if (whitespace1 || whitespace2) { // Two points for whitespace. - return 2; + return 2 } else if (nonAlphaNumeric1 || nonAlphaNumeric2) { // One point for non-alphanumeric. - return 1; + return 1 } - return 0; + return 0 } - var pointer = 1; + var pointer = 1 // Intentionally ignore the first and last element (don't need checking). while (pointer < diffs.length - 1) { - if (diffs[pointer - 1][0] == DIFF_EQUAL && - diffs[pointer + 1][0] == DIFF_EQUAL) { + if ( + diffs[pointer - 1][0] == DIFF_EQUAL && + diffs[pointer + 1][0] == DIFF_EQUAL + ) { // This is a single edit surrounded by equalities. - var equality1 = diffs[pointer - 1][1]; - var edit = diffs[pointer][1]; - var equality2 = diffs[pointer + 1][1]; + var equality1 = diffs[pointer - 1][1] + var edit = diffs[pointer][1] + var equality2 = diffs[pointer + 1][1] // First, shift the edit as far left as possible. 
- var commonOffset = this.diff_commonSuffix(equality1, edit); + var commonOffset = this.diff_commonSuffix(equality1, edit) if (commonOffset) { - var commonString = edit.substring(edit.length - commonOffset); - equality1 = equality1.substring(0, equality1.length - commonOffset); - edit = commonString + edit.substring(0, edit.length - commonOffset); - equality2 = commonString + equality2; + var commonString = edit.substring(edit.length - commonOffset) + equality1 = equality1.substring(0, equality1.length - commonOffset) + edit = commonString + edit.substring(0, edit.length - commonOffset) + equality2 = commonString + equality2 } // Second, step character by character right, looking for the best fit. - var bestEquality1 = equality1; - var bestEdit = edit; - var bestEquality2 = equality2; - var bestScore = diff_cleanupSemanticScore_(equality1, edit) + - diff_cleanupSemanticScore_(edit, equality2); + var bestEquality1 = equality1 + var bestEdit = edit + var bestEquality2 = equality2 + var bestScore = + diff_cleanupSemanticScore_(equality1, edit) + + diff_cleanupSemanticScore_(edit, equality2) while (edit.charAt(0) === equality2.charAt(0)) { - equality1 += edit.charAt(0); - edit = edit.substring(1) + equality2.charAt(0); - equality2 = equality2.substring(1); - var score = diff_cleanupSemanticScore_(equality1, edit) + - diff_cleanupSemanticScore_(edit, equality2); + equality1 += edit.charAt(0) + edit = edit.substring(1) + equality2.charAt(0) + equality2 = equality2.substring(1) + var score = + diff_cleanupSemanticScore_(equality1, edit) + + diff_cleanupSemanticScore_(edit, equality2) // The >= encourages trailing rather than leading whitespace on edits. if (score >= bestScore) { - bestScore = score; - bestEquality1 = equality1; - bestEdit = edit; - bestEquality2 = equality2; + bestScore = score + bestEquality1 = equality1 + bestEdit = edit + bestEquality2 = equality2 } } if (diffs[pointer - 1][1] != bestEquality1) { // We have an improvement, save it back to the diff. if (bestEquality1) { - diffs[pointer - 1][1] = bestEquality1; + diffs[pointer - 1][1] = bestEquality1 } else { - diffs.splice(pointer - 1, 1); - pointer--; + diffs.splice(pointer - 1, 1) + pointer-- } - diffs[pointer][1] = bestEdit; + diffs[pointer][1] = bestEdit if (bestEquality2) { - diffs[pointer + 1][1] = bestEquality2; + diffs[pointer + 1][1] = bestEquality2 } else { - diffs.splice(pointer + 1, 1); - pointer--; + diffs.splice(pointer + 1, 1) + pointer-- } } } - pointer++; + pointer++ } -}; +} // Define some regex patterns for matching boundaries. -diff_match_patch.nonAlphaNumericRegex_ = /[^a-zA-Z0-9]/; -diff_match_patch.whitespaceRegex_ = /\s/; -diff_match_patch.linebreakRegex_ = /[\r\n]/; -diff_match_patch.blanklineEndRegex_ = /\n\r?\n$/; -diff_match_patch.blanklineStartRegex_ = /^\r?\n\r?\n/; +diff_match_patch.nonAlphaNumericRegex_ = /[^a-zA-Z0-9]/ +diff_match_patch.whitespaceRegex_ = /\s/ +diff_match_patch.linebreakRegex_ = /[\r\n]/ +diff_match_patch.blanklineEndRegex_ = /\n\r?\n$/ +diff_match_patch.blanklineStartRegex_ = /^\r?\n\r?\n/ /** * Reduce the number of edits by eliminating operationally trivial equalities. * @param {!Array.} diffs Array of diff tuples. */ -diff_match_patch.prototype.diff_cleanupEfficiency = function(diffs) { - var changes = false; - var equalities = []; // Stack of indices where equalities are found. - var equalitiesLength = 0; // Keeping our own length var is faster in JS. 
+diff_match_patch.prototype.diff_cleanupEfficiency = function (diffs) { + var changes = false + var equalities = [] // Stack of indices where equalities are found. + var equalitiesLength = 0 // Keeping our own length var is faster in JS. /** @type {?string} */ - var lastequality = null; + var lastequality = null // Always equal to diffs[equalities[equalitiesLength - 1]][1] - var pointer = 0; // Index of current position. + var pointer = 0 // Index of current position. // Is there an insertion operation before the last equality. - var pre_ins = false; + var pre_ins = false // Is there a deletion operation before the last equality. - var pre_del = false; + var pre_del = false // Is there an insertion operation after the last equality. - var post_ins = false; + var post_ins = false // Is there a deletion operation after the last equality. - var post_del = false; + var post_del = false while (pointer < diffs.length) { - if (diffs[pointer][0] == DIFF_EQUAL) { // Equality found. - if (diffs[pointer][1].length < this.Diff_EditCost && - (post_ins || post_del)) { + if (diffs[pointer][0] == DIFF_EQUAL) { + // Equality found. + if ( + diffs[pointer][1].length < this.Diff_EditCost && + (post_ins || post_del) + ) { // Candidate found. - equalities[equalitiesLength++] = pointer; - pre_ins = post_ins; - pre_del = post_del; - lastequality = diffs[pointer][1]; + equalities[equalitiesLength++] = pointer + pre_ins = post_ins + pre_del = post_del + lastequality = diffs[pointer][1] } else { // Not a candidate, and can never become one. - equalitiesLength = 0; - lastequality = null; + equalitiesLength = 0 + lastequality = null } - post_ins = post_del = false; - } else { // An insertion or deletion. + post_ins = post_del = false + } else { + // An insertion or deletion. if (diffs[pointer][0] == DIFF_DELETE) { - post_del = true; + post_del = true } else { - post_ins = true; + post_ins = true } /* * Five types to be split: @@ -1017,164 +1085,198 @@ diff_match_patch.prototype.diff_cleanupEfficiency = function(diffs) { * AXCD * ABXC */ - if (lastequality && ((pre_ins && pre_del && post_ins && post_del) || - ((lastequality.length < this.Diff_EditCost / 2) && - (pre_ins + pre_del + post_ins + post_del) == 3))) { + if ( + lastequality && + ((pre_ins && pre_del && post_ins && post_del) || + (lastequality.length < this.Diff_EditCost / 2 && + pre_ins + pre_del + post_ins + post_del == 3)) + ) { // Duplicate record. - diffs.splice(equalities[equalitiesLength - 1], 0, - [DIFF_DELETE, lastequality]); + diffs.splice(equalities[equalitiesLength - 1], 0, [ + DIFF_DELETE, + lastequality + ]) // Change second copy to insert. - diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT; - equalitiesLength--; // Throw away the equality we just deleted; - lastequality = null; + diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT + equalitiesLength-- // Throw away the equality we just deleted; + lastequality = null if (pre_ins && pre_del) { // No changes made which could affect previous entry, keep going. - post_ins = post_del = true; - equalitiesLength = 0; + post_ins = post_del = true + equalitiesLength = 0 } else { - equalitiesLength--; // Throw away the previous equality. - pointer = equalitiesLength > 0 ? - equalities[equalitiesLength - 1] : -1; - post_ins = post_del = false; + equalitiesLength-- // Throw away the previous equality. + pointer = equalitiesLength > 0 ? 
equalities[equalitiesLength - 1] : -1 + post_ins = post_del = false } - changes = true; + changes = true } } - pointer++; + pointer++ } if (changes) { - this.diff_cleanupMerge(diffs); + this.diff_cleanupMerge(diffs) } -}; - +} /** * Reorder and merge like edit sections. Merge equalities. * Any edit section can move as long as it doesn't cross an equality. * @param {!Array.} diffs Array of diff tuples. */ -diff_match_patch.prototype.diff_cleanupMerge = function(diffs) { - diffs.push([DIFF_EQUAL, '']); // Add a dummy entry at the end. - var pointer = 0; - var count_delete = 0; - var count_insert = 0; - var text_delete = ''; - var text_insert = ''; - var commonlength; +diff_match_patch.prototype.diff_cleanupMerge = function (diffs) { + diffs.push([DIFF_EQUAL, '']) // Add a dummy entry at the end. + var pointer = 0 + var count_delete = 0 + var count_insert = 0 + var text_delete = '' + var text_insert = '' + var commonlength while (pointer < diffs.length) { switch (diffs[pointer][0]) { case DIFF_INSERT: - count_insert++; - text_insert += diffs[pointer][1]; - pointer++; - break; + count_insert++ + text_insert += diffs[pointer][1] + pointer++ + break case DIFF_DELETE: - count_delete++; - text_delete += diffs[pointer][1]; - pointer++; - break; + count_delete++ + text_delete += diffs[pointer][1] + pointer++ + break case DIFF_EQUAL: // Upon reaching an equality, check for prior redundancies. if (count_delete + count_insert > 1) { if (count_delete !== 0 && count_insert !== 0) { // Factor out any common prefixies. - commonlength = this.diff_commonPrefix(text_insert, text_delete); + commonlength = this.diff_commonPrefix(text_insert, text_delete) if (commonlength !== 0) { - if ((pointer - count_delete - count_insert) > 0 && - diffs[pointer - count_delete - count_insert - 1][0] == - DIFF_EQUAL) { - diffs[pointer - count_delete - count_insert - 1][1] += - text_insert.substring(0, commonlength); + if ( + pointer - count_delete - count_insert > 0 && + diffs[pointer - count_delete - count_insert - 1][0] == + DIFF_EQUAL + ) { + diffs[ + pointer - count_delete - count_insert - 1 + ][1] += text_insert.substring(0, commonlength) } else { - diffs.splice(0, 0, [DIFF_EQUAL, - text_insert.substring(0, commonlength)]); - pointer++; + diffs.splice(0, 0, [ + DIFF_EQUAL, + text_insert.substring(0, commonlength) + ]) + pointer++ } - text_insert = text_insert.substring(commonlength); - text_delete = text_delete.substring(commonlength); + text_insert = text_insert.substring(commonlength) + text_delete = text_delete.substring(commonlength) } // Factor out any common suffixies. - commonlength = this.diff_commonSuffix(text_insert, text_delete); + commonlength = this.diff_commonSuffix(text_insert, text_delete) if (commonlength !== 0) { - diffs[pointer][1] = text_insert.substring(text_insert.length - - commonlength) + diffs[pointer][1]; - text_insert = text_insert.substring(0, text_insert.length - - commonlength); - text_delete = text_delete.substring(0, text_delete.length - - commonlength); + diffs[pointer][1] = + text_insert.substring(text_insert.length - commonlength) + + diffs[pointer][1] + text_insert = text_insert.substring( + 0, + text_insert.length - commonlength + ) + text_delete = text_delete.substring( + 0, + text_delete.length - commonlength + ) } } // Delete the offending records and add the merged ones. 
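// (A hedged worked example of the factoring above, using this file's
// public constants:
//
//   var dmp = new diff_match_patch()
//   var example = [
//     [DIFF_EQUAL, 'Z'],
//     [DIFF_DELETE, 'abxc'],
//     [DIFF_INSERT, 'abyc'],
//     [DIFF_EQUAL, 'W']
//   ]
//   dmp.diff_cleanupMerge(example)
//   // example is now [[DIFF_EQUAL, 'Zab'], [DIFF_DELETE, 'x'],
//   //                 [DIFF_INSERT, 'y'], [DIFF_EQUAL, 'cW']]
//
// The common prefix 'ab' is absorbed by the preceding equality and the
// common suffix 'c' is prepended to the following one.)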
if (count_delete === 0) { - diffs.splice(pointer - count_insert, - count_delete + count_insert, [DIFF_INSERT, text_insert]); + diffs.splice(pointer - count_insert, count_delete + count_insert, [ + DIFF_INSERT, + text_insert + ]) } else if (count_insert === 0) { - diffs.splice(pointer - count_delete, - count_delete + count_insert, [DIFF_DELETE, text_delete]); + diffs.splice(pointer - count_delete, count_delete + count_insert, [ + DIFF_DELETE, + text_delete + ]) } else { - diffs.splice(pointer - count_delete - count_insert, - count_delete + count_insert, [DIFF_DELETE, text_delete], - [DIFF_INSERT, text_insert]); + diffs.splice( + pointer - count_delete - count_insert, + count_delete + count_insert, + [DIFF_DELETE, text_delete], + [DIFF_INSERT, text_insert] + ) } - pointer = pointer - count_delete - count_insert + - (count_delete ? 1 : 0) + (count_insert ? 1 : 0) + 1; + pointer = + pointer - + count_delete - + count_insert + + (count_delete ? 1 : 0) + + (count_insert ? 1 : 0) + + 1 } else if (pointer !== 0 && diffs[pointer - 1][0] == DIFF_EQUAL) { // Merge this equality with the previous one. - diffs[pointer - 1][1] += diffs[pointer][1]; - diffs.splice(pointer, 1); + diffs[pointer - 1][1] += diffs[pointer][1] + diffs.splice(pointer, 1) } else { - pointer++; + pointer++ } - count_insert = 0; - count_delete = 0; - text_delete = ''; - text_insert = ''; - break; + count_insert = 0 + count_delete = 0 + text_delete = '' + text_insert = '' + break } } if (diffs[diffs.length - 1][1] === '') { - diffs.pop(); // Remove the dummy entry at the end. + diffs.pop() // Remove the dummy entry at the end. } // Second pass: look for single edits surrounded on both sides by equalities // which can be shifted sideways to eliminate an equality. // e.g: ABAC -> ABAC - var changes = false; - pointer = 1; + var changes = false + pointer = 1 // Intentionally ignore the first and last element (don't need checking). while (pointer < diffs.length - 1) { - if (diffs[pointer - 1][0] == DIFF_EQUAL && - diffs[pointer + 1][0] == DIFF_EQUAL) { + if ( + diffs[pointer - 1][0] == DIFF_EQUAL && + diffs[pointer + 1][0] == DIFF_EQUAL + ) { // This is a single edit surrounded by equalities. - if (diffs[pointer][1].substring(diffs[pointer][1].length - - diffs[pointer - 1][1].length) == diffs[pointer - 1][1]) { + if ( + diffs[pointer][1].substring( + diffs[pointer][1].length - diffs[pointer - 1][1].length + ) == diffs[pointer - 1][1] + ) { // Shift the edit over the previous equality. - diffs[pointer][1] = diffs[pointer - 1][1] + - diffs[pointer][1].substring(0, diffs[pointer][1].length - - diffs[pointer - 1][1].length); - diffs[pointer + 1][1] = diffs[pointer - 1][1] + diffs[pointer + 1][1]; - diffs.splice(pointer - 1, 1); - changes = true; - } else if (diffs[pointer][1].substring(0, diffs[pointer + 1][1].length) == - diffs[pointer + 1][1]) { - // Shift the edit over the next equality. - diffs[pointer - 1][1] += diffs[pointer + 1][1]; diffs[pointer][1] = - diffs[pointer][1].substring(diffs[pointer + 1][1].length) + - diffs[pointer + 1][1]; - diffs.splice(pointer + 1, 1); - changes = true; + diffs[pointer - 1][1] + + diffs[pointer][1].substring( + 0, + diffs[pointer][1].length - diffs[pointer - 1][1].length + ) + diffs[pointer + 1][1] = diffs[pointer - 1][1] + diffs[pointer + 1][1] + diffs.splice(pointer - 1, 1) + changes = true + } else if ( + diffs[pointer][1].substring(0, diffs[pointer + 1][1].length) == + diffs[pointer + 1][1] + ) { + // Shift the edit over the next equality. 
+          diffs[pointer - 1][1] += diffs[pointer + 1][1]
+          diffs[pointer][1] =
+            diffs[pointer][1].substring(diffs[pointer + 1][1].length) +
+            diffs[pointer + 1][1]
+          diffs.splice(pointer + 1, 1)
+          changes = true
      }
    }
-    pointer++;
+    pointer++
  }
  // If shifts were made, the diff needs reordering and another shift sweep.
  if (changes) {
-    this.diff_cleanupMerge(diffs);
+    this.diff_cleanupMerge(diffs)
  }
-};
-
+}

/**
 * loc is a location in text1, compute and return the equivalent location in
@@ -1184,97 +1286,99 @@ diff_match_patch.prototype.diff_cleanupMerge = function(diffs) {
 * @param {number} loc Location within text1.
 * @return {number} Location within text2.
 */
-diff_match_patch.prototype.diff_xIndex = function(diffs, loc) {
-  var chars1 = 0;
-  var chars2 = 0;
-  var last_chars1 = 0;
-  var last_chars2 = 0;
-  var x;
+diff_match_patch.prototype.diff_xIndex = function (diffs, loc) {
+  var chars1 = 0
+  var chars2 = 0
+  var last_chars1 = 0
+  var last_chars2 = 0
+  var x
  for (x = 0; x < diffs.length; x++) {
-    if (diffs[x][0] !== DIFF_INSERT) { // Equality or deletion.
-      chars1 += diffs[x][1].length;
+    if (diffs[x][0] !== DIFF_INSERT) {
+      // Equality or deletion.
+      chars1 += diffs[x][1].length
    }
-    if (diffs[x][0] !== DIFF_DELETE) { // Equality or insertion.
-      chars2 += diffs[x][1].length;
+    if (diffs[x][0] !== DIFF_DELETE) {
+      // Equality or insertion.
+      chars2 += diffs[x][1].length
    }
-    if (chars1 > loc) { // Overshot the location.
-      break;
+    if (chars1 > loc) {
+      // Overshot the location.
+      break
    }
-    last_chars1 = chars1;
-    last_chars2 = chars2;
+    last_chars1 = chars1
+    last_chars2 = chars2
  }
  // Was the location deleted?
  if (diffs.length != x && diffs[x][0] === DIFF_DELETE) {
-    return last_chars2;
+    return last_chars2
  }
  // Add the remaining character length.
-  return last_chars2 + (loc - last_chars1);
-};
-
+  return last_chars2 + (loc - last_chars1)
+}

/**
 * Convert a diff array into a pretty HTML report.
 * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
 * @return {string} HTML representation.
 */
-diff_match_patch.prototype.diff_prettyHtml = function(diffs) {
-  var html = [];
-  var pattern_amp = /&/g;
-  var pattern_lt = /</g;
-  var pattern_gt = />/g;
-  var pattern_para = /\n/g;
+diff_match_patch.prototype.diff_prettyHtml = function (diffs) {
+  var html = []
+  var pattern_amp = /&/g
+  var pattern_lt = /</g
+  var pattern_gt = />/g
+  var pattern_para = /\n/g
  for (var x = 0; x < diffs.length; x++) {
-    var op = diffs[x][0]; // Operation (insert, delete, equal)
-    var data = diffs[x][1]; // Text of change.
-    var text = data.replace(pattern_amp, '&amp;').replace(pattern_lt, '&lt;')
-        .replace(pattern_gt, '&gt;').replace(pattern_para, '&para;<br>');
+    var op = diffs[x][0] // Operation (insert, delete, equal)
+    var data = diffs[x][1] // Text of change.
+    var text = data
+      .replace(pattern_amp, '&amp;')
+      .replace(pattern_lt, '&lt;')
+      .replace(pattern_gt, '&gt;')
+      .replace(pattern_para, '&para;<br>')
    switch (op) {
      case DIFF_INSERT:
-        html[x] = '<ins style="background:#e6ffe6;">' + text + '</ins>';
-        break;
+        html[x] = '<ins style="background:#e6ffe6;">' + text + '</ins>'
+        break
      case DIFF_DELETE:
-        html[x] = '<del style="background:#ffe6e6;">' + text + '</del>';
-        break;
+        html[x] = '<del style="background:#ffe6e6;">' + text + '</del>'
+        break
      case DIFF_EQUAL:
-        html[x] = '<span>' + text + '</span>';
-        break;
+        html[x] = '<span>' + text + '</span>'
+        break
    }
  }
-  return html.join('');
-};
-
+  return html.join('')
+}

/**
 * Compute and return the source text (all equalities and deletions).
 * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
 * @return {string} Source text.
 */
-diff_match_patch.prototype.diff_text1 = function(diffs) {
-  var text = [];
+diff_match_patch.prototype.diff_text1 = function (diffs) {
+  var text = []
  for (var x = 0; x < diffs.length; x++) {
    if (diffs[x][0] !== DIFF_INSERT) {
-      text[x] = diffs[x][1];
+      text[x] = diffs[x][1]
    }
  }
-  return text.join('');
-};
-
+  return text.join('')
}

/**
 * Compute and return the destination text (all equalities and insertions).
 * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
 * @return {string} Destination text.
 */
-diff_match_patch.prototype.diff_text2 = function(diffs) {
-  var text = [];
+diff_match_patch.prototype.diff_text2 = function (diffs) {
+  var text = []
  for (var x = 0; x < diffs.length; x++) {
    if (diffs[x][0] !== DIFF_DELETE) {
-      text[x] = diffs[x][1];
+      text[x] = diffs[x][1]
    }
  }
-  return text.join('');
-};
-
+  return text.join('')
+}

/**
 * Compute the Levenshtein distance; the number of inserted, deleted or
@@ -1282,32 +1386,31 @@ diff_match_patch.prototype.diff_text2 = function(diffs) {
 * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
 * @return {number} Number of changes.
 */
-diff_match_patch.prototype.diff_levenshtein = function(diffs) {
-  var levenshtein = 0;
-  var insertions = 0;
-  var deletions = 0;
+diff_match_patch.prototype.diff_levenshtein = function (diffs) {
+  var levenshtein = 0
+  var insertions = 0
+  var deletions = 0
  for (var x = 0; x < diffs.length; x++) {
-    var op = diffs[x][0];
-    var data = diffs[x][1];
+    var op = diffs[x][0]
+    var data = diffs[x][1]
    switch (op) {
      case DIFF_INSERT:
-        insertions += data.length;
-        break;
+        insertions += data.length
+        break
      case DIFF_DELETE:
-        deletions += data.length;
-        break;
+        deletions += data.length
+        break
      case DIFF_EQUAL:
        // A deletion and an insertion is one substitution.
-        levenshtein += Math.max(insertions, deletions);
-        insertions = 0;
-        deletions = 0;
-        break;
+        levenshtein += Math.max(insertions, deletions)
+        insertions = 0
+        deletions = 0
+        break
    }
  }
-  levenshtein += Math.max(insertions, deletions);
-  return levenshtein;
-};
-
+  levenshtein += Math.max(insertions, deletions)
+  return levenshtein
+}

/**
 * Crush the diff into an encoded string which describes the operations
@@ -1317,24 +1420,23 @@ diff_match_patch.prototype.diff_levenshtein = function(diffs) {
 * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
 * @return {string} Delta text.
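 *
 * A hedged sketch of the delta format (standard public API assumed):
 * operations are tab-separated; '=n' keeps n characters, '-n' deletes n,
 * and '+text' inserts URI-encoded text. For example:
 *
 *   var dmp = new diff_match_patch()
 *   var diffs = dmp.diff_fromDelta('kitten', '-1\t+s\t=3\t-1\t+i\t=1\t+g')
 *   dmp.diff_text2(diffs) // 'sitting'
 *   dmp.diff_toDelta(diffs) // '-1\t+s\t=3\t-1\t+i\t=1\t+g' again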
*/ -diff_match_patch.prototype.diff_toDelta = function(diffs) { - var text = []; +diff_match_patch.prototype.diff_toDelta = function (diffs) { + var text = [] for (var x = 0; x < diffs.length; x++) { switch (diffs[x][0]) { case DIFF_INSERT: - text[x] = '+' + encodeURI(diffs[x][1]); - break; + text[x] = '+' + encodeURI(diffs[x][1]) + break case DIFF_DELETE: - text[x] = '-' + diffs[x][1].length; - break; + text[x] = '-' + diffs[x][1].length + break case DIFF_EQUAL: - text[x] = '=' + diffs[x][1].length; - break; + text[x] = '=' + diffs[x][1].length + break } } - return text.join('\t').replace(/%20/g, ' '); -}; - + return text.join('\t').replace(/%20/g, ' ') +} /** * Given the original text1, and an encoded string which describes the @@ -1344,58 +1446,62 @@ diff_match_patch.prototype.diff_toDelta = function(diffs) { * @return {!Array.} Array of diff tuples. * @throws {!Error} If invalid input. */ -diff_match_patch.prototype.diff_fromDelta = function(text1, delta) { - var diffs = []; - var diffsLength = 0; // Keeping our own length var is faster in JS. - var pointer = 0; // Cursor in text1 - var tokens = delta.split(/\t/g); +diff_match_patch.prototype.diff_fromDelta = function (text1, delta) { + var diffs = [] + var diffsLength = 0 // Keeping our own length var is faster in JS. + var pointer = 0 // Cursor in text1 + var tokens = delta.split(/\t/g) for (var x = 0; x < tokens.length; x++) { // Each token begins with a one character parameter which specifies the // operation of this token (delete, insert, equality). - var param = tokens[x].substring(1); + var param = tokens[x].substring(1) switch (tokens[x].charAt(0)) { case '+': try { - diffs[diffsLength++] = [DIFF_INSERT, decodeURI(param)]; + diffs[diffsLength++] = [DIFF_INSERT, decodeURI(param)] } catch (ex) { // Malformed URI sequence. - throw new Error('Illegal escape in diff_fromDelta: ' + param); + throw new Error('Illegal escape in diff_fromDelta: ' + param) } - break; + break case '-': - // Fall through. + // Fall through. case '=': - var n = parseInt(param, 10); + var n = parseInt(param, 10) if (isNaN(n) || n < 0) { - throw new Error('Invalid number in diff_fromDelta: ' + param); + throw new Error('Invalid number in diff_fromDelta: ' + param) } - var text = text1.substring(pointer, pointer += n); + var text = text1.substring(pointer, (pointer += n)) if (tokens[x].charAt(0) == '=') { - diffs[diffsLength++] = [DIFF_EQUAL, text]; + diffs[diffsLength++] = [DIFF_EQUAL, text] } else { - diffs[diffsLength++] = [DIFF_DELETE, text]; + diffs[diffsLength++] = [DIFF_DELETE, text] } - break; + break default: // Blank tokens are ok (from a trailing \t). // Anything else is an error. if (tokens[x]) { - throw new Error('Invalid diff operation in diff_fromDelta: ' + - tokens[x]); + throw new Error( + 'Invalid diff operation in diff_fromDelta: ' + tokens[x] + ) } } } if (pointer != text1.length) { - throw new Error('Delta length (' + pointer + - ') does not equal source text length (' + text1.length + ').'); + throw new Error( + 'Delta length (' + + pointer + + ') does not equal source text length (' + + text1.length + + ').' + ) } - return diffs; -}; - + return diffs +} // MATCH FUNCTIONS - /** * Locate the best instance of 'pattern' in 'text' near 'loc'. * @param {string} text The text to search. @@ -1403,28 +1509,27 @@ diff_match_patch.prototype.diff_fromDelta = function(text1, delta) { * @param {number} loc The location to search around. * @return {number} Best match index or -1. 
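 *
 * A hedged usage sketch (default settings assumed):
 *
 *   var dmp = new diff_match_patch()
 *   dmp.match_main('abcdef', 'abcdef', 3) // 0: text equals pattern
 *   dmp.match_main('abcdefgh', 'cde', 2) // 2: perfect match at the spot
 *   dmp.match_main('', 'abc', 1) // -1: nothing to match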
*/ -diff_match_patch.prototype.match_main = function(text, pattern, loc) { +diff_match_patch.prototype.match_main = function (text, pattern, loc) { // Check for null inputs. if (text == null || pattern == null || loc == null) { - throw new Error('Null input. (match_main)'); + throw new Error('Null input. (match_main)') } - loc = Math.max(0, Math.min(loc, text.length)); + loc = Math.max(0, Math.min(loc, text.length)) if (text == pattern) { // Shortcut (potentially not guaranteed by the algorithm) - return 0; + return 0 } else if (!text.length) { // Nothing to match. - return -1; + return -1 } else if (text.substring(loc, loc + pattern.length) == pattern) { // Perfect match at the perfect spot! (Includes case of null pattern) - return loc; + return loc } else { // Do a fuzzy compare. - return this.match_bitap_(text, pattern, loc); + return this.match_bitap_(text, pattern, loc) } -}; - +} /** * Locate the best instance of 'pattern' in 'text' near 'loc' using the @@ -1435,15 +1540,15 @@ diff_match_patch.prototype.match_main = function(text, pattern, loc) { * @return {number} Best match index or -1. * @private */ -diff_match_patch.prototype.match_bitap_ = function(text, pattern, loc) { +diff_match_patch.prototype.match_bitap_ = function (text, pattern, loc) { if (pattern.length > this.Match_MaxBits) { - throw new Error('Pattern too long for this browser.'); + throw new Error('Pattern too long for this browser.') } // Initialise the alphabet. - var s = this.match_alphabet_(pattern); + var s = this.match_alphabet_(pattern) - var dmp = this; // 'this' becomes 'window' in a closure. + var dmp = this // 'this' becomes 'window' in a closure. /** * Compute and return the score for a match with e errors and x location. @@ -1454,95 +1559,99 @@ diff_match_patch.prototype.match_bitap_ = function(text, pattern, loc) { * @private */ function match_bitapScore_(e, x) { - var accuracy = e / pattern.length; - var proximity = Math.abs(loc - x); + var accuracy = e / pattern.length + var proximity = Math.abs(loc - x) if (!dmp.Match_Distance) { // Dodge divide by zero error. - return proximity ? 1.0 : accuracy; + return proximity ? 1.0 : accuracy } - return accuracy + (proximity / dmp.Match_Distance); + return accuracy + proximity / dmp.Match_Distance } // Highest score beyond which we give up. - var score_threshold = this.Match_Threshold; + var score_threshold = this.Match_Threshold // Is there a nearby exact match? (speedup) - var best_loc = text.indexOf(pattern, loc); + var best_loc = text.indexOf(pattern, loc) if (best_loc != -1) { - score_threshold = Math.min(match_bitapScore_(0, best_loc), score_threshold); + score_threshold = Math.min(match_bitapScore_(0, best_loc), score_threshold) // What about in the other direction? (speedup) - best_loc = text.lastIndexOf(pattern, loc + pattern.length); + best_loc = text.lastIndexOf(pattern, loc + pattern.length) if (best_loc != -1) { - score_threshold = - Math.min(match_bitapScore_(0, best_loc), score_threshold); + score_threshold = Math.min( + match_bitapScore_(0, best_loc), + score_threshold + ) } } // Initialise the bit arrays. - var matchmask = 1 << (pattern.length - 1); - best_loc = -1; + var matchmask = 1 << (pattern.length - 1) + best_loc = -1 - var bin_min, bin_mid; - var bin_max = pattern.length + text.length; - var last_rd; + var bin_min, bin_mid + var bin_max = pattern.length + text.length + var last_rd for (var d = 0; d < pattern.length; d++) { // Scan for the best match; each iteration allows for one more error. 
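    // (A hedged feel for the scoring below: with the library defaults of
    // Match_Threshold = 0.5 and Match_Distance = 1000, an exact match found
    // 100 characters from loc scores 0 + 100 / 1000 = 0.1 and is accepted,
    // while two errors in an 8-character pattern already cost 2 / 8 = 0.25
    // before any distance penalty is added.)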
// Run a binary search to determine how far from 'loc' we can stray at this // error level. - bin_min = 0; - bin_mid = bin_max; + bin_min = 0 + bin_mid = bin_max while (bin_min < bin_mid) { if (match_bitapScore_(d, loc + bin_mid) <= score_threshold) { - bin_min = bin_mid; + bin_min = bin_mid } else { - bin_max = bin_mid; + bin_max = bin_mid } - bin_mid = Math.floor((bin_max - bin_min) / 2 + bin_min); + bin_mid = Math.floor((bin_max - bin_min) / 2 + bin_min) } // Use the result from this iteration as the maximum for the next. - bin_max = bin_mid; - var start = Math.max(1, loc - bin_mid + 1); - var finish = Math.min(loc + bin_mid, text.length) + pattern.length; + bin_max = bin_mid + var start = Math.max(1, loc - bin_mid + 1) + var finish = Math.min(loc + bin_mid, text.length) + pattern.length - var rd = Array(finish + 2); - rd[finish + 1] = (1 << d) - 1; + var rd = Array(finish + 2) + rd[finish + 1] = (1 << d) - 1 for (var j = finish; j >= start; j--) { // The alphabet (s) is a sparse hash, so the following line generates // warnings. - var charMatch = s[text.charAt(j - 1)]; - if (d === 0) { // First pass: exact match. - rd[j] = ((rd[j + 1] << 1) | 1) & charMatch; - } else { // Subsequent passes: fuzzy match. - rd[j] = (((rd[j + 1] << 1) | 1) & charMatch) | - (((last_rd[j + 1] | last_rd[j]) << 1) | 1) | - last_rd[j + 1]; + var charMatch = s[text.charAt(j - 1)] + if (d === 0) { + // First pass: exact match. + rd[j] = ((rd[j + 1] << 1) | 1) & charMatch + } else { + // Subsequent passes: fuzzy match. + rd[j] = + (((rd[j + 1] << 1) | 1) & charMatch) | + (((last_rd[j + 1] | last_rd[j]) << 1) | 1) | + last_rd[j + 1] } if (rd[j] & matchmask) { - var score = match_bitapScore_(d, j - 1); + var score = match_bitapScore_(d, j - 1) // This match will almost certainly be better than any existing match. // But check anyway. if (score <= score_threshold) { // Told you so. - score_threshold = score; - best_loc = j - 1; + score_threshold = score + best_loc = j - 1 if (best_loc > loc) { // When passing loc, don't exceed our current distance from loc. - start = Math.max(1, 2 * loc - best_loc); + start = Math.max(1, 2 * loc - best_loc) } else { // Already passed loc, downhill from here on in. - break; + break } } } } // No hope for a (better) match at greater error levels. if (match_bitapScore_(d + 1, loc) > score_threshold) { - break; + break } - last_rd = rd; + last_rd = rd } - return best_loc; -}; - + return best_loc +} /** * Initialise the alphabet for the Bitap algorithm. @@ -1550,21 +1659,19 @@ diff_match_patch.prototype.match_bitap_ = function(text, pattern, loc) { * @return {!Object} Hash of character locations. * @private */ -diff_match_patch.prototype.match_alphabet_ = function(pattern) { - var s = {}; +diff_match_patch.prototype.match_alphabet_ = function (pattern) { + var s = {} for (var i = 0; i < pattern.length; i++) { - s[pattern.charAt(i)] = 0; + s[pattern.charAt(i)] = 0 } for (var i = 0; i < pattern.length; i++) { - s[pattern.charAt(i)] |= 1 << (pattern.length - i - 1); + s[pattern.charAt(i)] |= 1 << (pattern.length - i - 1) } - return s; -}; - + return s +} // PATCH FUNCTIONS - /** * Increase the context until it is unique, * but don't let the pattern expand beyond Match_MaxBits. @@ -1572,45 +1679,49 @@ diff_match_patch.prototype.match_alphabet_ = function(pattern) { * @param {string} text Source text. 
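 * (Hedged note: with the defaults Patch_Margin = 4 and Match_MaxBits = 32,
 * the pattern grows by one margin on each side per iteration until it
 * occurs only once in text, then a final margin is added to both ends and
 * the patch offsets and lengths are adjusted to cover the new context.)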
* @private */ -diff_match_patch.prototype.patch_addContext_ = function(patch, text) { +diff_match_patch.prototype.patch_addContext_ = function (patch, text) { if (text.length == 0) { - return; + return } - var pattern = text.substring(patch.start2, patch.start2 + patch.length1); - var padding = 0; + var pattern = text.substring(patch.start2, patch.start2 + patch.length1) + var padding = 0 // Look for the first and last matches of pattern in text. If two different // matches are found, increase the pattern length. - while (text.indexOf(pattern) != text.lastIndexOf(pattern) && - pattern.length < this.Match_MaxBits - this.Patch_Margin - - this.Patch_Margin) { - padding += this.Patch_Margin; - pattern = text.substring(patch.start2 - padding, - patch.start2 + patch.length1 + padding); + while ( + text.indexOf(pattern) != text.lastIndexOf(pattern) && + pattern.length < this.Match_MaxBits - this.Patch_Margin - this.Patch_Margin + ) { + padding += this.Patch_Margin + pattern = text.substring( + patch.start2 - padding, + patch.start2 + patch.length1 + padding + ) } // Add one chunk for good luck. - padding += this.Patch_Margin; + padding += this.Patch_Margin // Add the prefix. - var prefix = text.substring(patch.start2 - padding, patch.start2); + var prefix = text.substring(patch.start2 - padding, patch.start2) if (prefix) { - patch.diffs.unshift([DIFF_EQUAL, prefix]); + patch.diffs.unshift([DIFF_EQUAL, prefix]) } // Add the suffix. - var suffix = text.substring(patch.start2 + patch.length1, - patch.start2 + patch.length1 + padding); + var suffix = text.substring( + patch.start2 + patch.length1, + patch.start2 + patch.length1 + padding + ) if (suffix) { - patch.diffs.push([DIFF_EQUAL, suffix]); + patch.diffs.push([DIFF_EQUAL, suffix]) } // Roll back the start points. - patch.start1 -= prefix.length; - patch.start2 -= prefix.length; + patch.start1 -= prefix.length + patch.start2 -= prefix.length // Extend the lengths. - patch.length1 += prefix.length + suffix.length; - patch.length2 += prefix.length + suffix.length; -}; - + patch.length1 += prefix.length + suffix.length + patch.length2 += prefix.length + suffix.length +} /** * Compute a list of patches to turn text1 into text2. @@ -1634,143 +1745,161 @@ diff_match_patch.prototype.patch_addContext_ = function(patch, text) { * for text1 to text2 (method 4) or undefined (methods 1,2,3). * @return {!Array.} Array of Patch objects. */ -diff_match_patch.prototype.patch_make = function(a, opt_b, opt_c) { - var text1, diffs; - if (typeof a == 'string' && typeof opt_b == 'string' && - typeof opt_c == 'undefined') { +diff_match_patch.prototype.patch_make = function (a, opt_b, opt_c) { + var text1, diffs + if ( + typeof a == 'string' && + typeof opt_b == 'string' && + typeof opt_c == 'undefined' + ) { // Method 1: text1, text2 // Compute diffs from text1 and text2. - text1 = /** @type {string} */(a); - diffs = this.diff_main(text1, /** @type {string} */(opt_b), true); + text1 = /** @type {string} */ (a) + diffs = this.diff_main(text1, /** @type {string} */ (opt_b), true) if (diffs.length > 2) { - this.diff_cleanupSemantic(diffs); - this.diff_cleanupEfficiency(diffs); + this.diff_cleanupSemantic(diffs) + this.diff_cleanupEfficiency(diffs) } - } else if (a && typeof a == 'object' && typeof opt_b == 'undefined' && - typeof opt_c == 'undefined') { + } else if ( + a && + typeof a == 'object' && + typeof opt_b == 'undefined' && + typeof opt_c == 'undefined' + ) { // Method 2: diffs // Compute text1 from diffs. 
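  // (A hedged sketch of the call shapes this dispatch handles, standard
  // public API assumed:
  //
  //   var dmp = new diff_match_patch()
  //   dmp.patch_make('The quick fox', 'The slow fox') // method 1
  //   dmp.patch_make(dmp.diff_main('a', 'ab')) // method 2
  //   dmp.patch_make('a', dmp.diff_main('a', 'ab')) // method 3
  //
  // All three return an array of diff_match_patch.patch_obj instances.)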
- diffs = /** @type {!Array.} */(a); - text1 = this.diff_text1(diffs); - } else if (typeof a == 'string' && opt_b && typeof opt_b == 'object' && - typeof opt_c == 'undefined') { + diffs = /** @type {!Array.} */ (a) + text1 = this.diff_text1(diffs) + } else if ( + typeof a == 'string' && + opt_b && + typeof opt_b == 'object' && + typeof opt_c == 'undefined' + ) { // Method 3: text1, diffs - text1 = /** @type {string} */(a); - diffs = /** @type {!Array.} */(opt_b); - } else if (typeof a == 'string' && typeof opt_b == 'string' && - opt_c && typeof opt_c == 'object') { + text1 = /** @type {string} */ (a) + diffs = /** @type {!Array.} */ (opt_b) + } else if ( + typeof a == 'string' && + typeof opt_b == 'string' && + opt_c && + typeof opt_c == 'object' + ) { // Method 4: text1, text2, diffs // text2 is not used. - text1 = /** @type {string} */(a); - diffs = /** @type {!Array.} */(opt_c); + text1 = /** @type {string} */ (a) + diffs = /** @type {!Array.} */ (opt_c) } else { - throw new Error('Unknown call format to patch_make.'); + throw new Error('Unknown call format to patch_make.') } if (diffs.length === 0) { - return []; // Get rid of the null case. + return [] // Get rid of the null case. } - var patches = []; - var patch = new diff_match_patch.patch_obj(); - var patchDiffLength = 0; // Keeping our own length var is faster in JS. - var char_count1 = 0; // Number of characters into the text1 string. - var char_count2 = 0; // Number of characters into the text2 string. + var patches = [] + var patch = new diff_match_patch.patch_obj() + var patchDiffLength = 0 // Keeping our own length var is faster in JS. + var char_count1 = 0 // Number of characters into the text1 string. + var char_count2 = 0 // Number of characters into the text2 string. // Start with text1 (prepatch_text) and apply the diffs until we arrive at // text2 (postpatch_text). We recreate the patches one by one to determine // context info. - var prepatch_text = text1; - var postpatch_text = text1; + var prepatch_text = text1 + var postpatch_text = text1 for (var x = 0; x < diffs.length; x++) { - var diff_type = diffs[x][0]; - var diff_text = diffs[x][1]; + var diff_type = diffs[x][0] + var diff_text = diffs[x][1] if (!patchDiffLength && diff_type !== DIFF_EQUAL) { // A new patch starts here. 
- patch.start1 = char_count1; - patch.start2 = char_count2; + patch.start1 = char_count1 + patch.start2 = char_count2 } switch (diff_type) { case DIFF_INSERT: - patch.diffs[patchDiffLength++] = diffs[x]; - patch.length2 += diff_text.length; - postpatch_text = postpatch_text.substring(0, char_count2) + diff_text + - postpatch_text.substring(char_count2); - break; + patch.diffs[patchDiffLength++] = diffs[x] + patch.length2 += diff_text.length + postpatch_text = + postpatch_text.substring(0, char_count2) + + diff_text + + postpatch_text.substring(char_count2) + break case DIFF_DELETE: - patch.length1 += diff_text.length; - patch.diffs[patchDiffLength++] = diffs[x]; - postpatch_text = postpatch_text.substring(0, char_count2) + - postpatch_text.substring(char_count2 + - diff_text.length); - break; + patch.length1 += diff_text.length + patch.diffs[patchDiffLength++] = diffs[x] + postpatch_text = + postpatch_text.substring(0, char_count2) + + postpatch_text.substring(char_count2 + diff_text.length) + break case DIFF_EQUAL: - if (diff_text.length <= 2 * this.Patch_Margin && - patchDiffLength && diffs.length != x + 1) { + if ( + diff_text.length <= 2 * this.Patch_Margin && + patchDiffLength && + diffs.length != x + 1 + ) { // Small equality inside a patch. - patch.diffs[patchDiffLength++] = diffs[x]; - patch.length1 += diff_text.length; - patch.length2 += diff_text.length; + patch.diffs[patchDiffLength++] = diffs[x] + patch.length1 += diff_text.length + patch.length2 += diff_text.length } else if (diff_text.length >= 2 * this.Patch_Margin) { // Time for a new patch. if (patchDiffLength) { - this.patch_addContext_(patch, prepatch_text); - patches.push(patch); - patch = new diff_match_patch.patch_obj(); - patchDiffLength = 0; + this.patch_addContext_(patch, prepatch_text) + patches.push(patch) + patch = new diff_match_patch.patch_obj() + patchDiffLength = 0 // Unlike Unidiff, our patch lists have a rolling context. // http://code.google.com/p/google-diff-match-patch/wiki/Unidiff // Update prepatch text & pos to reflect the application of the // just completed patch. - prepatch_text = postpatch_text; - char_count1 = char_count2; + prepatch_text = postpatch_text + char_count1 = char_count2 } } - break; + break } // Update the current character count. if (diff_type !== DIFF_INSERT) { - char_count1 += diff_text.length; + char_count1 += diff_text.length } if (diff_type !== DIFF_DELETE) { - char_count2 += diff_text.length; + char_count2 += diff_text.length } } // Pick up the leftover patch if not empty. if (patchDiffLength) { - this.patch_addContext_(patch, prepatch_text); - patches.push(patch); + this.patch_addContext_(patch, prepatch_text) + patches.push(patch) } - return patches; -}; - + return patches +} /** * Given an array of patches, return another array that is identical. * @param {!Array.} patches Array of Patch objects. * @return {!Array.} Array of Patch objects. */ -diff_match_patch.prototype.patch_deepCopy = function(patches) { +diff_match_patch.prototype.patch_deepCopy = function (patches) { // Making deep copies is hard in JavaScript. 
- var patchesCopy = []; + var patchesCopy = [] for (var x = 0; x < patches.length; x++) { - var patch = patches[x]; - var patchCopy = new diff_match_patch.patch_obj(); - patchCopy.diffs = []; + var patch = patches[x] + var patchCopy = new diff_match_patch.patch_obj() + patchCopy.diffs = [] for (var y = 0; y < patch.diffs.length; y++) { - patchCopy.diffs[y] = patch.diffs[y].slice(); + patchCopy.diffs[y] = patch.diffs[y].slice() } - patchCopy.start1 = patch.start1; - patchCopy.start2 = patch.start2; - patchCopy.length1 = patch.length1; - patchCopy.length2 = patch.length2; - patchesCopy[x] = patchCopy; + patchCopy.start1 = patch.start1 + patchCopy.start2 = patch.start2 + patchCopy.length1 = patch.length1 + patchCopy.length2 = patch.length2 + patchesCopy[x] = patchCopy } - return patchesCopy; -}; - + return patchesCopy +} /** * Merge a set of patches onto the text. Return a patched text, as well @@ -1780,94 +1909,108 @@ diff_match_patch.prototype.patch_deepCopy = function(patches) { * @return {!Array.>} Two element Array, containing the * new text and an array of boolean values. */ -diff_match_patch.prototype.patch_apply = function(patches, text) { +diff_match_patch.prototype.patch_apply = function (patches, text) { if (patches.length == 0) { - return [text, []]; + return [text, []] } // Deep copy the patches so that no changes are made to originals. - patches = this.patch_deepCopy(patches); + patches = this.patch_deepCopy(patches) - var nullPadding = this.patch_addPadding(patches); - text = nullPadding + text + nullPadding; + var nullPadding = this.patch_addPadding(patches) + text = nullPadding + text + nullPadding - this.patch_splitMax(patches); + this.patch_splitMax(patches) // delta keeps track of the offset between the expected and actual location // of the previous patch. If there are patches expected at positions 10 and // 20, but the first patch was found at 12, delta is 2 and the second patch // has an effective expected position of 22. - var delta = 0; - var results = []; + var delta = 0 + var results = [] for (var x = 0; x < patches.length; x++) { - var expected_loc = patches[x].start2 + delta; - var text1 = this.diff_text1(patches[x].diffs); - var start_loc; - var end_loc = -1; + var expected_loc = patches[x].start2 + delta + var text1 = this.diff_text1(patches[x].diffs) + var start_loc + var end_loc = -1 if (text1.length > this.Match_MaxBits) { // patch_splitMax will only provide an oversized pattern in the case of // a monster delete. - start_loc = this.match_main(text, text1.substring(0, this.Match_MaxBits), - expected_loc); + start_loc = this.match_main( + text, + text1.substring(0, this.Match_MaxBits), + expected_loc + ) if (start_loc != -1) { - end_loc = this.match_main(text, - text1.substring(text1.length - this.Match_MaxBits), - expected_loc + text1.length - this.Match_MaxBits); + end_loc = this.match_main( + text, + text1.substring(text1.length - this.Match_MaxBits), + expected_loc + text1.length - this.Match_MaxBits + ) if (end_loc == -1 || start_loc >= end_loc) { // Can't find valid trailing context. Drop this patch. - start_loc = -1; + start_loc = -1 } } } else { - start_loc = this.match_main(text, text1, expected_loc); + start_loc = this.match_main(text, text1, expected_loc) } if (start_loc == -1) { // No match found. :( - results[x] = false; + results[x] = false // Subtract the delta for this failed patch from subsequent patches. - delta -= patches[x].length2 - patches[x].length1; + delta -= patches[x].length2 - patches[x].length1 } else { // Found a match. 
:) - results[x] = true; - delta = start_loc - expected_loc; - var text2; + results[x] = true + delta = start_loc - expected_loc + var text2 if (end_loc == -1) { - text2 = text.substring(start_loc, start_loc + text1.length); + text2 = text.substring(start_loc, start_loc + text1.length) } else { - text2 = text.substring(start_loc, end_loc + this.Match_MaxBits); + text2 = text.substring(start_loc, end_loc + this.Match_MaxBits) } if (text1 == text2) { // Perfect match, just shove the replacement text in. - text = text.substring(0, start_loc) + - this.diff_text2(patches[x].diffs) + - text.substring(start_loc + text1.length); + text = + text.substring(0, start_loc) + + this.diff_text2(patches[x].diffs) + + text.substring(start_loc + text1.length) } else { // Imperfect match. Run a diff to get a framework of equivalent // indices. - var diffs = this.diff_main(text1, text2, false); - if (text1.length > this.Match_MaxBits && - this.diff_levenshtein(diffs) / text1.length > - this.Patch_DeleteThreshold) { + var diffs = this.diff_main(text1, text2, false) + if ( + text1.length > this.Match_MaxBits && + this.diff_levenshtein(diffs) / text1.length > + this.Patch_DeleteThreshold + ) { // The end points match, but the content is unacceptably bad. - results[x] = false; + results[x] = false } else { - this.diff_cleanupSemanticLossless(diffs); - var index1 = 0; - var index2; + this.diff_cleanupSemanticLossless(diffs) + var index1 = 0 + var index2 for (var y = 0; y < patches[x].diffs.length; y++) { - var mod = patches[x].diffs[y]; + var mod = patches[x].diffs[y] if (mod[0] !== DIFF_EQUAL) { - index2 = this.diff_xIndex(diffs, index1); + index2 = this.diff_xIndex(diffs, index1) } - if (mod[0] === DIFF_INSERT) { // Insertion - text = text.substring(0, start_loc + index2) + mod[1] + - text.substring(start_loc + index2); - } else if (mod[0] === DIFF_DELETE) { // Deletion - text = text.substring(0, start_loc + index2) + - text.substring(start_loc + this.diff_xIndex(diffs, - index1 + mod[1].length)); + if (mod[0] === DIFF_INSERT) { + // Insertion + text = + text.substring(0, start_loc + index2) + + mod[1] + + text.substring(start_loc + index2) + } else if (mod[0] === DIFF_DELETE) { + // Deletion + text = + text.substring(0, start_loc + index2) + + text.substring( + start_loc + this.diff_xIndex(diffs, index1 + mod[1].length) + ) } if (mod[0] !== DIFF_DELETE) { - index1 += mod[1].length; + index1 += mod[1].length } } } @@ -1875,10 +2018,9 @@ diff_match_patch.prototype.patch_apply = function(patches, text) { } } // Strip the padding off. - text = text.substring(nullPadding.length, text.length - nullPadding.length); - return [text, results]; -}; - + text = text.substring(nullPadding.length, text.length - nullPadding.length) + return [text, results] +} /** * Add some padding on text start and end so that edges can match something. @@ -1886,58 +2028,57 @@ diff_match_patch.prototype.patch_apply = function(patches, text) { * @param {!Array.} patches Array of Patch objects. * @return {string} The padding string added to each side. */ -diff_match_patch.prototype.patch_addPadding = function(patches) { - var paddingLength = this.Patch_Margin; - var nullPadding = ''; +diff_match_patch.prototype.patch_addPadding = function (patches) { + var paddingLength = this.Patch_Margin + var nullPadding = '' for (var x = 1; x <= paddingLength; x++) { - nullPadding += String.fromCharCode(x); + nullPadding += String.fromCharCode(x) } // Bump all the patches forward. 
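  // (Hedged note: with the default Patch_Margin of 4, nullPadding is the
  // four control characters '\x01\x02\x03\x04', chosen because they are
  // vanishingly rare in real text; patch_apply wraps the document in this
  // padding on both sides so that edits at the very start or end still
  // have context to match against.)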
for (var x = 0; x < patches.length; x++) { - patches[x].start1 += paddingLength; - patches[x].start2 += paddingLength; + patches[x].start1 += paddingLength + patches[x].start2 += paddingLength } // Add some padding on start of first diff. - var patch = patches[0]; - var diffs = patch.diffs; + var patch = patches[0] + var diffs = patch.diffs if (diffs.length == 0 || diffs[0][0] != DIFF_EQUAL) { // Add nullPadding equality. - diffs.unshift([DIFF_EQUAL, nullPadding]); - patch.start1 -= paddingLength; // Should be 0. - patch.start2 -= paddingLength; // Should be 0. - patch.length1 += paddingLength; - patch.length2 += paddingLength; + diffs.unshift([DIFF_EQUAL, nullPadding]) + patch.start1 -= paddingLength // Should be 0. + patch.start2 -= paddingLength // Should be 0. + patch.length1 += paddingLength + patch.length2 += paddingLength } else if (paddingLength > diffs[0][1].length) { // Grow first equality. - var extraLength = paddingLength - diffs[0][1].length; - diffs[0][1] = nullPadding.substring(diffs[0][1].length) + diffs[0][1]; - patch.start1 -= extraLength; - patch.start2 -= extraLength; - patch.length1 += extraLength; - patch.length2 += extraLength; + var extraLength = paddingLength - diffs[0][1].length + diffs[0][1] = nullPadding.substring(diffs[0][1].length) + diffs[0][1] + patch.start1 -= extraLength + patch.start2 -= extraLength + patch.length1 += extraLength + patch.length2 += extraLength } // Add some padding on end of last diff. - patch = patches[patches.length - 1]; - diffs = patch.diffs; + patch = patches[patches.length - 1] + diffs = patch.diffs if (diffs.length == 0 || diffs[diffs.length - 1][0] != DIFF_EQUAL) { // Add nullPadding equality. - diffs.push([DIFF_EQUAL, nullPadding]); - patch.length1 += paddingLength; - patch.length2 += paddingLength; + diffs.push([DIFF_EQUAL, nullPadding]) + patch.length1 += paddingLength + patch.length2 += paddingLength } else if (paddingLength > diffs[diffs.length - 1][1].length) { // Grow last equality. - var extraLength = paddingLength - diffs[diffs.length - 1][1].length; - diffs[diffs.length - 1][1] += nullPadding.substring(0, extraLength); - patch.length1 += extraLength; - patch.length2 += extraLength; + var extraLength = paddingLength - diffs[diffs.length - 1][1].length + diffs[diffs.length - 1][1] += nullPadding.substring(0, extraLength) + patch.length1 += extraLength + patch.length2 += extraLength } - return nullPadding; -}; - + return nullPadding +} /** * Look through the patches and break up any which are longer than the maximum @@ -1945,106 +2086,115 @@ diff_match_patch.prototype.patch_addPadding = function(patches) { * Intended to be called only from within patch_apply. * @param {!Array.} patches Array of Patch objects. */ -diff_match_patch.prototype.patch_splitMax = function(patches) { - var patch_size = this.Match_MaxBits; +diff_match_patch.prototype.patch_splitMax = function (patches) { + var patch_size = this.Match_MaxBits for (var x = 0; x < patches.length; x++) { if (patches[x].length1 <= patch_size) { - continue; + continue } - var bigpatch = patches[x]; + var bigpatch = patches[x] // Remove the big old patch. - patches.splice(x--, 1); - var start1 = bigpatch.start1; - var start2 = bigpatch.start2; - var precontext = ''; + patches.splice(x--, 1) + var start1 = bigpatch.start1 + var start2 = bigpatch.start2 + var precontext = '' while (bigpatch.diffs.length !== 0) { // Create one of several smaller patches. 
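      // (Hedged note: patch_size is Match_MaxBits, 32 by default, because
      // match_bitap_ packs the pattern into the bits of a single 32-bit JS
      // integer; any patch whose length1 exceeds that limit is split here
      // so that patch_apply can still locate each piece.)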
- var patch = new diff_match_patch.patch_obj(); - var empty = true; - patch.start1 = start1 - precontext.length; - patch.start2 = start2 - precontext.length; + var patch = new diff_match_patch.patch_obj() + var empty = true + patch.start1 = start1 - precontext.length + patch.start2 = start2 - precontext.length if (precontext !== '') { - patch.length1 = patch.length2 = precontext.length; - patch.diffs.push([DIFF_EQUAL, precontext]); + patch.length1 = patch.length2 = precontext.length + patch.diffs.push([DIFF_EQUAL, precontext]) } - while (bigpatch.diffs.length !== 0 && - patch.length1 < patch_size - this.Patch_Margin) { - var diff_type = bigpatch.diffs[0][0]; - var diff_text = bigpatch.diffs[0][1]; + while ( + bigpatch.diffs.length !== 0 && + patch.length1 < patch_size - this.Patch_Margin + ) { + var diff_type = bigpatch.diffs[0][0] + var diff_text = bigpatch.diffs[0][1] if (diff_type === DIFF_INSERT) { // Insertions are harmless. - patch.length2 += diff_text.length; - start2 += diff_text.length; - patch.diffs.push(bigpatch.diffs.shift()); - empty = false; - } else if (diff_type === DIFF_DELETE && patch.diffs.length == 1 && - patch.diffs[0][0] == DIFF_EQUAL && - diff_text.length > 2 * patch_size) { + patch.length2 += diff_text.length + start2 += diff_text.length + patch.diffs.push(bigpatch.diffs.shift()) + empty = false + } else if ( + diff_type === DIFF_DELETE && + patch.diffs.length == 1 && + patch.diffs[0][0] == DIFF_EQUAL && + diff_text.length > 2 * patch_size + ) { // This is a large deletion. Let it pass in one chunk. - patch.length1 += diff_text.length; - start1 += diff_text.length; - empty = false; - patch.diffs.push([diff_type, diff_text]); - bigpatch.diffs.shift(); + patch.length1 += diff_text.length + start1 += diff_text.length + empty = false + patch.diffs.push([diff_type, diff_text]) + bigpatch.diffs.shift() } else { // Deletion or equality. Only take as much as we can stomach. - diff_text = diff_text.substring(0, - patch_size - patch.length1 - this.Patch_Margin); - patch.length1 += diff_text.length; - start1 += diff_text.length; + diff_text = diff_text.substring( + 0, + patch_size - patch.length1 - this.Patch_Margin + ) + patch.length1 += diff_text.length + start1 += diff_text.length if (diff_type === DIFF_EQUAL) { - patch.length2 += diff_text.length; - start2 += diff_text.length; + patch.length2 += diff_text.length + start2 += diff_text.length } else { - empty = false; + empty = false } - patch.diffs.push([diff_type, diff_text]); + patch.diffs.push([diff_type, diff_text]) if (diff_text == bigpatch.diffs[0][1]) { - bigpatch.diffs.shift(); + bigpatch.diffs.shift() } else { - bigpatch.diffs[0][1] = - bigpatch.diffs[0][1].substring(diff_text.length); + bigpatch.diffs[0][1] = bigpatch.diffs[0][1].substring( + diff_text.length + ) } } } // Compute the head context for the next patch. - precontext = this.diff_text2(patch.diffs); - precontext = - precontext.substring(precontext.length - this.Patch_Margin); + precontext = this.diff_text2(patch.diffs) + precontext = precontext.substring(precontext.length - this.Patch_Margin) // Append the end context for this patch. 
- var postcontext = this.diff_text1(bigpatch.diffs) - .substring(0, this.Patch_Margin); + var postcontext = this.diff_text1(bigpatch.diffs).substring( + 0, + this.Patch_Margin + ) if (postcontext !== '') { - patch.length1 += postcontext.length; - patch.length2 += postcontext.length; - if (patch.diffs.length !== 0 && - patch.diffs[patch.diffs.length - 1][0] === DIFF_EQUAL) { - patch.diffs[patch.diffs.length - 1][1] += postcontext; + patch.length1 += postcontext.length + patch.length2 += postcontext.length + if ( + patch.diffs.length !== 0 && + patch.diffs[patch.diffs.length - 1][0] === DIFF_EQUAL + ) { + patch.diffs[patch.diffs.length - 1][1] += postcontext } else { - patch.diffs.push([DIFF_EQUAL, postcontext]); + patch.diffs.push([DIFF_EQUAL, postcontext]) } } if (!empty) { - patches.splice(++x, 0, patch); + patches.splice(++x, 0, patch) } } } -}; - +} /** * Take a list of patches and return a textual representation. * @param {!Array.} patches Array of Patch objects. * @return {string} Text representation of patches. */ -diff_match_patch.prototype.patch_toText = function(patches) { - var text = []; +diff_match_patch.prototype.patch_toText = function (patches) { + var text = [] for (var x = 0; x < patches.length; x++) { - text[x] = patches[x]; + text[x] = patches[x] } - return text.join(''); -}; - + return text.join('') +} /** * Parse a textual representation of patches and return a list of Patch objects. @@ -2052,94 +2202,92 @@ diff_match_patch.prototype.patch_toText = function(patches) { * @return {!Array.} Array of Patch objects. * @throws {!Error} If invalid input. */ -diff_match_patch.prototype.patch_fromText = function(textline) { - var patches = []; +diff_match_patch.prototype.patch_fromText = function (textline) { + var patches = [] if (!textline) { - return patches; + return patches } - var text = textline.split('\n'); - var textPointer = 0; - var patchHeader = /^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$/; + var text = textline.split('\n') + var textPointer = 0 + var patchHeader = /^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$/ while (textPointer < text.length) { - var m = text[textPointer].match(patchHeader); + var m = text[textPointer].match(patchHeader) if (!m) { - throw new Error('Invalid patch string: ' + text[textPointer]); + throw new Error('Invalid patch string: ' + text[textPointer]) } - var patch = new diff_match_patch.patch_obj(); - patches.push(patch); - patch.start1 = parseInt(m[1], 10); + var patch = new diff_match_patch.patch_obj() + patches.push(patch) + patch.start1 = parseInt(m[1], 10) if (m[2] === '') { - patch.start1--; - patch.length1 = 1; + patch.start1-- + patch.length1 = 1 } else if (m[2] == '0') { - patch.length1 = 0; + patch.length1 = 0 } else { - patch.start1--; - patch.length1 = parseInt(m[2], 10); + patch.start1-- + patch.length1 = parseInt(m[2], 10) } - patch.start2 = parseInt(m[3], 10); + patch.start2 = parseInt(m[3], 10) if (m[4] === '') { - patch.start2--; - patch.length2 = 1; + patch.start2-- + patch.length2 = 1 } else if (m[4] == '0') { - patch.length2 = 0; + patch.length2 = 0 } else { - patch.start2--; - patch.length2 = parseInt(m[4], 10); + patch.start2-- + patch.length2 = parseInt(m[4], 10) } - textPointer++; + textPointer++ while (textPointer < text.length) { - var sign = text[textPointer].charAt(0); + var sign = text[textPointer].charAt(0) try { - var line = decodeURI(text[textPointer].substring(1)); + var line = decodeURI(text[textPointer].substring(1)) } catch (ex) { // Malformed URI sequence. 
- throw new Error('Illegal escape in patch_fromText: ' + line); + throw new Error('Illegal escape in patch_fromText: ' + line) } if (sign == '-') { // Deletion. - patch.diffs.push([DIFF_DELETE, line]); + patch.diffs.push([DIFF_DELETE, line]) } else if (sign == '+') { // Insertion. - patch.diffs.push([DIFF_INSERT, line]); + patch.diffs.push([DIFF_INSERT, line]) } else if (sign == ' ') { // Minor equality. - patch.diffs.push([DIFF_EQUAL, line]); + patch.diffs.push([DIFF_EQUAL, line]) } else if (sign == '@') { // Start of next patch. - break; + break } else if (sign === '') { // Blank line? Whatever. } else { // WTF? - throw new Error('Invalid patch mode "' + sign + '" in: ' + line); + throw new Error('Invalid patch mode "' + sign + '" in: ' + line) } - textPointer++; + textPointer++ } } - return patches; -}; - + return patches +} /** * Class representing one patch operation. * @constructor */ -diff_match_patch.patch_obj = function() { +diff_match_patch.patch_obj = function () { /** @type {!Array.} */ - this.diffs = []; + this.diffs = [] /** @type {?number} */ - this.start1 = null; + this.start1 = null /** @type {?number} */ - this.start2 = null; + this.start2 = null /** @type {number} */ - this.length1 = 0; + this.length1 = 0 /** @type {number} */ - this.length2 = 0; -}; - + this.length2 = 0 +} /** * Emmulate GNU diff's format. @@ -2147,48 +2295,47 @@ diff_match_patch.patch_obj = function() { * Indicies are printed as 1-based, not 0-based. * @return {string} The GNU diff string. */ -diff_match_patch.patch_obj.prototype.toString = function() { - var coords1, coords2; +diff_match_patch.patch_obj.prototype.toString = function () { + var coords1, coords2 if (this.length1 === 0) { - coords1 = this.start1 + ',0'; + coords1 = this.start1 + ',0' } else if (this.length1 == 1) { - coords1 = this.start1 + 1; + coords1 = this.start1 + 1 } else { - coords1 = (this.start1 + 1) + ',' + this.length1; + coords1 = this.start1 + 1 + ',' + this.length1 } if (this.length2 === 0) { - coords2 = this.start2 + ',0'; + coords2 = this.start2 + ',0' } else if (this.length2 == 1) { - coords2 = this.start2 + 1; + coords2 = this.start2 + 1 } else { - coords2 = (this.start2 + 1) + ',' + this.length2; + coords2 = this.start2 + 1 + ',' + this.length2 } - var text = ['@@ -' + coords1 + ' +' + coords2 + ' @@\n']; - var op; + var text = ['@@ -' + coords1 + ' +' + coords2 + ' @@\n'] + var op // Escape the body of the patch with %xx notation. for (var x = 0; x < this.diffs.length; x++) { switch (this.diffs[x][0]) { case DIFF_INSERT: - op = '+'; - break; + op = '+' + break case DIFF_DELETE: - op = '-'; - break; + op = '-' + break case DIFF_EQUAL: - op = ' '; - break; + op = ' ' + break } - text[x + 1] = op + encodeURI(this.diffs[x][1]) + '\n'; + text[x + 1] = op + encodeURI(this.diffs[x][1]) + '\n' } - return text.join('').replace(/%20/g, ' '); -}; - + return text.join('').replace(/%20/g, ' ') +} // Export these global variables so that they survive Google's JS compiler. // In a browser, 'this' will be 'window'. // Users of node.js should 'require' the uncompressed version since Google's // JS compiler may break the following exports for non-browser environments. 
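// A hedged consumption sketch: in a CommonJS module the top-level 'this'
// is module.exports, so after the assignments below a caller can write
//
//   var DMP = require('./diff_match_patch') // path assumed
//   var dmp = new DMP.diff_match_patch()
//   var diffs = dmp.diff_main('before text', 'after text')
//
// which is broadly how this service's DiffCodec drives the library.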
-this['diff_match_patch'] = diff_match_patch; -this['DIFF_DELETE'] = DIFF_DELETE; -this['DIFF_INSERT'] = DIFF_INSERT; -this['DIFF_EQUAL'] = DIFF_EQUAL; +this['diff_match_patch'] = diff_match_patch +this['DIFF_DELETE'] = DIFF_DELETE +this['DIFF_INSERT'] = DIFF_INSERT +this['DIFF_EQUAL'] = DIFF_EQUAL diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index 899302c64e..ff5a35a515 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -29,33 +29,21 @@ module.exports = { redis: { pubsub: { host: - process.env.PUBSUB_REDIS_HOST || - process.env.REDIS_HOST || - 'localhost', - port: - process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379', + process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || 'localhost', + port: process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379', password: - process.env.PUBSUB_REDIS_PASSWORD || - process.env.REDIS_PASSWORD || - '', + process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', maxRetriesPerRequest: parseInt( process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' ) }, history: { - port: - process.env.HISTORY_REDIS_PORT || - process.env.REDIS_PORT || - '6379', + port: process.env.HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379', host: - process.env.HISTORY_REDIS_HOST || - process.env.REDIS_HOST || - 'localhost', + process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || 'localhost', password: - process.env.HISTORY_REDIS_PASSWORD || - process.env.REDIS_PASSWORD || - '', + process.env.HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', maxRetriesPerRequest: parseInt( process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' ), @@ -71,9 +59,7 @@ module.exports = { project_history: { port: - process.env.NEW_HISTORY_REDIS_PORT || - process.env.REDIS_PORT || - '6379', + process.env.NEW_HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379', host: process.env.NEW_HISTORY_REDIS_HOST || process.env.REDIS_HOST || @@ -86,41 +72,34 @@ module.exports = { process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' ), key_schema: { - projectHistoryOps({ project_id }) { - return `ProjectHistory:Ops:{${project_id}}` + projectHistoryOps({ project_id: projectId }) { + return `ProjectHistory:Ops:{${projectId}}` }, - projectHistoryFirstOpTimestamp({ project_id }) { - return `ProjectHistory:FirstOpTimestamp:{${project_id}}` + projectHistoryFirstOpTimestamp({ project_id: projectId }) { + return `ProjectHistory:FirstOpTimestamp:{${projectId}}` } } }, lock: { - port: - process.env.LOCK_REDIS_PORT || process.env.REDIS_PORT || '6379', + port: process.env.LOCK_REDIS_PORT || process.env.REDIS_PORT || '6379', host: - process.env.LOCK_REDIS_HOST || - process.env.REDIS_HOST || - 'localhost', + process.env.LOCK_REDIS_HOST || process.env.REDIS_HOST || 'localhost', password: - process.env.LOCK_REDIS_PASSWORD || - process.env.REDIS_PASSWORD || - '', + process.env.LOCK_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', maxRetriesPerRequest: parseInt( process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' ), key_schema: { - blockingKey({ doc_id }) { - return `Blocking:{${doc_id}}` + blockingKey({ doc_id: docId }) { + return `Blocking:{${docId}}` } } }, documentupdater: { port: - process.env.DOC_UPDATER_REDIS_PORT || - process.env.REDIS_PORT || - '6379', + process.env.DOC_UPDATER_REDIS_PORT || process.env.REDIS_PORT || '6379', host: process.env.DOC_UPDATER_REDIS_HOST || process.env.REDIS_HOST || @@ -205,8 +184,7 @@ 
module.exports = { publishOnIndividualChannels: process.env.PUBLISH_ON_INDIVIDUAL_CHANNELS || false, - continuousBackgroundFlush: - process.env.CONTINUOUS_BACKGROUND_FLUSH || false, + continuousBackgroundFlush: process.env.CONTINUOUS_BACKGROUND_FLUSH || false, smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds From ed7d277f982818d7f403c9d2fff2c75f5b0d6ade Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Tue, 28 Apr 2020 11:57:58 +0200 Subject: [PATCH 636/769] lint & format app.js --- services/document-updater/app.js | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index abd3370c72..9e772d3e9a 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -10,7 +10,6 @@ const Metrics = require('metrics-sharelatex') Metrics.initialize('doc-updater') const express = require('express') -const http = require('http') const Settings = require('settings-sharelatex') const logger = require('logger-sharelatex') logger.initialize('document-updater') @@ -33,7 +32,7 @@ const Path = require('path') const bodyParser = require('body-parser') Metrics.mongodb.monitor( - Path.resolve(__dirname + '/node_modules/mongojs/node_modules/mongodb'), + Path.resolve(__dirname, '/node_modules/mongojs/node_modules/mongodb'), logger ) Metrics.event_loop.monitor(logger, 100) @@ -45,16 +44,16 @@ Metrics.injectMetricsRoute(app) DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) -app.param('project_id', function (req, res, next, project_id) { - if (project_id != null ? project_id.match(/^[0-9a-f]{24}$/) : undefined) { +app.param('project_id', function (req, res, next, projectId) { + if (projectId != null ? projectId.match(/^[0-9a-f]{24}$/) : undefined) { return next() } else { return next(new Error('invalid project id')) } }) -app.param('doc_id', function (req, res, next, doc_id) { - if (doc_id != null ? doc_id.match(/^[0-9a-f]{24}$/) : undefined) { +app.param('doc_id', function (req, res, next, docId) { + if (docId != null ? 
docId.match(/^[0-9a-f]{24}$/) : undefined) { return next() } else { return next(new Error('invalid doc id')) @@ -100,9 +99,12 @@ app.delete( app.get('/flush_all_projects', HttpController.flushAllProjects) app.get('/flush_queued_projects', HttpController.flushQueuedProjects) -app.get('/total', function (req, res) { +app.get('/total', function (req, res, next) { const timer = new Metrics.Timer('http.allDocList') return RedisManager.getCountOfDocsInMemory(function (err, count) { + if (err) { + return next(err) + } timer.done() return res.send({ total: count }) }) @@ -203,8 +205,9 @@ const shutdownCleanly = (signal) => } const watchForEvent = (eventName) => - docUpdaterRedisClient.on(eventName, (e) => - console.log(`redis event: ${eventName} ${e}`) + docUpdaterRedisClient.on( + eventName, + (e) => console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console ) const events = ['connect', 'ready', 'error', 'close', 'reconnecting', 'end'] From 8176cb3e8d70e3113d96b477a2a8f1e076df81d5 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Tue, 28 Apr 2020 12:10:25 +0200 Subject: [PATCH 637/769] remove requires for coffee-script package in tests --- .../document-updater/test/unit/js/LockManager/CheckingTheLock.js | 1 - .../test/unit/js/LockManager/ReleasingTheLock.js | 1 - 2 files changed, 2 deletions(-) diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js index d91b25b64c..c97677041f 100644 --- a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js @@ -11,7 +11,6 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -require('coffee-script') const sinon = require('sinon') const assert = require('assert') const path = require('path') diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index 9c5d70e1e0..d0c88940cc 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -11,7 +11,6 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -require('coffee-script') const sinon = require('sinon') const assert = require('assert') const path = require('path') From cf87daa754604d9339575df58ac73ff77ab1cb8a Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Tue, 28 Apr 2020 12:17:53 +0200 Subject: [PATCH 638/769] fix Metrics module stub --- .../js/DispatchManager/DispatchManagerTests.js | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js index 48eb2fbb92..a177f162fb 100644 --- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -20,6 +20,7 @@ const Errors = require('../../../../app/js/Errors.js') describe('DispatchManager', function () { beforeEach(function () { + let Timer this.timeout(3000) this.DispatchManager = SandboxedModule.require(modulePath, { requires: { @@ -37,11 +38,17 @@ describe('DispatchManager', 
function () { 'redis-sharelatex': (this.redis = {}), './RateLimitManager': {}, './Errors': Errors, - './Metrics': { - Timer() { - return { done() {} } - } - } + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()) + }) } }) this.callback = sinon.stub() From 9f6ea07002a30fc6c1316e6a687ae044370b8398 Mon Sep 17 00:00:00 2001 From: Tim Alby Date: Mon, 13 Jan 2020 19:03:27 +0100 Subject: [PATCH 639/769] fix SyntaxError on `export` var --- .../app/js/sharejs/types/text-composable.js | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/services/document-updater/app/js/sharejs/types/text-composable.js b/services/document-updater/app/js/sharejs/types/text-composable.js index 6898589908..34246a5f51 100644 --- a/services/document-updater/app/js/sharejs/types/text-composable.js +++ b/services/document-updater/app/js/sharejs/types/text-composable.js @@ -31,11 +31,12 @@ let makeAppend const p = function () {} // require('util').debug const i = function () {} // require('util').inspect -const exports = typeof WEB !== 'undefined' && WEB !== null ? {} : module.exports +const moduleExport = + typeof WEB !== 'undefined' && WEB !== null ? {} : module.exports -exports.name = 'text-composable' +moduleExport.name = 'text-composable' -exports.create = () => '' +moduleExport.create = () => '' // -------- Utility methods @@ -74,7 +75,7 @@ const checkOp = function (op) { // Makes a function for appending components to a given op. // Exported for the randomOpGenerator. -exports._makeAppend = makeAppend = (op) => +moduleExport._makeAppend = makeAppend = (op) => function (component) { if (component === 0 || component.i === '' || component.d === '') { } else if (op.length === 0) { @@ -161,7 +162,7 @@ const componentLength = function (component) { // Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate // adjacent inserts and deletes. -exports.normalize = function (op) { +moduleExport.normalize = function (op) { const newOp = [] const append = makeAppend(newOp) for (const component of Array.from(op)) { @@ -171,7 +172,7 @@ exports.normalize = function (op) { } // Apply the op to the string. Returns the new string. -exports.apply = function (str, op) { +moduleExport.apply = function (str, op) { p(`Applying ${i(op)} to '${str}'`) if (typeof str !== 'string') { throw new Error('Snapshot should be a string') @@ -214,8 +215,7 @@ exports.apply = function (str, op) { // transform op1 by op2. Return transformed version of op1. // op1 and op2 are unchanged by transform. -exports.transform = function (op, otherOp, side) { - let component +moduleExport.transform = function (op, otherOp, side) { if (side !== 'left' && side !== 'right') { throw new Error(`side (${side} must be 'left' or 'right'`) } @@ -294,8 +294,7 @@ exports.transform = function (op, otherOp, side) { } // Compose 2 ops into 1 op. 
-exports.compose = function (op1, op2) { - let component +moduleExport.compose = function (op1, op2) { p(`COMPOSE ${i(op1)} + ${i(op2)}`) checkOp(op1) checkOp(op2) @@ -377,7 +376,7 @@ const invertComponent = function (c) { } // Invert an op -exports.invert = function (op) { +moduleExport.invert = function (op) { const result = [] const append = makeAppend(result) @@ -395,5 +394,5 @@ if (typeof window !== 'undefined' && window !== null) { if (!window.ot.types) { window.ot.types = {} } - window.ot.types.text = exports + window.ot.types.text = moduleExport } From 53d79d86a94cb370ecd6e411f31da220d8f725cc Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 11:21:36 -0400 Subject: [PATCH 640/769] Decaf cleanup: remove Array.from() --- services/document-updater/app.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 9e772d3e9a..da2e745880 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -1,6 +1,5 @@ /* * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS103: Rewrite code to no longer use __guard__ * DS207: Consider shorter variations of null checks @@ -211,7 +210,7 @@ const watchForEvent = (eventName) => ) const events = ['connect', 'ready', 'error', 'close', 'reconnecting', 'end'] -for (const eventName of Array.from(events)) { +for (const eventName of events) { watchForEvent(eventName) } From 7b2420413c1c40b5abccc04b6d162623801b956a Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 11:28:31 -0400 Subject: [PATCH 641/769] Decaf cleanup: unnecessary returns and arrow functions in callbacks --- services/document-updater/app.js | 89 ++++++++++++++++---------------- 1 file changed, 45 insertions(+), 44 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index da2e745880..34d116ee9d 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -1,6 +1,5 @@ /* * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns * DS103: Rewrite code to no longer use __guard__ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md @@ -43,7 +42,7 @@ Metrics.injectMetricsRoute(app) DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) -app.param('project_id', function (req, res, next, projectId) { +app.param('project_id', (req, res, next, projectId) => { if (projectId != null ? projectId.match(/^[0-9a-f]{24}$/) : undefined) { return next() } else { @@ -51,7 +50,7 @@ app.param('project_id', function (req, res, next, projectId) { } }) -app.param('doc_id', function (req, res, next, docId) { +app.param('doc_id', (req, res, next, docId) => { if (docId != null ? 
docId.match(/^[0-9a-f]{24}$/) : undefined) { return next() } else { @@ -98,18 +97,18 @@ app.delete( app.get('/flush_all_projects', HttpController.flushAllProjects) app.get('/flush_queued_projects', HttpController.flushQueuedProjects) -app.get('/total', function (req, res, next) { +app.get('/total', (req, res, next) => { const timer = new Metrics.Timer('http.allDocList') - return RedisManager.getCountOfDocsInMemory(function (err, count) { + RedisManager.getCountOfDocsInMemory((err, count) => { if (err) { return next(err) } timer.done() - return res.send({ total: count }) + res.send({ total: count }) }) }) -app.get('/status', function (req, res) { +app.get('/status', (req, res) => { if (Settings.shuttingDown) { return res.sendStatus(503) // Service unavailable } else { @@ -120,8 +119,8 @@ app.get('/status', function (req, res) { const pubsubClient = require('redis-sharelatex').createClient( Settings.redis.pubsub ) -app.get('/health_check/redis', (req, res, next) => - pubsubClient.healthCheck(function (error) { +app.get('/health_check/redis', (req, res, next) => { + pubsubClient.healthCheck((error) => { if (error != null) { logger.err({ err: error }, 'failed redis health check') return res.sendStatus(500) @@ -129,13 +128,13 @@ app.get('/health_check/redis', (req, res, next) => return res.sendStatus(200) } }) -) +}) const docUpdaterRedisClient = require('redis-sharelatex').createClient( Settings.redis.documentupdater ) -app.get('/health_check/redis_cluster', (req, res, next) => - docUpdaterRedisClient.healthCheck(function (error) { +app.get('/health_check/redis_cluster', (req, res, next) => { + docUpdaterRedisClient.healthCheck((error) => { if (error != null) { logger.err({ err: error }, 'failed redis cluster health check') return res.sendStatus(500) @@ -143,34 +142,37 @@ app.get('/health_check/redis_cluster', (req, res, next) => return res.sendStatus(200) } }) -) +}) -app.get('/health_check', (req, res, next) => +app.get('/health_check', (req, res, next) => { async.series( [ - (cb) => - pubsubClient.healthCheck(function (error) { + (cb) => { + pubsubClient.healthCheck((error) => { if (error != null) { logger.err({ err: error }, 'failed redis health check') } - return cb(error) - }), - (cb) => - docUpdaterRedisClient.healthCheck(function (error) { + cb(error) + }) + }, + (cb) => { + docUpdaterRedisClient.healthCheck((error) => { if (error != null) { logger.err({ err: error }, 'failed redis cluster health check') } - return cb(error) - }), - (cb) => - mongojs.healthCheck(function (error) { + cb(error) + }) + }, + (cb) => { + mongojs.healthCheck((error) => { if (error != null) { logger.err({ err: error }, 'failed mongo health check') } - return cb(error) + cb(error) }) + } ], - function (error) { + (error) => { if (error != null) { return res.sendStatus(500) } else { @@ -178,9 +180,9 @@ app.get('/health_check', (req, res, next) => } } ) -) +}) -app.use(function (error, req, res, next) { +app.use((error, req, res, next) => { if (error instanceof Errors.NotFoundError) { return res.sendStatus(404) } else if (error instanceof Errors.OpRangeNotAvailableError) { @@ -193,21 +195,20 @@ app.use(function (error, req, res, next) { } }) -const shutdownCleanly = (signal) => - function () { - logger.log({ signal }, 'received interrupt, cleaning up') - Settings.shuttingDown = true - return setTimeout(function () { - logger.log({ signal }, 'shutting down') - return process.exit() - }, 10000) - } +const shutdownCleanly = (signal) => () => { + logger.log({ signal }, 'received interrupt, cleaning up') + 
Settings.shuttingDown = true + setTimeout(() => { + logger.log({ signal }, 'shutting down') + process.exit() + }, 10000) +} -const watchForEvent = (eventName) => - docUpdaterRedisClient.on( - eventName, - (e) => console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console - ) +const watchForEvent = (eventName) => { + docUpdaterRedisClient.on(eventName, (e) => { + console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console + }) +} const events = ['connect', 'ready', 'error', 'close', 'reconnecting', 'end'] for (const eventName of events) { @@ -227,11 +228,11 @@ const port = const host = Settings.internal.documentupdater.host || 'localhost' if (!module.parent) { // Called directly - app.listen(port, host, function () { + app.listen(port, host, () => { logger.info(`Document-updater starting up, listening on ${host}:${port}`) if (Settings.continuousBackgroundFlush) { logger.info('Starting continuous background flush') - return DeleteQueueManager.startBackgroundFlush() + DeleteQueueManager.startBackgroundFlush() } }) } From 18b92adcef4d58048da9a873b932f45a11b98b77 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 11:32:03 -0400 Subject: [PATCH 642/769] Decaf cleanup: remove __guard__() --- services/document-updater/app.js | 20 +++++--------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 34d116ee9d..b0a2d8ae9f 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -1,6 +1,5 @@ /* * decaffeinate suggestions: - * DS103: Rewrite code to no longer use __guard__ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ @@ -216,16 +215,13 @@ for (const eventName of events) { } const port = - __guard__( - Settings.internal != null ? Settings.internal.documentupdater : undefined, - (x) => x.port - ) || - __guard__( - Settings.apis != null ? Settings.apis.documentupdater : undefined, - (x1) => x1.port - ) || + Settings.internal.documentupdater.port || + (Settings.api && + Settings.api.documentupdater && + Settings.api.documentupdater.port) || 3003 const host = Settings.internal.documentupdater.host || 'localhost' + if (!module.parent) { // Called directly app.listen(port, host, () => { @@ -250,9 +246,3 @@ for (const signal of [ ]) { process.on(signal, shutdownCleanly(signal)) } - -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? transform(value) - : undefined -} From e60d9237d0d19803a32c8a80e007c9f9e51a54b8 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 11:34:01 -0400 Subject: [PATCH 643/769] Decaf cleanup: simplify null checks --- services/document-updater/app.js | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index b0a2d8ae9f..7555ad666b 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -1,8 +1,3 @@ -/* - * decaffeinate suggestions: - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const Metrics = require('metrics-sharelatex') Metrics.initialize('doc-updater') @@ -13,7 +8,7 @@ logger.initialize('document-updater') logger.logger.addSerializers(require('./app/js/LoggerSerializers')) -if ((Settings.sentry != null ? 
Settings.sentry.dsn : undefined) != null) { +if (Settings.sentry != null && Settings.sentry.dsn != null) { logger.initializeErrorReporting(Settings.sentry.dsn) } @@ -42,7 +37,7 @@ Metrics.injectMetricsRoute(app) DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) app.param('project_id', (req, res, next, projectId) => { - if (projectId != null ? projectId.match(/^[0-9a-f]{24}$/) : undefined) { + if (projectId != null && projectId.match(/^[0-9a-f]{24}$/)) { return next() } else { return next(new Error('invalid project id')) @@ -50,7 +45,7 @@ app.param('project_id', (req, res, next, projectId) => { }) app.param('doc_id', (req, res, next, docId) => { - if (docId != null ? docId.match(/^[0-9a-f]{24}$/) : undefined) { + if (docId != null && docId.match(/^[0-9a-f]{24}$/)) { return next() } else { return next(new Error('invalid doc id')) @@ -120,7 +115,7 @@ const pubsubClient = require('redis-sharelatex').createClient( ) app.get('/health_check/redis', (req, res, next) => { pubsubClient.healthCheck((error) => { - if (error != null) { + if (error) { logger.err({ err: error }, 'failed redis health check') return res.sendStatus(500) } else { @@ -134,7 +129,7 @@ const docUpdaterRedisClient = require('redis-sharelatex').createClient( ) app.get('/health_check/redis_cluster', (req, res, next) => { docUpdaterRedisClient.healthCheck((error) => { - if (error != null) { + if (error) { logger.err({ err: error }, 'failed redis cluster health check') return res.sendStatus(500) } else { @@ -148,7 +143,7 @@ app.get('/health_check', (req, res, next) => { [ (cb) => { pubsubClient.healthCheck((error) => { - if (error != null) { + if (error) { logger.err({ err: error }, 'failed redis health check') } cb(error) @@ -156,7 +151,7 @@ app.get('/health_check', (req, res, next) => { }, (cb) => { docUpdaterRedisClient.healthCheck((error) => { - if (error != null) { + if (error) { logger.err({ err: error }, 'failed redis cluster health check') } cb(error) @@ -164,7 +159,7 @@ app.get('/health_check', (req, res, next) => { }, (cb) => { mongojs.healthCheck((error) => { - if (error != null) { + if (error) { logger.err({ err: error }, 'failed mongo health check') } cb(error) @@ -172,7 +167,7 @@ app.get('/health_check', (req, res, next) => { } ], (error) => { - if (error != null) { + if (error) { return res.sendStatus(500) } else { return res.sendStatus(200) From e8f935d046952dfdf49e21b92c8e20b39b0f50a6 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 11:53:02 -0400 Subject: [PATCH 644/769] Make max JSON request size configurable and default to 8 MB This is to allow multi-document updates, for example when creating a new project from a zip file. 
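As a minimal sketch of how the pieces of this change fit together (both snippets
mirror the diff below; the 16 MB override at the end is a hypothetical example,
not a value used anywhere in the codebase):

    // config/settings.defaults.js -- the limit is read from the
    // environment and defaults to 8 MB:
    maxJsonRequestSize:
      parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024,

    // app.js -- the JSON body parser now uses the configurable limit
    // instead of the old max_doc_length + 64 KB:
    app.use(bodyParser.json({ limit: Settings.maxJsonRequestSize }))

    // Hypothetical deployment override, raising the limit to 16 MB:
    //   MAX_JSON_REQUEST_SIZE=16777216 node app.js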
--- services/document-updater/app.js | 2 +- services/document-updater/config/settings.defaults.js | 2 ++ .../test/acceptance/js/SettingADocumentTests.js | 3 +-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 7555ad666b..e23fa3ca7b 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -31,7 +31,7 @@ Metrics.event_loop.monitor(logger, 100) const app = express() app.use(Metrics.http.monitor(logger)) -app.use(bodyParser.json({ limit: Settings.max_doc_length + 64 * 1024 })) +app.use(bodyParser.json({ limit: Settings.maxJsonRequestSize })) Metrics.injectMetricsRoute(app) DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index ff5a35a515..21c3219a33 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -168,6 +168,8 @@ module.exports = { }, max_doc_length: 2 * 1024 * 1024, // 2mb + maxJsonRequestSize: + parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024, dispatcherCount: process.env.DISPATCHER_COUNT, diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 6c13282ba5..2107f46e92 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -230,8 +230,7 @@ describe('Setting a document', function () { }) this.newLines = [] while ( - JSON.stringify(this.newLines).length < - Settings.max_doc_length + 64 * 1024 + JSON.stringify(this.newLines).length <= Settings.maxJsonRequestSize ) { this.newLines.push('(a long line of text)'.repeat(10000)) } From ff2d31c066ddfdd0484a81c83fb5afbad4e1c58e Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 12:01:25 -0400 Subject: [PATCH 645/769] Decaf cleanup: remove Array.from() --- .../acceptance/js/SettingADocumentTests.js | 37 ++++++------------- 1 file changed, 12 insertions(+), 25 deletions(-) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 2107f46e92..25a46f8019 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -7,7 +7,6 @@ // Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md @@ -61,10 +60,8 @@ describe('Setting a document', function () { describe('when the updated doc exists in the doc updater', function () { before(function (done) { - ;[this.project_id, this.doc_id] = Array.from([ - DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() - ]) + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, version: this.version @@ -155,10 +152,8 @@ describe('Setting a document', function () { describe('when the updated doc does not exist in the doc updater', function () { before(function (done) { - ;[this.project_id, this.doc_id] = Array.from([ - DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() - ]) + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, version: this.version @@ -220,10 +215,8 @@ describe('Setting a document', function () { describe('when the updated doc is too large for the body parser', function () { before(function (done) { - ;[this.project_id, this.doc_id] = Array.from([ - DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() - ]) + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, version: this.version @@ -274,10 +267,8 @@ describe('Setting a document', function () { describe('when the updated doc is large but under the bodyParser and HTTPController size limit', function () { before(function (done) { - ;[this.project_id, this.doc_id] = Array.from([ - DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() - ]) + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, version: this.version @@ -343,10 +334,8 @@ describe('Setting a document', function () { describe('with the undo flag', function () { before(function (done) { - ;[this.project_id, this.doc_id] = Array.from([ - DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() - ]) + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, version: this.version @@ -407,10 +396,8 @@ describe('Setting a document', function () { return describe('without the undo flag', function () { before(function (done) { - ;[this.project_id, this.doc_id] = Array.from([ - DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() - ]) + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, version: this.version From dc5d77998c60a9aa61c329e5ab019c6e733cef69 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 12:03:54 -0400 Subject: [PATCH 646/769] Decaf cleanup: remove unnecessary returns --- .../acceptance/js/SettingADocumentTests.js | 113 ++++++++---------- 1 file changed, 49 insertions(+), 64 deletions(-) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js 
b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 25a46f8019..ddbbf0e9c1 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -7,7 +7,6 @@ // Fix any style issues and re-enable lint. /* * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ @@ -49,13 +48,13 @@ describe('Setting a document', function () { sinon.spy(MockTrackChangesApi, 'flushDoc') sinon.spy(MockProjectHistoryApi, 'flushProject') sinon.spy(MockWebApi, 'setDocument') - return DocUpdaterApp.ensureRunning(done) + DocUpdaterApp.ensureRunning(done) }) after(function () { MockTrackChangesApi.flushDoc.restore() MockProjectHistoryApi.flushProject.restore() - return MockWebApi.setDocument.restore() + MockWebApi.setDocument.restore() }) describe('when the updated doc exists in the doc updater', function () { @@ -70,7 +69,7 @@ describe('Setting a document', function () { if (error != null) { throw error } - return DocUpdaterClient.sendUpdate( + DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, this.update, @@ -78,8 +77,8 @@ describe('Setting a document', function () { if (error != null) { throw error } - return setTimeout(() => { - return DocUpdaterClient.setDocLines( + setTimeout(() => { + DocUpdaterClient.setDocLines( this.project_id, this.doc_id, this.newLines, @@ -88,28 +87,27 @@ describe('Setting a document', function () { false, (error, res, body) => { this.statusCode = res.statusCode - return done() + done() } ) }, 200) } ) }) - return null }) after(function () { MockTrackChangesApi.flushDoc.reset() MockProjectHistoryApi.flushProject.reset() - return MockWebApi.setDocument.reset() + MockWebApi.setDocument.reset() }) it('should return a 204 status code', function () { - return this.statusCode.should.equal(204) + this.statusCode.should.equal(204) }) it('should send the updated doc lines and version to the web api', function () { - return MockWebApi.setDocument + MockWebApi.setDocument .calledWith(this.project_id, this.doc_id, this.newLines) .should.equal(true) }) @@ -120,10 +118,9 @@ describe('Setting a document', function () { this.doc_id, (error, res, doc) => { doc.lines.should.deep.equal(this.newLines) - return done() + done() } ) - return null }) it('should bump the version in the doc updater', function (done) { @@ -132,21 +129,19 @@ describe('Setting a document', function () { this.doc_id, (error, res, doc) => { doc.version.should.equal(this.version + 2) - return done() + done() } ) - return null }) - return it('should leave the document in redis', function (done) { + it('should leave the document in redis', function (done) { rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => { if (error != null) { throw error } expect(JSON.parse(lines)).to.deep.equal(this.newLines) - return done() + done() }) - return null }) }) @@ -167,49 +162,45 @@ describe('Setting a document', function () { false, (error, res, body) => { this.statusCode = res.statusCode - return setTimeout(done, 200) + setTimeout(done, 200) } ) - return null }) after(function () { MockTrackChangesApi.flushDoc.reset() MockProjectHistoryApi.flushProject.reset() - return MockWebApi.setDocument.reset() + MockWebApi.setDocument.reset() }) it('should return a 204 status code', function () { - return 
this.statusCode.should.equal(204) + this.statusCode.should.equal(204) }) it('should send the updated doc lines to the web api', function () { - return MockWebApi.setDocument + MockWebApi.setDocument .calledWith(this.project_id, this.doc_id, this.newLines) .should.equal(true) }) it('should flush track changes', function () { - return MockTrackChangesApi.flushDoc - .calledWith(this.doc_id) - .should.equal(true) + MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true) }) it('should flush project history', function () { - return MockProjectHistoryApi.flushProject + MockProjectHistoryApi.flushProject .calledWith(this.project_id) .should.equal(true) }) - return it('should remove the document from redis', function (done) { + it('should remove the document from redis', function (done) { rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => { if (error != null) { throw error } expect(lines).to.not.exist - return done() + done() }) - return null }) }) @@ -236,32 +227,31 @@ describe('Setting a document', function () { false, (error, res, body) => { this.statusCode = res.statusCode - return setTimeout(done, 200) + setTimeout(done, 200) } ) - return null }) after(function () { MockTrackChangesApi.flushDoc.reset() MockProjectHistoryApi.flushProject.reset() - return MockWebApi.setDocument.reset() + MockWebApi.setDocument.reset() }) it('should return a 413 status code', function () { - return this.statusCode.should.equal(413) + this.statusCode.should.equal(413) }) it('should not send the updated doc lines to the web api', function () { - return MockWebApi.setDocument.called.should.equal(false) + MockWebApi.setDocument.called.should.equal(false) }) it('should not flush track changes', function () { - return MockTrackChangesApi.flushDoc.called.should.equal(false) + MockTrackChangesApi.flushDoc.called.should.equal(false) }) - return it('should not flush project history', function () { - return MockProjectHistoryApi.flushProject.called.should.equal(false) + it('should not flush project history', function () { + MockProjectHistoryApi.flushProject.called.should.equal(false) }) }) @@ -289,34 +279,33 @@ describe('Setting a document', function () { false, (error, res, body) => { this.statusCode = res.statusCode - return setTimeout(done, 200) + setTimeout(done, 200) } ) - return null }) after(function () { MockTrackChangesApi.flushDoc.reset() MockProjectHistoryApi.flushProject.reset() - return MockWebApi.setDocument.reset() + MockWebApi.setDocument.reset() }) it('should return a 204 status code', function () { - return this.statusCode.should.equal(204) + this.statusCode.should.equal(204) }) - return it('should send the updated doc lines to the web api', function () { - return MockWebApi.setDocument + it('should send the updated doc lines to the web api', function () { + MockWebApi.setDocument .calledWith(this.project_id, this.doc_id, this.newLines) .should.equal(true) }) }) - return describe('with track changes', function () { + describe('with track changes', function () { before(function () { this.lines = ['one', 'one and a half', 'two', 'three'] this.id_seed = '587357bd35e64f6157' - return (this.update = { + this.update = { doc: this.doc_id, op: [ { @@ -329,7 +318,7 @@ describe('Setting a document', function () { user_id: this.user_id }, v: this.version - }) + } }) describe('with the undo flag', function () { @@ -344,7 +333,7 @@ describe('Setting a document', function () { if (error != null) { throw error } - return DocUpdaterClient.sendUpdate( + DocUpdaterClient.sendUpdate( 
this.project_id, this.doc_id, this.update, @@ -353,7 +342,7 @@ describe('Setting a document', function () { throw error } // Go back to old lines, with undo flag - return DocUpdaterClient.setDocLines( + DocUpdaterClient.setDocLines( this.project_id, this.doc_id, this.lines, @@ -362,22 +351,21 @@ describe('Setting a document', function () { true, (error, res, body) => { this.statusCode = res.statusCode - return setTimeout(done, 200) + setTimeout(done, 200) } ) } ) }) - return null }) after(function () { MockTrackChangesApi.flushDoc.reset() MockProjectHistoryApi.flushProject.reset() - return MockWebApi.setDocument.reset() + MockWebApi.setDocument.reset() }) - return it('should undo the tracked changes', function (done) { + it('should undo the tracked changes', function (done) { DocUpdaterClient.getDoc( this.project_id, this.doc_id, @@ -387,14 +375,13 @@ describe('Setting a document', function () { } const { ranges } = data expect(ranges.changes).to.be.undefined - return done() + done() } ) - return null }) }) - return describe('without the undo flag', function () { + describe('without the undo flag', function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() this.doc_id = DocUpdaterClient.randomId() @@ -406,7 +393,7 @@ describe('Setting a document', function () { if (error != null) { throw error } - return DocUpdaterClient.sendUpdate( + DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, this.update, @@ -415,7 +402,7 @@ describe('Setting a document', function () { throw error } // Go back to old lines, without undo flag - return DocUpdaterClient.setDocLines( + DocUpdaterClient.setDocLines( this.project_id, this.doc_id, this.lines, @@ -424,22 +411,21 @@ describe('Setting a document', function () { false, (error, res, body) => { this.statusCode = res.statusCode - return setTimeout(done, 200) + setTimeout(done, 200) } ) } ) }) - return null }) after(function () { MockTrackChangesApi.flushDoc.reset() MockProjectHistoryApi.flushProject.reset() - return MockWebApi.setDocument.reset() + MockWebApi.setDocument.reset() }) - return it('should not undo the tracked changes', function (done) { + it('should not undo the tracked changes', function (done) { DocUpdaterClient.getDoc( this.project_id, this.doc_id, @@ -449,10 +435,9 @@ describe('Setting a document', function () { } const { ranges } = data expect(ranges.changes.length).to.equal(1) - return done() + done() } ) - return null }) }) }) From 150c4a88f10cb40a53e48e4510d4e063e07754cd Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 14:06:25 -0400 Subject: [PATCH 647/769] Decaf cleanup: simplify null checks --- .../acceptance/js/SettingADocumentTests.js | 27 +++++++------------ 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index ddbbf0e9c1..0e99770f58 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -3,13 +3,6 @@ handle-callback-err, no-return-assign, */ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const sinon = require('sinon') const chai = require('chai') chai.should() @@ -66,7 +59,7 @@ describe('Setting a document', function () { version: this.version }) DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { - if (error != null) { + if (error) { throw error } DocUpdaterClient.sendUpdate( @@ -74,7 +67,7 @@ describe('Setting a document', function () { this.doc_id, this.update, (error) => { - if (error != null) { + if (error) { throw error } setTimeout(() => { @@ -136,7 +129,7 @@ describe('Setting a document', function () { it('should leave the document in redis', function (done) { rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => { - if (error != null) { + if (error) { throw error } expect(JSON.parse(lines)).to.deep.equal(this.newLines) @@ -195,7 +188,7 @@ describe('Setting a document', function () { it('should remove the document from redis', function (done) { rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => { - if (error != null) { + if (error) { throw error } expect(lines).to.not.exist @@ -330,7 +323,7 @@ describe('Setting a document', function () { version: this.version }) DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { - if (error != null) { + if (error) { throw error } DocUpdaterClient.sendUpdate( @@ -338,7 +331,7 @@ describe('Setting a document', function () { this.doc_id, this.update, (error) => { - if (error != null) { + if (error) { throw error } // Go back to old lines, with undo flag @@ -370,7 +363,7 @@ describe('Setting a document', function () { this.project_id, this.doc_id, (error, res, data) => { - if (error != null) { + if (error) { throw error } const { ranges } = data @@ -390,7 +383,7 @@ describe('Setting a document', function () { version: this.version }) DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { - if (error != null) { + if (error) { throw error } DocUpdaterClient.sendUpdate( @@ -398,7 +391,7 @@ describe('Setting a document', function () { this.doc_id, this.update, (error) => { - if (error != null) { + if (error) { throw error } // Go back to old lines, without undo flag @@ -430,7 +423,7 @@ describe('Setting a document', function () { this.project_id, this.doc_id, (error, res, data) => { - if (error != null) { + if (error) { throw error } const { ranges } = data From 75f9b0ff10477fd2849fa37bd526b0041d40fbb6 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 14:08:31 -0400 Subject: [PATCH 648/769] Decaf cleanup: handle errors --- .../acceptance/js/SettingADocumentTests.js | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 0e99770f58..54e7541b33 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -1,7 +1,5 @@ /* eslint-disable camelcase, - handle-callback-err, - no-return-assign, */ const sinon = require('sinon') const chai = require('chai') @@ -79,6 +77,9 @@ describe('Setting a document', function () { this.user_id, false, (error, res, body) => { + if (error) { + return done(error) + } this.statusCode = res.statusCode done() } @@ -110,6 +111,9 @@ describe('Setting a 
document', function () { this.project_id, this.doc_id, (error, res, doc) => { + if (error) { + return done(error) + } doc.lines.should.deep.equal(this.newLines) done() } @@ -121,6 +125,9 @@ describe('Setting a document', function () { this.project_id, this.doc_id, (error, res, doc) => { + if (error) { + return done(error) + } doc.version.should.equal(this.version + 2) done() } @@ -154,6 +161,9 @@ describe('Setting a document', function () { this.user_id, false, (error, res, body) => { + if (error) { + return done(error) + } this.statusCode = res.statusCode setTimeout(done, 200) } @@ -219,6 +229,9 @@ describe('Setting a document', function () { this.user_id, false, (error, res, body) => { + if (error) { + return done(error) + } this.statusCode = res.statusCode setTimeout(done, 200) } @@ -271,6 +284,9 @@ describe('Setting a document', function () { this.user_id, false, (error, res, body) => { + if (error) { + return done(error) + } this.statusCode = res.statusCode setTimeout(done, 200) } @@ -343,6 +359,9 @@ describe('Setting a document', function () { this.user_id, true, (error, res, body) => { + if (error) { + return done(error) + } this.statusCode = res.statusCode setTimeout(done, 200) } @@ -403,6 +422,9 @@ describe('Setting a document', function () { this.user_id, false, (error, res, body) => { + if (error) { + return done(error) + } this.statusCode = res.statusCode setTimeout(done, 200) } From f99125c65ac0f3627221570b3d9fe08b4e7fd405 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 8 May 2020 14:09:54 -0400 Subject: [PATCH 649/769] Decaf cleanup: camel case variables --- .../acceptance/js/SettingADocumentTests.js | 35 ++++++++++--------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 54e7541b33..6d57259891 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -1,12 +1,9 @@ -/* eslint-disable - camelcase, -*/ const sinon = require('sinon') const chai = require('chai') chai.should() const { expect } = require('chai') const Settings = require('settings-sharelatex') -const rclient_du = require('redis-sharelatex').createClient( +const docUpdaterRedis = require('redis-sharelatex').createClient( Settings.redis.documentupdater ) const Keys = Settings.redis.documentupdater.key_schema @@ -135,13 +132,16 @@ describe('Setting a document', function () { }) it('should leave the document in redis', function (done) { - rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => { - if (error) { - throw error + docUpdaterRedis.get( + Keys.docLines({ doc_id: this.doc_id }), + (error, lines) => { + if (error) { + throw error + } + expect(JSON.parse(lines)).to.deep.equal(this.newLines) + done() } - expect(JSON.parse(lines)).to.deep.equal(this.newLines) - done() - }) + ) }) }) @@ -197,13 +197,16 @@ describe('Setting a document', function () { }) it('should remove the document from redis', function (done) { - rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => { - if (error) { - throw error + docUpdaterRedis.get( + Keys.docLines({ doc_id: this.doc_id }), + (error, lines) => { + if (error) { + throw error + } + expect(lines).to.not.exist + done() } - expect(lines).to.not.exist - done() - }) + ) }) }) From 41c0899b0ce0a847c0f7b6110d1a1a13ab0793d9 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 
May 2020 10:41:32 -0400 Subject: [PATCH 650/769] Add a test for document size slightly over max doc length --- .../acceptance/js/SettingADocumentTests.js | 103 ++++++++++-------- 1 file changed, 58 insertions(+), 45 deletions(-) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 6d57259891..484d51b57c 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -210,57 +210,70 @@ describe('Setting a document', function () { }) }) - describe('when the updated doc is too large for the body parser', function () { - before(function (done) { - this.project_id = DocUpdaterClient.randomId() - this.doc_id = DocUpdaterClient.randomId() - MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: this.lines, - version: this.version - }) - this.newLines = [] - while ( - JSON.stringify(this.newLines).length <= Settings.maxJsonRequestSize - ) { - this.newLines.push('(a long line of text)'.repeat(10000)) - } - DocUpdaterClient.setDocLines( - this.project_id, - this.doc_id, - this.newLines, - this.source, - this.user_id, - false, - (error, res, body) => { - if (error) { - return done(error) - } - this.statusCode = res.statusCode - setTimeout(done, 200) + const DOC_TOO_LARGE_TEST_CASES = [ + { + desc: 'when the updated doc is too large for the body parser', + size: Settings.maxJsonRequestSize, + expectedStatusCode: 413 + }, + { + desc: 'when the updated doc is larger than the HTTP controller limit', + size: Settings.max_doc_length, + expectedStatusCode: 406 + } + ] + + DOC_TOO_LARGE_TEST_CASES.forEach((testCase) => { + describe(testCase.desc, function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version + }) + this.newLines = [] + while (JSON.stringify(this.newLines).length <= testCase.size) { + this.newLines.push('(a long line of text)'.repeat(10000)) } - ) - }) + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + setTimeout(done, 200) + } + ) + }) - after(function () { - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() - }) + after(function () { + MockTrackChangesApi.flushDoc.reset() + MockProjectHistoryApi.flushProject.reset() + MockWebApi.setDocument.reset() + }) - it('should return a 413 status code', function () { - this.statusCode.should.equal(413) - }) + it(`should return a ${testCase.expectedStatusCode} status code`, function () { + this.statusCode.should.equal(testCase.expectedStatusCode) + }) - it('should not send the updated doc lines to the web api', function () { - MockWebApi.setDocument.called.should.equal(false) - }) + it('should not send the updated doc lines to the web api', function () { + MockWebApi.setDocument.called.should.equal(false) + }) - it('should not flush track changes', function () { - MockTrackChangesApi.flushDoc.called.should.equal(false) - }) + it('should not flush track changes', function () { + MockTrackChangesApi.flushDoc.called.should.equal(false) + }) - it('should not flush project history', function () { - 
MockProjectHistoryApi.flushProject.called.should.equal(false) + it('should not flush project history', function () { + MockProjectHistoryApi.flushProject.called.should.equal(false) + }) }) }) From 3385ec5f263f0f46c4a37c69b57262a2e47cb13b Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 10:43:22 -0400 Subject: [PATCH 651/769] Decaf cleanup: unnecessary Array.from() --- services/document-updater/app/js/HttpController.js | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index b6bd00214e..9ad2fe3d65 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -6,7 +6,6 @@ // Fix any style issues and re-enable lint. /* * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS103: Rewrite code to no longer use __guard__ * DS207: Consider shorter variations of null checks @@ -69,7 +68,7 @@ module.exports = HttpController = { _getTotalSizeOfLines(lines) { let size = 0 - for (const line of Array.from(lines)) { + for (const line of lines) { size += line.length + 1 } return size @@ -89,10 +88,8 @@ module.exports = HttpController = { logger.log({ project_id, exclude: excludeItems }, 'getting docs via http') const timer = new Metrics.Timer('http.getAllDocs') const excludeVersions = {} - for (const item of Array.from(excludeItems)) { - const [id, version] = Array.from( - item != null ? item.split(':') : undefined - ) + for (const item of excludeItems) { + const [id, version] = item.split(':') excludeVersions[id] = version } logger.log( @@ -113,7 +110,7 @@ module.exports = HttpController = { logger.log( { project_id, - result: Array.from(result).map((doc) => `${doc._id}:${doc.v}`) + result: result.map((doc) => `${doc._id}:${doc.v}`) }, 'got docs via http' ) From 814ac40e07f303ccc782d3e9e2d02bd853262d76 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 10:45:39 -0400 Subject: [PATCH 652/769] Decaf cleanup: unnecessary returns --- .../document-updater/app/js/HttpController.js | 98 +++++++++---------- 1 file changed, 45 insertions(+), 53 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 9ad2fe3d65..3d029461b0 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -6,7 +6,6 @@ // Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns * DS103: Rewrite code to no longer use __guard__ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md @@ -41,7 +40,7 @@ module.exports = HttpController = { fromVersion = -1 } - return DocumentManager.getDocAndRecentOpsWithLock( + DocumentManager.getDocAndRecentOpsWithLock( project_id, doc_id, fromVersion, @@ -54,7 +53,7 @@ module.exports = HttpController = { if (lines == null || version == null) { return next(new Errors.NotFoundError('document not found')) } - return res.json({ + res.json({ id: doc_id, lines, version, @@ -96,16 +95,16 @@ module.exports = HttpController = { { project_id, projectStateHash, excludeVersions }, 'excluding versions' ) - return ProjectManager.getProjectDocsAndFlushIfOld( + ProjectManager.getProjectDocsAndFlushIfOld( project_id, projectStateHash, excludeVersions, function (error, result) { timer.done() if (error instanceof Errors.ProjectStateChangedError) { - return res.sendStatus(409) // conflict + res.sendStatus(409) // conflict } else if (error != null) { - return next(error) + next(error) } else { logger.log( { @@ -114,7 +113,7 @@ module.exports = HttpController = { }, 'got docs via http' ) - return res.send(result) + res.send(result) } } ) @@ -127,12 +126,12 @@ module.exports = HttpController = { const { project_id } = req.params const timer = new Metrics.Timer('http.clearProjectState') logger.log({ project_id }, 'clearing project state via http') - return ProjectManager.clearProjectState(project_id, function (error) { + ProjectManager.clearProjectState(project_id, function (error) { timer.done() if (error != null) { - return next(error) + next(error) } else { - return res.sendStatus(200) + res.sendStatus(200) } }) }, @@ -157,7 +156,7 @@ module.exports = HttpController = { 'setting doc via http' ) const timer = new Metrics.Timer('http.setDoc') - return DocumentManager.setDocWithLock( + DocumentManager.setDocWithLock( project_id, doc_id, lines, @@ -170,7 +169,7 @@ module.exports = HttpController = { return next(error) } logger.log({ project_id, doc_id }, 'set doc via http') - return res.sendStatus(204) + res.sendStatus(204) } ) }, // No Content @@ -183,18 +182,16 @@ module.exports = HttpController = { const { project_id } = req.params logger.log({ project_id, doc_id }, 'flushing doc via http') const timer = new Metrics.Timer('http.flushDoc') - return DocumentManager.flushDocIfLoadedWithLock( - project_id, - doc_id, - function (error) { - timer.done() - if (error != null) { - return next(error) - } - logger.log({ project_id, doc_id }, 'flushed doc via http') - return res.sendStatus(204) + DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function ( + error + ) { + timer.done() + if (error != null) { + return next(error) } - ) + logger.log({ project_id, doc_id }, 'flushed doc via http') + res.sendStatus(204) + }) }, // No Content deleteDoc(req, res, next) { @@ -206,7 +203,7 @@ module.exports = HttpController = { const ignoreFlushErrors = req.query.ignore_flush_errors === 'true' const timer = new Metrics.Timer('http.deleteDoc') logger.log({ project_id, doc_id }, 'deleting doc via http') - return DocumentManager.flushAndDeleteDocWithLock( + DocumentManager.flushAndDeleteDocWithLock( project_id, doc_id, { ignoreFlushErrors }, @@ -220,7 +217,7 @@ module.exports = HttpController = { return next(error) } logger.log({ project_id, doc_id }, 'deleted doc via http') - 
return res.sendStatus(204) + res.sendStatus(204) } ) }, // No Content @@ -232,13 +229,13 @@ module.exports = HttpController = { const { project_id } = req.params logger.log({ project_id }, 'flushing project via http') const timer = new Metrics.Timer('http.flushProject') - return ProjectManager.flushProjectWithLocks(project_id, function (error) { + ProjectManager.flushProjectWithLocks(project_id, function (error) { timer.done() if (error != null) { return next(error) } logger.log({ project_id }, 'flushed project via http') - return res.sendStatus(204) + res.sendStatus(204) }) }, // No Content @@ -256,18 +253,16 @@ module.exports = HttpController = { options.skip_history_flush = true } // don't flush history when realtime shuts down if (req.query != null ? req.query.background : undefined) { - return ProjectManager.queueFlushAndDeleteProject(project_id, function ( - error - ) { + ProjectManager.queueFlushAndDeleteProject(project_id, function (error) { if (error != null) { return next(error) } logger.log({ project_id }, 'queue delete of project via http') - return res.sendStatus(204) + res.sendStatus(204) }) // No Content } else { const timer = new Metrics.Timer('http.deleteProject') - return ProjectManager.flushAndDeleteProjectWithLocks( + ProjectManager.flushAndDeleteProjectWithLocks( project_id, options, function (error) { @@ -276,7 +271,7 @@ module.exports = HttpController = { return next(error) } logger.log({ project_id }, 'deleted project via http') - return res.sendStatus(204) + res.sendStatus(204) } ) } @@ -289,17 +284,17 @@ module.exports = HttpController = { const project_ids = (req.body != null ? req.body.project_ids : undefined) || [] logger.log({ project_ids }, 'deleting multiple projects via http') - return async.eachSeries( + async.eachSeries( project_ids, function (project_id, cb) { logger.log({ project_id }, 'queue delete of project via http') - return ProjectManager.queueFlushAndDeleteProject(project_id, cb) + ProjectManager.queueFlushAndDeleteProject(project_id, cb) }, function (error) { if (error != null) { return next(error) } - return res.sendStatus(204) + res.sendStatus(204) } ) }, // No Content @@ -318,7 +313,7 @@ module.exports = HttpController = { `accepting ${change_ids.length} changes via http` ) const timer = new Metrics.Timer('http.acceptChanges') - return DocumentManager.acceptChangesWithLock( + DocumentManager.acceptChangesWithLock( project_id, doc_id, change_ids, @@ -331,7 +326,7 @@ module.exports = HttpController = { { project_id, doc_id }, `accepted ${change_ids.length} changes via http` ) - return res.sendStatus(204) + res.sendStatus(204) } ) }, // No Content @@ -343,7 +338,7 @@ module.exports = HttpController = { const { project_id, doc_id, comment_id } = req.params logger.log({ project_id, doc_id, comment_id }, 'deleting comment via http') const timer = new Metrics.Timer('http.deleteComment') - return DocumentManager.deleteCommentWithLock( + DocumentManager.deleteCommentWithLock( project_id, doc_id, comment_id, @@ -356,7 +351,7 @@ module.exports = HttpController = { { project_id, doc_id, comment_id }, 'deleted comment via http' ) - return res.sendStatus(204) + res.sendStatus(204) } ) }, // No Content @@ -379,7 +374,7 @@ module.exports = HttpController = { 'updating project via http' ) - return ProjectManager.updateProjectWithLocks( + ProjectManager.updateProjectWithLocks( project_id, projectHistoryId, userId, @@ -392,7 +387,7 @@ module.exports = HttpController = { return next(error) } logger.log({ project_id }, 'updated project via http') - return 
res.sendStatus(204) + res.sendStatus(204) } ) }, // No Content @@ -408,7 +403,7 @@ module.exports = HttpController = { { project_id, docs, files }, 'queuing project history resync via http' ) - return HistoryManager.resyncProjectHistory( + HistoryManager.resyncProjectHistory( project_id, projectHistoryId, docs, @@ -418,7 +413,7 @@ module.exports = HttpController = { return next(error) } logger.log({ project_id }, 'queued project history resync via http') - return res.sendStatus(204) + res.sendStatus(204) } ) }, @@ -433,15 +428,12 @@ module.exports = HttpController = { concurrency: req.query.concurrency || 5, dryRun: req.query.dryRun || false } - return ProjectFlusher.flushAllProjects(options, function ( - err, - project_ids - ) { + ProjectFlusher.flushAllProjects(options, function (err, project_ids) { if (err != null) { logger.err({ err }, 'error bulk flushing projects') - return res.sendStatus(500) + res.sendStatus(500) } else { - return res.send(project_ids) + res.send(project_ids) } }) }, @@ -456,16 +448,16 @@ module.exports = HttpController = { timeout: 5 * 60 * 1000, min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 } - return DeleteQueueManager.flushAndDeleteOldProjects(options, function ( + DeleteQueueManager.flushAndDeleteOldProjects(options, function ( err, flushed ) { if (err != null) { logger.err({ err }, 'error flushing old projects') - return res.sendStatus(500) + res.sendStatus(500) } else { logger.log({ flushed }, 'flush of queued projects completed') - return res.send({ flushed }) + res.send({ flushed }) } }) } From 80ea49c69cf64e6fa4c90abfc3b91a0f017d7c7d Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 10:47:27 -0400 Subject: [PATCH 653/769] Decaf cleanup: remove __guard__ --- services/document-updater/app/js/HttpController.js | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 3d029461b0..8c2501fdbe 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -6,7 +6,6 @@ // Fix any style issues and re-enable lint. /* * decaffeinate suggestions: - * DS103: Rewrite code to no longer use __guard__ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ @@ -81,9 +80,7 @@ module.exports = HttpController = { const projectStateHash = req.query != null ? req.query.state : undefined // exclude is string of existing docs "id:version,id:version,..." const excludeItems = - __guard__(req.query != null ? req.query.exclude : undefined, (x) => - x.split(',') - ) || [] + req.query.exclude != null ? req.query.exclude.split(',') : [] logger.log({ project_id, exclude: excludeItems }, 'getting docs via http') const timer = new Metrics.Timer('http.getAllDocs') const excludeVersions = {} @@ -462,9 +459,3 @@ module.exports = HttpController = { }) } } - -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? 
transform(value) - : undefined -} From fc73bbe1a534f64c9b65d275c896335aac389794 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 10:52:06 -0400 Subject: [PATCH 654/769] Decaf cleanup: simplify null checks --- .../document-updater/app/js/HttpController.js | 99 +++++-------------- 1 file changed, 23 insertions(+), 76 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 8c2501fdbe..800aee4c44 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -2,13 +2,6 @@ camelcase, handle-callback-err, */ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ let HttpController const DocumentManager = require('./DocumentManager') const HistoryManager = require('./HistoryManager') @@ -25,15 +18,12 @@ const TWO_MEGABYTES = 2 * 1024 * 1024 module.exports = HttpController = { getDoc(req, res, next) { let fromVersion - if (next == null) { - next = function (error) {} - } const { doc_id } = req.params const { project_id } = req.params logger.log({ project_id, doc_id }, 'getting doc via http') const timer = new Metrics.Timer('http.getDoc') - if ((req.query != null ? req.query.fromVersion : undefined) != null) { + if (req.query.fromVersion != null) { fromVersion = parseInt(req.query.fromVersion, 10) } else { fromVersion = -1 @@ -45,7 +35,7 @@ module.exports = HttpController = { fromVersion, function (error, lines, version, ops, ranges, pathname) { timer.done() - if (error != null) { + if (error) { return next(error) } logger.log({ project_id, doc_id }, 'got doc via http') @@ -73,11 +63,8 @@ module.exports = HttpController = { }, getProjectDocsAndFlushIfOld(req, res, next) { - if (next == null) { - next = function (error) {} - } const { project_id } = req.params - const projectStateHash = req.query != null ? req.query.state : undefined + const projectStateHash = req.query.state // exclude is string of existing docs "id:version,id:version,..." const excludeItems = req.query.exclude != null ? 
req.query.exclude.split(',') : [] @@ -100,7 +87,7 @@ module.exports = HttpController = { timer.done() if (error instanceof Errors.ProjectStateChangedError) { res.sendStatus(409) // conflict - } else if (error != null) { + } else if (error) { next(error) } else { logger.log( @@ -117,15 +104,12 @@ module.exports = HttpController = { }, clearProjectState(req, res, next) { - if (next == null) { - next = function (error) {} - } const { project_id } = req.params const timer = new Metrics.Timer('http.clearProjectState') logger.log({ project_id }, 'clearing project state via http') ProjectManager.clearProjectState(project_id, function (error) { timer.done() - if (error != null) { + if (error) { next(error) } else { res.sendStatus(200) @@ -134,9 +118,6 @@ module.exports = HttpController = { }, setDoc(req, res, next) { - if (next == null) { - next = function (error) {} - } const { doc_id } = req.params const { project_id } = req.params const { lines, source, user_id, undoing } = req.body @@ -162,7 +143,7 @@ module.exports = HttpController = { undoing, function (error) { timer.done() - if (error != null) { + if (error) { return next(error) } logger.log({ project_id, doc_id }, 'set doc via http') @@ -172,9 +153,6 @@ module.exports = HttpController = { }, // No Content flushDocIfLoaded(req, res, next) { - if (next == null) { - next = function (error) {} - } const { doc_id } = req.params const { project_id } = req.params logger.log({ project_id, doc_id }, 'flushing doc via http') @@ -183,7 +161,7 @@ module.exports = HttpController = { error ) { timer.done() - if (error != null) { + if (error) { return next(error) } logger.log({ project_id, doc_id }, 'flushed doc via http') @@ -192,9 +170,6 @@ module.exports = HttpController = { }, // No Content deleteDoc(req, res, next) { - if (next == null) { - next = function (error) {} - } const { doc_id } = req.params const { project_id } = req.params const ignoreFlushErrors = req.query.ignore_flush_errors === 'true' @@ -210,7 +185,7 @@ module.exports = HttpController = { // failed and sometimes it is required HistoryManager.flushProjectChangesAsync(project_id) - if (error != null) { + if (error) { return next(error) } logger.log({ project_id, doc_id }, 'deleted doc via http') @@ -220,15 +195,12 @@ module.exports = HttpController = { }, // No Content flushProject(req, res, next) { - if (next == null) { - next = function (error) {} - } const { project_id } = req.params logger.log({ project_id }, 'flushing project via http') const timer = new Metrics.Timer('http.flushProject') ProjectManager.flushProjectWithLocks(project_id, function (error) { timer.done() - if (error != null) { + if (error) { return next(error) } logger.log({ project_id }, 'flushed project via http') @@ -237,21 +209,18 @@ module.exports = HttpController = { }, // No Content deleteProject(req, res, next) { - if (next == null) { - next = function (error) {} - } const { project_id } = req.params logger.log({ project_id }, 'deleting project via http') const options = {} - if (req.query != null ? req.query.background : undefined) { + if (req.query.background) { options.background = true } // allow non-urgent flushes to be queued - if (req.query != null ? req.query.shutdown : undefined) { + if (req.query.shutdown) { options.skip_history_flush = true } // don't flush history when realtime shuts down - if (req.query != null ? 
req.query.background : undefined) { + if (req.query.background) { ProjectManager.queueFlushAndDeleteProject(project_id, function (error) { - if (error != null) { + if (error) { return next(error) } logger.log({ project_id }, 'queue delete of project via http') @@ -264,7 +233,7 @@ module.exports = HttpController = { options, function (error) { timer.done() - if (error != null) { + if (error) { return next(error) } logger.log({ project_id }, 'deleted project via http') @@ -275,11 +244,7 @@ module.exports = HttpController = { }, // No Content deleteMultipleProjects(req, res, next) { - if (next == null) { - next = function (error) {} - } - const project_ids = - (req.body != null ? req.body.project_ids : undefined) || [] + const project_ids = req.body.project_ids || [] logger.log({ project_ids }, 'deleting multiple projects via http') async.eachSeries( project_ids, @@ -288,7 +253,7 @@ module.exports = HttpController = { ProjectManager.queueFlushAndDeleteProject(project_id, cb) }, function (error) { - if (error != null) { + if (error) { return next(error) } res.sendStatus(204) @@ -297,11 +262,8 @@ module.exports = HttpController = { }, // No Content acceptChanges(req, res, next) { - if (next == null) { - next = function (error) {} - } const { project_id, doc_id } = req.params - let change_ids = req.body != null ? req.body.change_ids : undefined + let change_ids = req.body.change_ids if (change_ids == null) { change_ids = [req.params.change_id] } @@ -316,7 +278,7 @@ module.exports = HttpController = { change_ids, function (error) { timer.done() - if (error != null) { + if (error) { return next(error) } logger.log( @@ -329,9 +291,6 @@ module.exports = HttpController = { }, // No Content deleteComment(req, res, next) { - if (next == null) { - next = function (error) {} - } const { project_id, doc_id, comment_id } = req.params logger.log({ project_id, doc_id, comment_id }, 'deleting comment via http') const timer = new Metrics.Timer('http.deleteComment') @@ -341,7 +300,7 @@ module.exports = HttpController = { comment_id, function (error) { timer.done() - if (error != null) { + if (error) { return next(error) } logger.log( @@ -354,9 +313,6 @@ module.exports = HttpController = { }, // No Content updateProject(req, res, next) { - if (next == null) { - next = function (error) {} - } const timer = new Metrics.Timer('http.updateProject') const { project_id } = req.params const { @@ -380,7 +336,7 @@ module.exports = HttpController = { version, function (error) { timer.done() - if (error != null) { + if (error) { return next(error) } logger.log({ project_id }, 'updated project via http') @@ -390,9 +346,6 @@ module.exports = HttpController = { }, // No Content resyncProjectHistory(req, res, next) { - if (next == null) { - next = function (error) {} - } const { project_id } = req.params const { projectHistoryId, docs, files } = req.body @@ -406,7 +359,7 @@ module.exports = HttpController = { docs, files, function (error) { - if (error != null) { + if (error) { return next(error) } logger.log({ project_id }, 'queued project history resync via http') @@ -416,9 +369,6 @@ module.exports = HttpController = { }, flushAllProjects(req, res, next) { - if (next == null) { - next = function (error) {} - } res.setTimeout(5 * 60 * 1000) const options = { limit: req.query.limit || 1000, @@ -426,7 +376,7 @@ module.exports = HttpController = { dryRun: req.query.dryRun || false } ProjectFlusher.flushAllProjects(options, function (err, project_ids) { - if (err != null) { + if (err) { logger.err({ err }, 'error bulk flushing 
projects') res.sendStatus(500) } else { @@ -436,9 +386,6 @@ module.exports = HttpController = { }, flushQueuedProjects(req, res, next) { - if (next == null) { - next = function (error) {} - } res.setTimeout(10 * 60 * 1000) const options = { limit: req.query.limit || 1000, @@ -449,7 +396,7 @@ module.exports = HttpController = { err, flushed ) { - if (err != null) { + if (err) { logger.err({ err }, 'error flushing old projects') res.sendStatus(500) } else { From 64a881461f761a1ca887a1ff48a46da6e39b55ea Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 11:07:15 -0400 Subject: [PATCH 655/769] Decaf cleanup: camel case variables --- .../document-updater/app/js/HttpController.js | 173 +++++++++--------- 1 file changed, 85 insertions(+), 88 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 800aee4c44..61258ad172 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -1,7 +1,3 @@ -/* eslint-disable - camelcase, - handle-callback-err, -*/ let HttpController const DocumentManager = require('./DocumentManager') const HistoryManager = require('./HistoryManager') @@ -18,9 +14,9 @@ const TWO_MEGABYTES = 2 * 1024 * 1024 module.exports = HttpController = { getDoc(req, res, next) { let fromVersion - const { doc_id } = req.params - const { project_id } = req.params - logger.log({ project_id, doc_id }, 'getting doc via http') + const docId = req.params.doc_id + const projectId = req.params.project_id + logger.log({ projectId, docId }, 'getting doc via http') const timer = new Metrics.Timer('http.getDoc') if (req.query.fromVersion != null) { @@ -30,20 +26,20 @@ module.exports = HttpController = { } DocumentManager.getDocAndRecentOpsWithLock( - project_id, - doc_id, + projectId, + docId, fromVersion, function (error, lines, version, ops, ranges, pathname) { timer.done() if (error) { return next(error) } - logger.log({ project_id, doc_id }, 'got doc via http') + logger.log({ projectId, docId }, 'got doc via http') if (lines == null || version == null) { return next(new Errors.NotFoundError('document not found')) } res.json({ - id: doc_id, + id: docId, lines, version, ops, @@ -63,12 +59,12 @@ module.exports = HttpController = { }, getProjectDocsAndFlushIfOld(req, res, next) { - const { project_id } = req.params + const projectId = req.params.project_id const projectStateHash = req.query.state // exclude is string of existing docs "id:version,id:version,..." const excludeItems = req.query.exclude != null ? 
req.query.exclude.split(',') : [] - logger.log({ project_id, exclude: excludeItems }, 'getting docs via http') + logger.log({ projectId, exclude: excludeItems }, 'getting docs via http') const timer = new Metrics.Timer('http.getAllDocs') const excludeVersions = {} for (const item of excludeItems) { @@ -76,11 +72,11 @@ module.exports = HttpController = { excludeVersions[id] = version } logger.log( - { project_id, projectStateHash, excludeVersions }, + { projectId, projectStateHash, excludeVersions }, 'excluding versions' ) ProjectManager.getProjectDocsAndFlushIfOld( - project_id, + projectId, projectStateHash, excludeVersions, function (error, result) { @@ -92,7 +88,7 @@ module.exports = HttpController = { } else { logger.log( { - project_id, + projectId, result: result.map((doc) => `${doc._id}:${doc.v}`) }, 'got docs via http' @@ -104,10 +100,10 @@ module.exports = HttpController = { }, clearProjectState(req, res, next) { - const { project_id } = req.params + const projectId = req.params.project_id const timer = new Metrics.Timer('http.clearProjectState') - logger.log({ project_id }, 'clearing project state via http') - ProjectManager.clearProjectState(project_id, function (error) { + logger.log({ projectId }, 'clearing project state via http') + ProjectManager.clearProjectState(projectId, function (error) { timer.done() if (error) { next(error) @@ -118,99 +114,99 @@ module.exports = HttpController = { }, setDoc(req, res, next) { - const { doc_id } = req.params - const { project_id } = req.params - const { lines, source, user_id, undoing } = req.body + const docId = req.params.doc_id + const projectId = req.params.project_id + const { lines, source, user_id: userId, undoing } = req.body const lineSize = HttpController._getTotalSizeOfLines(lines) if (lineSize > TWO_MEGABYTES) { logger.log( - { project_id, doc_id, source, lineSize, user_id }, + { projectId, docId, source, lineSize, userId }, 'document too large, returning 406 response' ) return res.sendStatus(406) } logger.log( - { project_id, doc_id, lines, source, user_id, undoing }, + { projectId, docId, lines, source, userId, undoing }, 'setting doc via http' ) const timer = new Metrics.Timer('http.setDoc') DocumentManager.setDocWithLock( - project_id, - doc_id, + projectId, + docId, lines, source, - user_id, + userId, undoing, function (error) { timer.done() if (error) { return next(error) } - logger.log({ project_id, doc_id }, 'set doc via http') + logger.log({ projectId, docId }, 'set doc via http') res.sendStatus(204) } ) }, // No Content flushDocIfLoaded(req, res, next) { - const { doc_id } = req.params - const { project_id } = req.params - logger.log({ project_id, doc_id }, 'flushing doc via http') + const docId = req.params.doc_id + const projectId = req.params.project_id + logger.log({ projectId, docId }, 'flushing doc via http') const timer = new Metrics.Timer('http.flushDoc') - DocumentManager.flushDocIfLoadedWithLock(project_id, doc_id, function ( + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, function ( error ) { timer.done() if (error) { return next(error) } - logger.log({ project_id, doc_id }, 'flushed doc via http') + logger.log({ projectId, docId }, 'flushed doc via http') res.sendStatus(204) }) }, // No Content deleteDoc(req, res, next) { - const { doc_id } = req.params - const { project_id } = req.params + const docId = req.params.doc_id + const projectId = req.params.project_id const ignoreFlushErrors = req.query.ignore_flush_errors === 'true' const timer = new Metrics.Timer('http.deleteDoc') - 
logger.log({ project_id, doc_id }, 'deleting doc via http') + logger.log({ projectId, docId }, 'deleting doc via http') DocumentManager.flushAndDeleteDocWithLock( - project_id, - doc_id, + projectId, + docId, { ignoreFlushErrors }, function (error) { timer.done() // There is no harm in flushing project history if the previous call // failed and sometimes it is required - HistoryManager.flushProjectChangesAsync(project_id) + HistoryManager.flushProjectChangesAsync(projectId) if (error) { return next(error) } - logger.log({ project_id, doc_id }, 'deleted doc via http') + logger.log({ projectId, docId }, 'deleted doc via http') res.sendStatus(204) } ) }, // No Content flushProject(req, res, next) { - const { project_id } = req.params - logger.log({ project_id }, 'flushing project via http') + const projectId = req.params.project_id + logger.log({ projectId }, 'flushing project via http') const timer = new Metrics.Timer('http.flushProject') - ProjectManager.flushProjectWithLocks(project_id, function (error) { + ProjectManager.flushProjectWithLocks(projectId, function (error) { timer.done() if (error) { return next(error) } - logger.log({ project_id }, 'flushed project via http') + logger.log({ projectId }, 'flushed project via http') res.sendStatus(204) }) }, // No Content deleteProject(req, res, next) { - const { project_id } = req.params - logger.log({ project_id }, 'deleting project via http') + const projectId = req.params.project_id + logger.log({ projectId }, 'deleting project via http') const options = {} if (req.query.background) { options.background = true @@ -219,24 +215,24 @@ module.exports = HttpController = { options.skip_history_flush = true } // don't flush history when realtime shuts down if (req.query.background) { - ProjectManager.queueFlushAndDeleteProject(project_id, function (error) { + ProjectManager.queueFlushAndDeleteProject(projectId, function (error) { if (error) { return next(error) } - logger.log({ project_id }, 'queue delete of project via http') + logger.log({ projectId }, 'queue delete of project via http') res.sendStatus(204) }) // No Content } else { const timer = new Metrics.Timer('http.deleteProject') ProjectManager.flushAndDeleteProjectWithLocks( - project_id, + projectId, options, function (error) { timer.done() if (error) { return next(error) } - logger.log({ project_id }, 'deleted project via http') + logger.log({ projectId }, 'deleted project via http') res.sendStatus(204) } ) @@ -244,13 +240,13 @@ module.exports = HttpController = { }, // No Content deleteMultipleProjects(req, res, next) { - const project_ids = req.body.project_ids || [] - logger.log({ project_ids }, 'deleting multiple projects via http') + const projectIds = req.body.project_ids || [] + logger.log({ projectIds }, 'deleting multiple projects via http') async.eachSeries( - project_ids, - function (project_id, cb) { - logger.log({ project_id }, 'queue delete of project via http') - ProjectManager.queueFlushAndDeleteProject(project_id, cb) + projectIds, + function (projectId, cb) { + logger.log({ projectId }, 'queue delete of project via http') + ProjectManager.queueFlushAndDeleteProject(projectId, cb) }, function (error) { if (error) { @@ -262,51 +258,52 @@ module.exports = HttpController = { }, // No Content acceptChanges(req, res, next) { - const { project_id, doc_id } = req.params - let change_ids = req.body.change_ids - if (change_ids == null) { - change_ids = [req.params.change_id] + const { project_id: projectId, doc_id: docId } = req.params + let changeIds = req.body.change_ids + 
if (changeIds == null) { + changeIds = [req.params.change_id] } logger.log( - { project_id, doc_id }, - `accepting ${change_ids.length} changes via http` + { projectId, docId }, + `accepting ${changeIds.length} changes via http` ) const timer = new Metrics.Timer('http.acceptChanges') DocumentManager.acceptChangesWithLock( - project_id, - doc_id, - change_ids, + projectId, + docId, + changeIds, function (error) { timer.done() if (error) { return next(error) } logger.log( - { project_id, doc_id }, - `accepted ${change_ids.length} changes via http` + { projectId, docId }, + `accepted ${changeIds.length} changes via http` ) - res.sendStatus(204) + res.sendStatus(204) // No Content } ) - }, // No Content + }, deleteComment(req, res, next) { - const { project_id, doc_id, comment_id } = req.params - logger.log({ project_id, doc_id, comment_id }, 'deleting comment via http') + const { + project_id: projectId, + doc_id: docId, + comment_id: commentId + } = req.params + logger.log({ projectId, docId, commentId }, 'deleting comment via http') const timer = new Metrics.Timer('http.deleteComment') DocumentManager.deleteCommentWithLock( - project_id, - doc_id, - comment_id, + projectId, + docId, + commentId, function (error) { timer.done() if (error) { return next(error) } - logger.log( - { project_id, doc_id, comment_id }, - 'deleted comment via http' - ) + logger.log({ projectId, docId, commentId }, 'deleted comment via http') res.sendStatus(204) } ) @@ -314,7 +311,7 @@ module.exports = HttpController = { updateProject(req, res, next) { const timer = new Metrics.Timer('http.updateProject') - const { project_id } = req.params + const projectId = req.params.project_id const { projectHistoryId, userId, @@ -323,12 +320,12 @@ module.exports = HttpController = { version } = req.body logger.log( - { project_id, docUpdates, fileUpdates, version }, + { projectId, docUpdates, fileUpdates, version }, 'updating project via http' ) ProjectManager.updateProjectWithLocks( - project_id, + projectId, projectHistoryId, userId, docUpdates, @@ -339,22 +336,22 @@ module.exports = HttpController = { if (error) { return next(error) } - logger.log({ project_id }, 'updated project via http') + logger.log({ projectId }, 'updated project via http') res.sendStatus(204) } ) }, // No Content resyncProjectHistory(req, res, next) { - const { project_id } = req.params + const projectId = req.params.project_id const { projectHistoryId, docs, files } = req.body logger.log( - { project_id, docs, files }, + { projectId, docs, files }, 'queuing project history resync via http' ) HistoryManager.resyncProjectHistory( - project_id, + projectId, projectHistoryId, docs, files, @@ -362,7 +359,7 @@ module.exports = HttpController = { if (error) { return next(error) } - logger.log({ project_id }, 'queued project history resync via http') + logger.log({ projectId }, 'queued project history resync via http') res.sendStatus(204) } ) @@ -375,12 +372,12 @@ module.exports = HttpController = { concurrency: req.query.concurrency || 5, dryRun: req.query.dryRun || false } - ProjectFlusher.flushAllProjects(options, function (err, project_ids) { + ProjectFlusher.flushAllProjects(options, function (err, projectIds) { if (err) { logger.err({ err }, 'error bulk flushing projects') res.sendStatus(500) } else { - res.send(project_ids) + res.send(projectIds) } }) }, From e4ac63dd1942fb2d0976f7af2e1760b6ec93b7a9 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 11:12:15 -0400 Subject: [PATCH 656/769] Decaf cleanup: move functions to top level --- 
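
Patches 653 to 655 above are mechanical decaffeinate repairs that follow one pattern: the generated __guard__ helper and the defensive `x != null ? x.y : undefined` ternaries exist only to emulate CoffeeScript's soak operator, and Express always supplies req.query and req.body, so they collapse into direct property access with a plain null check; the camel-case pass then renames locals at the destructuring site. A condensed before/after sketch of the combined pattern, using the exclude query parameter from getProjectDocsAndFlushIfOld (all names taken from the patches themselves):

    // Before (decaffeinate output): guarded access, snake_case local.
    const { project_id } = req.params
    const excludeItems =
      __guard__(req.query != null ? req.query.exclude : undefined, (x) =>
        x.split(',')
      ) || []

    function __guard__(value, transform) {
      return typeof value !== 'undefined' && value !== null
        ? transform(value)
        : undefined
    }

    // After: Express guarantees req.query, so a direct null check suffices,
    // and the local is camelCased where it leaves the snake_case wire format.
    const projectId = req.params.project_id
    const excludeItems =
      req.query.exclude != null ? req.query.exclude.split(',') : []

Keeping the rename at the destructuring site means the snake_case HTTP contract (project_id, doc_id, change_ids) is untouched while everything downstream reads idiomatically.
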
.../document-updater/app/js/HttpController.js | 739 +++++++++--------- 1 file changed, 372 insertions(+), 367 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 61258ad172..5895bececd 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -1,4 +1,3 @@ -let HttpController const DocumentManager = require('./DocumentManager') const HistoryManager = require('./HistoryManager') const ProjectManager = require('./ProjectManager') @@ -11,395 +10,401 @@ const async = require('async') const TWO_MEGABYTES = 2 * 1024 * 1024 -module.exports = HttpController = { - getDoc(req, res, next) { - let fromVersion - const docId = req.params.doc_id - const projectId = req.params.project_id - logger.log({ projectId, docId }, 'getting doc via http') - const timer = new Metrics.Timer('http.getDoc') +module.exports = { + getDoc, + getProjectDocsAndFlushIfOld, + clearProjectState, + setDoc, + flushDocIfLoaded, + deleteDoc, + flushProject, + deleteProject, + deleteMultipleProjects, + acceptChanges, + deleteComment, + updateProject, + resyncProjectHistory, + flushAllProjects, + flushQueuedProjects +} - if (req.query.fromVersion != null) { - fromVersion = parseInt(req.query.fromVersion, 10) - } else { - fromVersion = -1 - } +function getDoc(req, res, next) { + let fromVersion + const docId = req.params.doc_id + const projectId = req.params.project_id + logger.log({ projectId, docId }, 'getting doc via http') + const timer = new Metrics.Timer('http.getDoc') - DocumentManager.getDocAndRecentOpsWithLock( - projectId, - docId, - fromVersion, - function (error, lines, version, ops, ranges, pathname) { - timer.done() - if (error) { - return next(error) - } - logger.log({ projectId, docId }, 'got doc via http') - if (lines == null || version == null) { - return next(new Errors.NotFoundError('document not found')) - } - res.json({ - id: docId, - lines, - version, - ops, - ranges, - pathname - }) - } - ) - }, + if (req.query.fromVersion != null) { + fromVersion = parseInt(req.query.fromVersion, 10) + } else { + fromVersion = -1 + } - _getTotalSizeOfLines(lines) { - let size = 0 - for (const line of lines) { - size += line.length + 1 - } - return size - }, - - getProjectDocsAndFlushIfOld(req, res, next) { - const projectId = req.params.project_id - const projectStateHash = req.query.state - // exclude is string of existing docs "id:version,id:version,..." - const excludeItems = - req.query.exclude != null ? 
req.query.exclude.split(',') : [] - logger.log({ projectId, exclude: excludeItems }, 'getting docs via http') - const timer = new Metrics.Timer('http.getAllDocs') - const excludeVersions = {} - for (const item of excludeItems) { - const [id, version] = item.split(':') - excludeVersions[id] = version - } - logger.log( - { projectId, projectStateHash, excludeVersions }, - 'excluding versions' - ) - ProjectManager.getProjectDocsAndFlushIfOld( - projectId, - projectStateHash, - excludeVersions, - function (error, result) { - timer.done() - if (error instanceof Errors.ProjectStateChangedError) { - res.sendStatus(409) // conflict - } else if (error) { - next(error) - } else { - logger.log( - { - projectId, - result: result.map((doc) => `${doc._id}:${doc.v}`) - }, - 'got docs via http' - ) - res.send(result) - } - } - ) - }, - - clearProjectState(req, res, next) { - const projectId = req.params.project_id - const timer = new Metrics.Timer('http.clearProjectState') - logger.log({ projectId }, 'clearing project state via http') - ProjectManager.clearProjectState(projectId, function (error) { + DocumentManager.getDocAndRecentOpsWithLock( + projectId, + docId, + fromVersion, + function (error, lines, version, ops, ranges, pathname) { timer.done() if (error) { + return next(error) + } + logger.log({ projectId, docId }, 'got doc via http') + if (lines == null || version == null) { + return next(new Errors.NotFoundError('document not found')) + } + res.json({ + id: docId, + lines, + version, + ops, + ranges, + pathname + }) + } + ) +} + +function _getTotalSizeOfLines(lines) { + let size = 0 + for (const line of lines) { + size += line.length + 1 + } + return size +} + +function getProjectDocsAndFlushIfOld(req, res, next) { + const projectId = req.params.project_id + const projectStateHash = req.query.state + // exclude is string of existing docs "id:version,id:version,..." + const excludeItems = + req.query.exclude != null ? 
req.query.exclude.split(',') : [] + logger.log({ projectId, exclude: excludeItems }, 'getting docs via http') + const timer = new Metrics.Timer('http.getAllDocs') + const excludeVersions = {} + for (const item of excludeItems) { + const [id, version] = item.split(':') + excludeVersions[id] = version + } + logger.log( + { projectId, projectStateHash, excludeVersions }, + 'excluding versions' + ) + ProjectManager.getProjectDocsAndFlushIfOld( + projectId, + projectStateHash, + excludeVersions, + function (error, result) { + timer.done() + if (error instanceof Errors.ProjectStateChangedError) { + res.sendStatus(409) // conflict + } else if (error) { next(error) } else { - res.sendStatus(200) + logger.log( + { + projectId, + result: result.map((doc) => `${doc._id}:${doc.v}`) + }, + 'got docs via http' + ) + res.send(result) } - }) - }, - - setDoc(req, res, next) { - const docId = req.params.doc_id - const projectId = req.params.project_id - const { lines, source, user_id: userId, undoing } = req.body - const lineSize = HttpController._getTotalSizeOfLines(lines) - if (lineSize > TWO_MEGABYTES) { - logger.log( - { projectId, docId, source, lineSize, userId }, - 'document too large, returning 406 response' - ) - return res.sendStatus(406) } - logger.log( - { projectId, docId, lines, source, userId, undoing }, - 'setting doc via http' - ) - const timer = new Metrics.Timer('http.setDoc') - DocumentManager.setDocWithLock( - projectId, - docId, - lines, - source, - userId, - undoing, - function (error) { - timer.done() - if (error) { - return next(error) - } - logger.log({ projectId, docId }, 'set doc via http') - res.sendStatus(204) - } - ) - }, // No Content + ) +} - flushDocIfLoaded(req, res, next) { - const docId = req.params.doc_id - const projectId = req.params.project_id - logger.log({ projectId, docId }, 'flushing doc via http') - const timer = new Metrics.Timer('http.flushDoc') - DocumentManager.flushDocIfLoadedWithLock(projectId, docId, function ( +function clearProjectState(req, res, next) { + const projectId = req.params.project_id + const timer = new Metrics.Timer('http.clearProjectState') + logger.log({ projectId }, 'clearing project state via http') + ProjectManager.clearProjectState(projectId, function (error) { + timer.done() + if (error) { + next(error) + } else { + res.sendStatus(200) + } + }) +} + +function setDoc(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + const { lines, source, user_id: userId, undoing } = req.body + const lineSize = _getTotalSizeOfLines(lines) + if (lineSize > TWO_MEGABYTES) { + logger.log( + { projectId, docId, source, lineSize, userId }, + 'document too large, returning 406 response' + ) + return res.sendStatus(406) + } + logger.log( + { projectId, docId, lines, source, userId, undoing }, + 'setting doc via http' + ) + const timer = new Metrics.Timer('http.setDoc') + DocumentManager.setDocWithLock( + projectId, + docId, + lines, + source, + userId, + undoing, + function (error) { + timer.done() + if (error) { + return next(error) + } + logger.log({ projectId, docId }, 'set doc via http') + res.sendStatus(204) // No Content + } + ) +} + +function flushDocIfLoaded(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + logger.log({ projectId, docId }, 'flushing doc via http') + const timer = new Metrics.Timer('http.flushDoc') + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, function (error) { + timer.done() + if (error) { + return next(error) + } + 
logger.log({ projectId, docId }, 'flushed doc via http') + res.sendStatus(204) // No Content + }) +} + +function deleteDoc(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + const ignoreFlushErrors = req.query.ignore_flush_errors === 'true' + const timer = new Metrics.Timer('http.deleteDoc') + logger.log({ projectId, docId }, 'deleting doc via http') + DocumentManager.flushAndDeleteDocWithLock( + projectId, + docId, + { ignoreFlushErrors }, + function (error) { + timer.done() + // There is no harm in flushing project history if the previous call + // failed and sometimes it is required + HistoryManager.flushProjectChangesAsync(projectId) + + if (error) { + return next(error) + } + logger.log({ projectId, docId }, 'deleted doc via http') + res.sendStatus(204) // No Content + } + ) +} + +function flushProject(req, res, next) { + const projectId = req.params.project_id + logger.log({ projectId }, 'flushing project via http') + const timer = new Metrics.Timer('http.flushProject') + ProjectManager.flushProjectWithLocks(projectId, function (error) { + timer.done() + if (error) { + return next(error) + } + logger.log({ projectId }, 'flushed project via http') + res.sendStatus(204) // No Content + }) +} + +function deleteProject(req, res, next) { + const projectId = req.params.project_id + logger.log({ projectId }, 'deleting project via http') + const options = {} + if (req.query.background) { + options.background = true + } // allow non-urgent flushes to be queued + if (req.query.shutdown) { + options.skip_history_flush = true + } // don't flush history when realtime shuts down + if (req.query.background) { + ProjectManager.queueFlushAndDeleteProject(projectId, function (error) { + if (error) { + return next(error) + } + logger.log({ projectId }, 'queue delete of project via http') + res.sendStatus(204) + }) // No Content + } else { + const timer = new Metrics.Timer('http.deleteProject') + ProjectManager.flushAndDeleteProjectWithLocks(projectId, options, function ( error ) { timer.done() if (error) { return next(error) } - logger.log({ projectId, docId }, 'flushed doc via http') - res.sendStatus(204) + logger.log({ projectId }, 'deleted project via http') + res.sendStatus(204) // No Content }) - }, // No Content + } +} - deleteDoc(req, res, next) { - const docId = req.params.doc_id - const projectId = req.params.project_id - const ignoreFlushErrors = req.query.ignore_flush_errors === 'true' - const timer = new Metrics.Timer('http.deleteDoc') - logger.log({ projectId, docId }, 'deleting doc via http') - DocumentManager.flushAndDeleteDocWithLock( - projectId, - docId, - { ignoreFlushErrors }, - function (error) { - timer.done() - // There is no harm in flushing project history if the previous call - // failed and sometimes it is required - HistoryManager.flushProjectChangesAsync(projectId) - - if (error) { - return next(error) - } - logger.log({ projectId, docId }, 'deleted doc via http') - res.sendStatus(204) +function deleteMultipleProjects(req, res, next) { + const projectIds = req.body.project_ids || [] + logger.log({ projectIds }, 'deleting multiple projects via http') + async.eachSeries( + projectIds, + function (projectId, cb) { + logger.log({ projectId }, 'queue delete of project via http') + ProjectManager.queueFlushAndDeleteProject(projectId, cb) + }, + function (error) { + if (error) { + return next(error) } - ) - }, // No Content + res.sendStatus(204) // No Content + } + ) +} - flushProject(req, res, next) { - const projectId = 
req.params.project_id - logger.log({ projectId }, 'flushing project via http') - const timer = new Metrics.Timer('http.flushProject') - ProjectManager.flushProjectWithLocks(projectId, function (error) { +function acceptChanges(req, res, next) { + const { project_id: projectId, doc_id: docId } = req.params + let changeIds = req.body.change_ids + if (changeIds == null) { + changeIds = [req.params.change_id] + } + logger.log( + { projectId, docId }, + `accepting ${changeIds.length} changes via http` + ) + const timer = new Metrics.Timer('http.acceptChanges') + DocumentManager.acceptChangesWithLock(projectId, docId, changeIds, function ( + error + ) { + timer.done() + if (error) { + return next(error) + } + logger.log( + { projectId, docId }, + `accepted ${changeIds.length} changes via http` + ) + res.sendStatus(204) // No Content + }) +} + +function deleteComment(req, res, next) { + const { + project_id: projectId, + doc_id: docId, + comment_id: commentId + } = req.params + logger.log({ projectId, docId, commentId }, 'deleting comment via http') + const timer = new Metrics.Timer('http.deleteComment') + DocumentManager.deleteCommentWithLock(projectId, docId, commentId, function ( + error + ) { + timer.done() + if (error) { + return next(error) + } + logger.log({ projectId, docId, commentId }, 'deleted comment via http') + res.sendStatus(204) // No Content + }) +} + +function updateProject(req, res, next) { + const timer = new Metrics.Timer('http.updateProject') + const projectId = req.params.project_id + const { + projectHistoryId, + userId, + docUpdates, + fileUpdates, + version + } = req.body + logger.log( + { projectId, docUpdates, fileUpdates, version }, + 'updating project via http' + ) + + ProjectManager.updateProjectWithLocks( + projectId, + projectHistoryId, + userId, + docUpdates, + fileUpdates, + version, + function (error) { timer.done() if (error) { return next(error) } - logger.log({ projectId }, 'flushed project via http') - res.sendStatus(204) - }) - }, // No Content - - deleteProject(req, res, next) { - const projectId = req.params.project_id - logger.log({ projectId }, 'deleting project via http') - const options = {} - if (req.query.background) { - options.background = true - } // allow non-urgent flushes to be queued - if (req.query.shutdown) { - options.skip_history_flush = true - } // don't flush history when realtime shuts down - if (req.query.background) { - ProjectManager.queueFlushAndDeleteProject(projectId, function (error) { - if (error) { - return next(error) - } - logger.log({ projectId }, 'queue delete of project via http') - res.sendStatus(204) - }) // No Content - } else { - const timer = new Metrics.Timer('http.deleteProject') - ProjectManager.flushAndDeleteProjectWithLocks( - projectId, - options, - function (error) { - timer.done() - if (error) { - return next(error) - } - logger.log({ projectId }, 'deleted project via http') - res.sendStatus(204) - } - ) + logger.log({ projectId }, 'updated project via http') + res.sendStatus(204) // No Content } - }, // No Content - - deleteMultipleProjects(req, res, next) { - const projectIds = req.body.project_ids || [] - logger.log({ projectIds }, 'deleting multiple projects via http') - async.eachSeries( - projectIds, - function (projectId, cb) { - logger.log({ projectId }, 'queue delete of project via http') - ProjectManager.queueFlushAndDeleteProject(projectId, cb) - }, - function (error) { - if (error) { - return next(error) - } - res.sendStatus(204) - } - ) - }, // No Content - - acceptChanges(req, res, next) { - 
const { project_id: projectId, doc_id: docId } = req.params - let changeIds = req.body.change_ids - if (changeIds == null) { - changeIds = [req.params.change_id] - } - logger.log( - { projectId, docId }, - `accepting ${changeIds.length} changes via http` - ) - const timer = new Metrics.Timer('http.acceptChanges') - DocumentManager.acceptChangesWithLock( - projectId, - docId, - changeIds, - function (error) { - timer.done() - if (error) { - return next(error) - } - logger.log( - { projectId, docId }, - `accepted ${changeIds.length} changes via http` - ) - res.sendStatus(204) // No Content - } - ) - }, - - deleteComment(req, res, next) { - const { - project_id: projectId, - doc_id: docId, - comment_id: commentId - } = req.params - logger.log({ projectId, docId, commentId }, 'deleting comment via http') - const timer = new Metrics.Timer('http.deleteComment') - DocumentManager.deleteCommentWithLock( - projectId, - docId, - commentId, - function (error) { - timer.done() - if (error) { - return next(error) - } - logger.log({ projectId, docId, commentId }, 'deleted comment via http') - res.sendStatus(204) - } - ) - }, // No Content - - updateProject(req, res, next) { - const timer = new Metrics.Timer('http.updateProject') - const projectId = req.params.project_id - const { - projectHistoryId, - userId, - docUpdates, - fileUpdates, - version - } = req.body - logger.log( - { projectId, docUpdates, fileUpdates, version }, - 'updating project via http' - ) - - ProjectManager.updateProjectWithLocks( - projectId, - projectHistoryId, - userId, - docUpdates, - fileUpdates, - version, - function (error) { - timer.done() - if (error) { - return next(error) - } - logger.log({ projectId }, 'updated project via http') - res.sendStatus(204) - } - ) - }, // No Content - - resyncProjectHistory(req, res, next) { - const projectId = req.params.project_id - const { projectHistoryId, docs, files } = req.body - - logger.log( - { projectId, docs, files }, - 'queuing project history resync via http' - ) - HistoryManager.resyncProjectHistory( - projectId, - projectHistoryId, - docs, - files, - function (error) { - if (error) { - return next(error) - } - logger.log({ projectId }, 'queued project history resync via http') - res.sendStatus(204) - } - ) - }, - - flushAllProjects(req, res, next) { - res.setTimeout(5 * 60 * 1000) - const options = { - limit: req.query.limit || 1000, - concurrency: req.query.concurrency || 5, - dryRun: req.query.dryRun || false - } - ProjectFlusher.flushAllProjects(options, function (err, projectIds) { - if (err) { - logger.err({ err }, 'error bulk flushing projects') - res.sendStatus(500) - } else { - res.send(projectIds) - } - }) - }, - - flushQueuedProjects(req, res, next) { - res.setTimeout(10 * 60 * 1000) - const options = { - limit: req.query.limit || 1000, - timeout: 5 * 60 * 1000, - min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 - } - DeleteQueueManager.flushAndDeleteOldProjects(options, function ( - err, - flushed - ) { - if (err) { - logger.err({ err }, 'error flushing old projects') - res.sendStatus(500) - } else { - logger.log({ flushed }, 'flush of queued projects completed') - res.send({ flushed }) - } - }) - } + ) +} + +function resyncProjectHistory(req, res, next) { + const projectId = req.params.project_id + const { projectHistoryId, docs, files } = req.body + + logger.log( + { projectId, docs, files }, + 'queuing project history resync via http' + ) + HistoryManager.resyncProjectHistory( + projectId, + projectHistoryId, + docs, + files, + function (error) { + if 
(error) { + return next(error) + } + logger.log({ projectId }, 'queued project history resync via http') + res.sendStatus(204) + } + ) +} + +function flushAllProjects(req, res, next) { + res.setTimeout(5 * 60 * 1000) + const options = { + limit: req.query.limit || 1000, + concurrency: req.query.concurrency || 5, + dryRun: req.query.dryRun || false + } + ProjectFlusher.flushAllProjects(options, function (err, projectIds) { + if (err) { + logger.err({ err }, 'error bulk flushing projects') + res.sendStatus(500) + } else { + res.send(projectIds) + } + }) +} + +function flushQueuedProjects(req, res, next) { + res.setTimeout(10 * 60 * 1000) + const options = { + limit: req.query.limit || 1000, + timeout: 5 * 60 * 1000, + min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 + } + DeleteQueueManager.flushAndDeleteOldProjects(options, function ( + err, + flushed + ) { + if (err) { + logger.err({ err }, 'error flushing old projects') + res.sendStatus(500) + } else { + logger.log({ flushed }, 'flush of queued projects completed') + res.send({ flushed }) + } + }) } From cb959ddfc168015773d170fa68f1e54aadfa5e3f Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 11:14:37 -0400 Subject: [PATCH 657/769] Decaf cleanup: use arrow functions for callbacks --- .../document-updater/app/js/HttpController.js | 99 ++++++++++--------- 1 file changed, 52 insertions(+), 47 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 5895bececd..2e8bd084a5 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -45,7 +45,7 @@ function getDoc(req, res, next) { projectId, docId, fromVersion, - function (error, lines, version, ops, ranges, pathname) { + (error, lines, version, ops, ranges, pathname) => { timer.done() if (error) { return next(error) @@ -95,7 +95,7 @@ function getProjectDocsAndFlushIfOld(req, res, next) { projectId, projectStateHash, excludeVersions, - function (error, result) { + (error, result) => { timer.done() if (error instanceof Errors.ProjectStateChangedError) { res.sendStatus(409) // conflict @@ -119,7 +119,7 @@ function clearProjectState(req, res, next) { const projectId = req.params.project_id const timer = new Metrics.Timer('http.clearProjectState') logger.log({ projectId }, 'clearing project state via http') - ProjectManager.clearProjectState(projectId, function (error) { + ProjectManager.clearProjectState(projectId, (error) => { timer.done() if (error) { next(error) @@ -153,7 +153,7 @@ function setDoc(req, res, next) { source, userId, undoing, - function (error) { + (error) => { timer.done() if (error) { return next(error) @@ -169,7 +169,7 @@ function flushDocIfLoaded(req, res, next) { const projectId = req.params.project_id logger.log({ projectId, docId }, 'flushing doc via http') const timer = new Metrics.Timer('http.flushDoc') - DocumentManager.flushDocIfLoadedWithLock(projectId, docId, function (error) { + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => { timer.done() if (error) { return next(error) @@ -189,7 +189,7 @@ function deleteDoc(req, res, next) { projectId, docId, { ignoreFlushErrors }, - function (error) { + (error) => { timer.done() // There is no harm in flushing project history if the previous call // failed and sometimes it is required @@ -208,7 +208,7 @@ function flushProject(req, res, next) { const projectId = req.params.project_id logger.log({ projectId }, 'flushing project via http') 
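
Patches 656 and 657 are best read together: the first lifts every method off the module.exports object literal into a top-level function declaration with an explicit export list, and the second converts the remaining function-expression callbacks into arrows. Neither changes behaviour, since none of these callbacks use their own this or arguments binding. A condensed sketch of both moves applied to flushProject (names from the patches; bodies abridged):

    // Before: method on the exported object, function-expression callback.
    module.exports = HttpController = {
      flushProject(req, res, next) {
        const projectId = req.params.project_id
        ProjectManager.flushProjectWithLocks(projectId, function (error) {
          if (error) {
            return next(error)
          }
          res.sendStatus(204) // No Content
        })
      }
    }

    // After: top-level declaration, arrow callback, named export.
    function flushProject(req, res, next) {
      const projectId = req.params.project_id
      ProjectManager.flushProjectWithLocks(projectId, (error) => {
        if (error) {
          return next(error)
        }
        res.sendStatus(204) // No Content
      })
    }

    module.exports = { flushProject }

A side effect of the explicit export list is that internal helpers such as _getTotalSizeOfLines become private by default: only the names listed are exported, and setDoc now calls the helper directly instead of through HttpController.
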
const timer = new Metrics.Timer('http.flushProject') - ProjectManager.flushProjectWithLocks(projectId, function (error) { + ProjectManager.flushProjectWithLocks(projectId, (error) => { timer.done() if (error) { return next(error) @@ -229,7 +229,7 @@ function deleteProject(req, res, next) { options.skip_history_flush = true } // don't flush history when realtime shuts down if (req.query.background) { - ProjectManager.queueFlushAndDeleteProject(projectId, function (error) { + ProjectManager.queueFlushAndDeleteProject(projectId, (error) => { if (error) { return next(error) } @@ -238,16 +238,18 @@ function deleteProject(req, res, next) { }) // No Content } else { const timer = new Metrics.Timer('http.deleteProject') - ProjectManager.flushAndDeleteProjectWithLocks(projectId, options, function ( - error - ) { - timer.done() - if (error) { - return next(error) + ProjectManager.flushAndDeleteProjectWithLocks( + projectId, + options, + (error) => { + timer.done() + if (error) { + return next(error) + } + logger.log({ projectId }, 'deleted project via http') + res.sendStatus(204) // No Content } - logger.log({ projectId }, 'deleted project via http') - res.sendStatus(204) // No Content - }) + ) } } @@ -256,11 +258,11 @@ function deleteMultipleProjects(req, res, next) { logger.log({ projectIds }, 'deleting multiple projects via http') async.eachSeries( projectIds, - function (projectId, cb) { + (projectId, cb) => { logger.log({ projectId }, 'queue delete of project via http') ProjectManager.queueFlushAndDeleteProject(projectId, cb) }, - function (error) { + (error) => { if (error) { return next(error) } @@ -280,19 +282,22 @@ function acceptChanges(req, res, next) { `accepting ${changeIds.length} changes via http` ) const timer = new Metrics.Timer('http.acceptChanges') - DocumentManager.acceptChangesWithLock(projectId, docId, changeIds, function ( - error - ) { - timer.done() - if (error) { - return next(error) + DocumentManager.acceptChangesWithLock( + projectId, + docId, + changeIds, + (error) => { + timer.done() + if (error) { + return next(error) + } + logger.log( + { projectId, docId }, + `accepted ${changeIds.length} changes via http` + ) + res.sendStatus(204) // No Content } - logger.log( - { projectId, docId }, - `accepted ${changeIds.length} changes via http` - ) - res.sendStatus(204) // No Content - }) + ) } function deleteComment(req, res, next) { @@ -303,16 +308,19 @@ function deleteComment(req, res, next) { } = req.params logger.log({ projectId, docId, commentId }, 'deleting comment via http') const timer = new Metrics.Timer('http.deleteComment') - DocumentManager.deleteCommentWithLock(projectId, docId, commentId, function ( - error - ) { - timer.done() - if (error) { - return next(error) + DocumentManager.deleteCommentWithLock( + projectId, + docId, + commentId, + (error) => { + timer.done() + if (error) { + return next(error) + } + logger.log({ projectId, docId, commentId }, 'deleted comment via http') + res.sendStatus(204) // No Content } - logger.log({ projectId, docId, commentId }, 'deleted comment via http') - res.sendStatus(204) // No Content - }) + ) } function updateProject(req, res, next) { @@ -337,7 +345,7 @@ function updateProject(req, res, next) { docUpdates, fileUpdates, version, - function (error) { + (error) => { timer.done() if (error) { return next(error) @@ -361,7 +369,7 @@ function resyncProjectHistory(req, res, next) { projectHistoryId, docs, files, - function (error) { + (error) => { if (error) { return next(error) } @@ -378,7 +386,7 @@ function flushAllProjects(req, 
res, next) { concurrency: req.query.concurrency || 5, dryRun: req.query.dryRun || false } - ProjectFlusher.flushAllProjects(options, function (err, projectIds) { + ProjectFlusher.flushAllProjects(options, (err, projectIds) => { if (err) { logger.err({ err }, 'error bulk flushing projects') res.sendStatus(500) @@ -395,10 +403,7 @@ function flushQueuedProjects(req, res, next) { timeout: 5 * 60 * 1000, min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 } - DeleteQueueManager.flushAndDeleteOldProjects(options, function ( - err, - flushed - ) { + DeleteQueueManager.flushAndDeleteOldProjects(options, (err, flushed) => { if (err) { logger.err({ err }, 'error flushing old projects') res.sendStatus(500) From a2a1914a53342ba20b4ca0ef2c75a27b5493242d Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 11:15:37 -0400 Subject: [PATCH 658/769] Use max_doc_length setting to limit incoming doc size --- services/document-updater/app/js/HttpController.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 2e8bd084a5..646a8578df 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -3,13 +3,12 @@ const HistoryManager = require('./HistoryManager') const ProjectManager = require('./ProjectManager') const Errors = require('./Errors') const logger = require('logger-sharelatex') +const Settings = require('settings-sharelatex') const Metrics = require('./Metrics') const ProjectFlusher = require('./ProjectFlusher') const DeleteQueueManager = require('./DeleteQueueManager') const async = require('async') -const TWO_MEGABYTES = 2 * 1024 * 1024 - module.exports = { getDoc, getProjectDocsAndFlushIfOld, @@ -134,7 +133,7 @@ function setDoc(req, res, next) { const projectId = req.params.project_id const { lines, source, user_id: userId, undoing } = req.body const lineSize = _getTotalSizeOfLines(lines) - if (lineSize > TWO_MEGABYTES) { + if (lineSize > Settings.max_doc_length) { logger.log( { projectId, docId, source, lineSize, userId }, 'document too large, returning 406 response' From 526ef25fcf5ef8069b40d66178539978846ae7de Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 11:20:55 -0400 Subject: [PATCH 659/769] Decaf cleanup: unnecessary returns --- .../js/HttpController/HttpControllerTests.js | 341 +++++++++--------- 1 file changed, 162 insertions(+), 179 deletions(-) diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 8f4125fcfa..18c74691bc 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -6,7 +6,6 @@ // Fix any style issues and re-enable lint. 
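
Patch 658 above replaces the hard-coded TWO_MEGABYTES constant with Settings.max_doc_length, so the setDoc size cut-off follows the service-wide configuration instead of a compile-time constant. A sketch of the consuming side; the settings entry shown is an assumption for illustration (its 2 MB default mirrors the constant it replaces), not taken from the patch:

    // Assumed entry in the service settings file (illustrative only):
    //   max_doc_length: 2 * 1024 * 1024  // bytes, was TWO_MEGABYTES
    const Settings = require('settings-sharelatex')

    // setDoc rejects oversized payloads up front, before taking any locks:
    const lineSize = _getTotalSizeOfLines(lines)
    if (lineSize > Settings.max_doc_length) {
      return res.sendStatus(406) // Not Acceptable: document too large
    }

The 406 response mirrors the previous behaviour; only the source of the threshold changes.
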
/* * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ @@ -46,11 +45,11 @@ describe('HttpController', function () { this.project_id = 'project-id-123' this.doc_id = 'doc-id-123' this.next = sinon.stub() - return (this.res = { + this.res = { send: sinon.stub(), sendStatus: sinon.stub(), json: sinon.stub() - }) + } }) describe('getDoc', function () { @@ -61,12 +60,12 @@ describe('HttpController', function () { this.fromVersion = 42 this.ranges = { changes: 'mock', comments: 'mock' } this.pathname = '/a/b/c' - return (this.req = { + this.req = { params: { project_id: this.project_id, doc_id: this.doc_id } - }) + } }) describe('when the document exists and no recent ops are requested', function () { @@ -82,17 +81,17 @@ describe('HttpController', function () { this.ranges, this.pathname ) - return this.HttpController.getDoc(this.req, this.res, this.next) + this.HttpController.getDoc(this.req, this.res, this.next) }) it('should get the doc', function () { - return this.DocumentManager.getDocAndRecentOpsWithLock + this.DocumentManager.getDocAndRecentOpsWithLock .calledWith(this.project_id, this.doc_id, -1) .should.equal(true) }) it('should return the doc as JSON', function () { - return this.res.json + this.res.json .calledWith({ id: this.doc_id, lines: this.lines, @@ -105,7 +104,7 @@ describe('HttpController', function () { }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { doc_id: this.doc_id, project_id: this.project_id }, 'getting doc via http' @@ -113,8 +112,8 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) @@ -132,17 +131,17 @@ describe('HttpController', function () { this.pathname ) this.req.query = { fromVersion: `${this.fromVersion}` } - return this.HttpController.getDoc(this.req, this.res, this.next) + this.HttpController.getDoc(this.req, this.res, this.next) }) it('should get the doc', function () { - return this.DocumentManager.getDocAndRecentOpsWithLock + this.DocumentManager.getDocAndRecentOpsWithLock .calledWith(this.project_id, this.doc_id, this.fromVersion) .should.equal(true) }) it('should return the doc as JSON', function () { - return this.res.json + this.res.json .calledWith({ id: this.doc_id, lines: this.lines, @@ -155,7 +154,7 @@ describe('HttpController', function () { }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { doc_id: this.doc_id, project_id: this.project_id }, 'getting doc via http' @@ -163,8 +162,8 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) @@ -173,26 +172,26 @@ describe('HttpController', function () { this.DocumentManager.getDocAndRecentOpsWithLock = sinon .stub() .callsArgWith(3, null, null, null) - return this.HttpController.getDoc(this.req, this.res, this.next) + this.HttpController.getDoc(this.req, this.res, this.next) }) - return it('should call next with 
NotFoundError', function () { - return this.next + it('should call next with NotFoundError', function () { + this.next .calledWith(new Errors.NotFoundError('not found')) .should.equal(true) }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.DocumentManager.getDocAndRecentOpsWithLock = sinon .stub() .callsArgWith(3, new Error('oops'), null, null) - return this.HttpController.getDoc(this.req, this.res, this.next) + this.HttpController.getDoc(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) @@ -202,7 +201,7 @@ describe('HttpController', function () { this.lines = ['one', 'two', 'three'] this.source = 'dropbox' this.user_id = 'user-id-123' - return (this.req = { + this.req = { headers: {}, params: { project_id: this.project_id, @@ -214,17 +213,17 @@ describe('HttpController', function () { user_id: this.user_id, undoing: (this.undoing = true) } - }) + } }) describe('successfully', function () { beforeEach(function () { this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6) - return this.HttpController.setDoc(this.req, this.res, this.next) + this.HttpController.setDoc(this.req, this.res, this.next) }) it('should set the doc', function () { - return this.DocumentManager.setDocWithLock + this.DocumentManager.setDocWithLock .calledWith( this.project_id, this.doc_id, @@ -237,11 +236,11 @@ describe('HttpController', function () { }) it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + this.res.sendStatus.calledWith(204).should.equal(true) }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { doc_id: this.doc_id, @@ -256,8 +255,8 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) @@ -266,15 +265,15 @@ describe('HttpController', function () { this.DocumentManager.setDocWithLock = sinon .stub() .callsArgWith(6, new Error('oops')) - return this.HttpController.setDoc(this.req, this.res, this.next) + this.HttpController.setDoc(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) - return describe('when the payload is too large', function () { + describe('when the payload is too large', function () { beforeEach(function () { const lines = [] for (let _ = 0; _ <= 200000; _++) { @@ -282,46 +281,46 @@ describe('HttpController', function () { } this.req.body.lines = lines this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6) - return this.HttpController.setDoc(this.req, this.res, this.next) + this.HttpController.setDoc(this.req, this.res, this.next) }) it('should send back a 406 response', function () { - return this.res.sendStatus.calledWith(406).should.equal(true) + this.res.sendStatus.calledWith(406).should.equal(true) }) - return it('should not call 
setDocWithLock', function () { - return this.DocumentManager.setDocWithLock.callCount.should.equal(0) + it('should not call setDocWithLock', function () { + this.DocumentManager.setDocWithLock.callCount.should.equal(0) }) }) }) describe('flushProject', function () { beforeEach(function () { - return (this.req = { + this.req = { params: { project_id: this.project_id } - }) + } }) describe('successfully', function () { beforeEach(function () { this.ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1) - return this.HttpController.flushProject(this.req, this.res, this.next) + this.HttpController.flushProject(this.req, this.res, this.next) }) it('should flush the project', function () { - return this.ProjectManager.flushProjectWithLocks + this.ProjectManager.flushProjectWithLocks .calledWith(this.project_id) .should.equal(true) }) it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + this.res.sendStatus.calledWith(204).should.equal(true) }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { project_id: this.project_id }, 'flushing project via http' @@ -329,21 +328,21 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.ProjectManager.flushProjectWithLocks = sinon .stub() .callsArgWith(1, new Error('oops')) - return this.HttpController.flushProject(this.req, this.res, this.next) + this.HttpController.flushProject(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) @@ -352,12 +351,12 @@ describe('HttpController', function () { beforeEach(function () { this.lines = ['one', 'two', 'three'] this.version = 42 - return (this.req = { + this.req = { params: { project_id: this.project_id, doc_id: this.doc_id } - }) + } }) describe('successfully', function () { @@ -365,25 +364,21 @@ describe('HttpController', function () { this.DocumentManager.flushDocIfLoadedWithLock = sinon .stub() .callsArgWith(2) - return this.HttpController.flushDocIfLoaded( - this.req, - this.res, - this.next - ) + this.HttpController.flushDocIfLoaded(this.req, this.res, this.next) }) it('should flush the doc', function () { - return this.DocumentManager.flushDocIfLoadedWithLock + this.DocumentManager.flushDocIfLoadedWithLock .calledWith(this.project_id, this.doc_id) .should.equal(true) }) it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + this.res.sendStatus.calledWith(204).should.equal(true) }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { doc_id: this.doc_id, project_id: this.project_id }, 'flushing doc via http' @@ -391,38 +386,34 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the 
request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.DocumentManager.flushDocIfLoadedWithLock = sinon .stub() .callsArgWith(2, new Error('oops')) - return this.HttpController.flushDocIfLoaded( - this.req, - this.res, - this.next - ) + this.HttpController.flushDocIfLoaded(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) describe('deleteDoc', function () { beforeEach(function () { - return (this.req = { + this.req = { params: { project_id: this.project_id, doc_id: this.doc_id }, query: {} - }) + } }) describe('successfully', function () { @@ -430,11 +421,11 @@ describe('HttpController', function () { this.DocumentManager.flushAndDeleteDocWithLock = sinon .stub() .callsArgWith(3) - return this.HttpController.deleteDoc(this.req, this.res, this.next) + this.HttpController.deleteDoc(this.req, this.res, this.next) }) it('should flush and delete the doc', function () { - return this.DocumentManager.flushAndDeleteDocWithLock + this.DocumentManager.flushAndDeleteDocWithLock .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: false }) @@ -442,17 +433,17 @@ describe('HttpController', function () { }) it('should flush project history', function () { - return this.HistoryManager.flushProjectChangesAsync + this.HistoryManager.flushProjectChangesAsync .calledWithExactly(this.project_id) .should.equal(true) }) it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + this.res.sendStatus.calledWith(204).should.equal(true) }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { doc_id: this.doc_id, project_id: this.project_id }, 'deleting doc via http' @@ -460,8 +451,8 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) @@ -469,47 +460,47 @@ describe('HttpController', function () { beforeEach(function () { this.req.query.ignore_flush_errors = 'true' this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields() - return this.HttpController.deleteDoc(this.req, this.res, this.next) + this.HttpController.deleteDoc(this.req, this.res, this.next) }) it('should delete the doc', function () { - return this.DocumentManager.flushAndDeleteDocWithLock + this.DocumentManager.flushAndDeleteDocWithLock .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: true }) .should.equal(true) }) - return it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.DocumentManager.flushAndDeleteDocWithLock = sinon .stub() .callsArgWith(3, new Error('oops')) - return 
this.HttpController.deleteDoc(this.req, this.res, this.next) + this.HttpController.deleteDoc(this.req, this.res, this.next) }) it('should flush project history', function () { - return this.HistoryManager.flushProjectChangesAsync + this.HistoryManager.flushProjectChangesAsync .calledWithExactly(this.project_id) .should.equal(true) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) describe('deleteProject', function () { beforeEach(function () { - return (this.req = { + this.req = { params: { project_id: this.project_id } - }) + } }) describe('successfully', function () { @@ -517,21 +508,21 @@ describe('HttpController', function () { this.ProjectManager.flushAndDeleteProjectWithLocks = sinon .stub() .callsArgWith(2) - return this.HttpController.deleteProject(this.req, this.res, this.next) + this.HttpController.deleteProject(this.req, this.res, this.next) }) it('should delete the project', function () { - return this.ProjectManager.flushAndDeleteProjectWithLocks + this.ProjectManager.flushAndDeleteProjectWithLocks .calledWith(this.project_id) .should.equal(true) }) it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + this.res.sendStatus.calledWith(204).should.equal(true) }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { project_id: this.project_id }, 'deleting project via http' @@ -539,8 +530,8 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) @@ -550,39 +541,39 @@ describe('HttpController', function () { .stub() .callsArgWith(1) this.req.query = { background: true, shutdown: true } - return this.HttpController.deleteProject(this.req, this.res, this.next) + this.HttpController.deleteProject(this.req, this.res, this.next) }) - return it('should queue the flush and delete', function () { - return this.ProjectManager.queueFlushAndDeleteProject + it('should queue the flush and delete', function () { + this.ProjectManager.queueFlushAndDeleteProject .calledWith(this.project_id) .should.equal(true) }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.ProjectManager.flushAndDeleteProjectWithLocks = sinon .stub() .callsArgWith(2, new Error('oops')) - return this.HttpController.deleteProject(this.req, this.res, this.next) + this.HttpController.deleteProject(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) describe('acceptChanges', function () { beforeEach(function () { - return (this.req = { + this.req = { params: { project_id: this.project_id, doc_id: this.doc_id, change_id: (this.change_id = 'mock-change-od-1') } - }) + } }) describe('successfully with a single change', function () { @@ -590,21 +581,21 @@ describe('HttpController', function () { this.DocumentManager.acceptChangesWithLock = sinon 
.stub() .callsArgWith(3) - return this.HttpController.acceptChanges(this.req, this.res, this.next) + this.HttpController.acceptChanges(this.req, this.res, this.next) }) it('should accept the change', function () { - return this.DocumentManager.acceptChangesWithLock + this.DocumentManager.acceptChangesWithLock .calledWith(this.project_id, this.doc_id, [this.change_id]) .should.equal(true) }) it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + this.res.sendStatus.calledWith(204).should.equal(true) }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { project_id: this.project_id, doc_id: this.doc_id }, 'accepting 1 changes via http' @@ -612,8 +603,8 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) @@ -629,17 +620,17 @@ describe('HttpController', function () { this.DocumentManager.acceptChangesWithLock = sinon .stub() .callsArgWith(3) - return this.HttpController.acceptChanges(this.req, this.res, this.next) + this.HttpController.acceptChanges(this.req, this.res, this.next) }) it('should accept the changes in the body payload', function () { - return this.DocumentManager.acceptChangesWithLock + this.DocumentManager.acceptChangesWithLock .calledWith(this.project_id, this.doc_id, this.change_ids) .should.equal(true) }) - return it('should log the request with the correct number of changes', function () { - return this.logger.log + it('should log the request with the correct number of changes', function () { + this.logger.log .calledWith( { project_id: this.project_id, doc_id: this.doc_id }, `accepting ${this.change_ids.length} changes via http` @@ -648,29 +639,29 @@ describe('HttpController', function () { }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.DocumentManager.acceptChangesWithLock = sinon .stub() .callsArgWith(3, new Error('oops')) - return this.HttpController.acceptChanges(this.req, this.res, this.next) + this.HttpController.acceptChanges(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) describe('deleteComment', function () { beforeEach(function () { - return (this.req = { + this.req = { params: { project_id: this.project_id, doc_id: this.doc_id, comment_id: (this.comment_id = 'mock-comment-id') } - }) + } }) describe('successfully', function () { @@ -678,21 +669,21 @@ describe('HttpController', function () { this.DocumentManager.deleteCommentWithLock = sinon .stub() .callsArgWith(3) - return this.HttpController.deleteComment(this.req, this.res, this.next) + this.HttpController.deleteComment(this.req, this.res, this.next) }) it('should accept the change', function () { - return this.DocumentManager.deleteCommentWithLock + this.DocumentManager.deleteCommentWithLock .calledWith(this.project_id, this.doc_id, this.comment_id) .should.equal(true) }) it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + 
this.res.sendStatus.calledWith(204).should.equal(true) }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { project_id: this.project_id, @@ -704,21 +695,21 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.DocumentManager.deleteCommentWithLock = sinon .stub() .callsArgWith(3, new Error('oops')) - return this.HttpController.deleteComment(this.req, this.res, this.next) + this.HttpController.deleteComment(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) @@ -730,14 +721,14 @@ describe('HttpController', function () { { _id: '1234', lines: 'hello', v: 23 }, { _id: '4567', lines: 'world', v: 45 } ] - return (this.req = { + this.req = { params: { project_id: this.project_id }, query: { state: this.state } - }) + } }) describe('successfully', function () { @@ -745,7 +736,7 @@ describe('HttpController', function () { this.ProjectManager.getProjectDocsAndFlushIfOld = sinon .stub() .callsArgWith(3, null, this.docs) - return this.HttpController.getProjectDocsAndFlushIfOld( + this.HttpController.getProjectDocsAndFlushIfOld( this.req, this.res, this.next @@ -753,17 +744,17 @@ describe('HttpController', function () { }) it('should get docs from the project manager', function () { - return this.ProjectManager.getProjectDocsAndFlushIfOld + this.ProjectManager.getProjectDocsAndFlushIfOld .calledWith(this.project_id, this.state, {}) .should.equal(true) }) it('should return a successful response', function () { - return this.res.send.calledWith(this.docs).should.equal(true) + this.res.send.calledWith(this.docs).should.equal(true) }) it('should log the request', function () { - return this.logger.log + this.logger.log .calledWith( { project_id: this.project_id, exclude: [] }, 'getting docs via http' @@ -772,7 +763,7 @@ describe('HttpController', function () { }) it('should log the response', function () { - return this.logger.log + this.logger.log .calledWith( { project_id: this.project_id, result: ['1234:23', '4567:45'] }, 'got docs via http' @@ -780,8 +771,8 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) @@ -793,32 +784,32 @@ describe('HttpController', function () { 3, new Errors.ProjectStateChangedError('project state changed') ) - return this.HttpController.getProjectDocsAndFlushIfOld( + this.HttpController.getProjectDocsAndFlushIfOld( this.req, this.res, this.next ) }) - return it('should return an HTTP 409 Conflict response', function () { - return this.res.sendStatus.calledWith(409).should.equal(true) + it('should return an HTTP 409 Conflict response', function () { + this.res.sendStatus.calledWith(409).should.equal(true) }) }) - return describe('when an error occurs', function () { + 
describe('when an error occurs', function () { beforeEach(function () { this.ProjectManager.getProjectDocsAndFlushIfOld = sinon .stub() .callsArgWith(3, new Error('oops')) - return this.HttpController.getProjectDocsAndFlushIfOld( + this.HttpController.getProjectDocsAndFlushIfOld( this.req, this.res, this.next ) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) @@ -830,7 +821,7 @@ describe('HttpController', function () { this.docUpdates = sinon.stub() this.fileUpdates = sinon.stub() this.version = 1234567 - return (this.req = { + this.req = { body: { projectHistoryId: this.projectHistoryId, userId: this.userId, @@ -841,7 +832,7 @@ describe('HttpController', function () { params: { project_id: this.project_id } - }) + } }) describe('successfully', function () { @@ -849,11 +840,11 @@ describe('HttpController', function () { this.ProjectManager.updateProjectWithLocks = sinon .stub() .callsArgWith(6) - return this.HttpController.updateProject(this.req, this.res, this.next) + this.HttpController.updateProject(this.req, this.res, this.next) }) it('should accept the change', function () { - return this.ProjectManager.updateProjectWithLocks + this.ProjectManager.updateProjectWithLocks .calledWith( this.project_id, this.projectHistoryId, @@ -866,35 +857,35 @@ describe('HttpController', function () { }) it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + this.res.sendStatus.calledWith(204).should.equal(true) }) - return it('should time the request', function () { - return this.Metrics.Timer.prototype.done.called.should.equal(true) + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.ProjectManager.updateProjectWithLocks = sinon .stub() .callsArgWith(6, new Error('oops')) - return this.HttpController.updateProject(this.req, this.res, this.next) + this.HttpController.updateProject(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) - return describe('resyncProjectHistory', function () { + describe('resyncProjectHistory', function () { beforeEach(function () { this.projectHistoryId = 'history-id-123' this.docs = sinon.stub() this.files = sinon.stub() this.fileUpdates = sinon.stub() - return (this.req = { + this.req = { body: { projectHistoryId: this.projectHistoryId, docs: this.docs, @@ -903,21 +894,17 @@ describe('HttpController', function () { params: { project_id: this.project_id } - }) + } }) describe('successfully', function () { beforeEach(function () { this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4) - return this.HttpController.resyncProjectHistory( - this.req, - this.res, - this.next - ) + this.HttpController.resyncProjectHistory(this.req, this.res, this.next) }) it('should accept the change', function () { - return this.HistoryManager.resyncProjectHistory + this.HistoryManager.resyncProjectHistory .calledWith( this.project_id, this.projectHistoryId, @@ -927,25 
+914,21 @@ describe('HttpController', function () { .should.equal(true) }) - return it('should return a successful No Content response', function () { - return this.res.sendStatus.calledWith(204).should.equal(true) + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) }) }) - return describe('when an errors occurs', function () { + describe('when an errors occurs', function () { beforeEach(function () { this.HistoryManager.resyncProjectHistory = sinon .stub() .callsArgWith(4, new Error('oops')) - return this.HttpController.resyncProjectHistory( - this.req, - this.res, - this.next - ) + this.HttpController.resyncProjectHistory(this.req, this.res, this.next) }) - return it('should call next with the error', function () { - return this.next.calledWith(new Error('oops')).should.equal(true) + it('should call next with the error', function () { + this.next.calledWith(new Error('oops')).should.equal(true) }) }) }) From 3acb970442b1ee714869b6d973b9dc6caf653410 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 11:24:07 -0400 Subject: [PATCH 660/769] Decaf cleanup: simplify stubbed class --- .../js/HttpController/HttpControllerTests.js | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 18c74691bc..c913a107cd 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -2,13 +2,6 @@ no-return-assign, no-unused-vars, */ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
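Patch 660 replaces decaffeinate's initClass idiom used to stub the Metrics Timer. decaffeinate emits that wrapper because a CoffeeScript class body may contain executable statements (here, attaching a stub to the prototype), which had no direct ES6 class-body equivalent before class fields. A minimal sketch of the two forms, assuming sinon is available as in these tests; TimerBefore and TimerAfter are illustrative names, not identifiers from the repo:

const sinon = require('sinon')

// Before (decaffeinate output): an IIFE plus a static initClass() hook
// so the prototype assignment can run as class-level setup.
const TimerBefore = (function () {
  const Timer = class Timer {
    static initClass() {
      this.prototype.done = sinon.stub()
    }
  }
  Timer.initClass()
  return Timer
})()

// After: an empty class with the stub assigned directly to the prototype.
class TimerAfter {}
TimerAfter.prototype.done = sinon.stub()
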
-/* - * decaffeinate suggestions: - * DS206: Consider reworking classes to avoid initClass - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const sinon = require('sinon') const chai = require('chai') const should = chai.should() @@ -33,15 +26,9 @@ describe('HttpController', function () { './Errors': Errors } }) - this.Metrics.Timer = Timer = (function () { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub() - } - } - Timer.initClass() - return Timer - })() + this.Metrics.Timer = class Timer {} + Timer.prototype.done = sinon.stub() + this.project_id = 'project-id-123' this.doc_id = 'doc-id-123' this.next = sinon.stub() From 0b1c7e90af0feec57edd00f7d066ebefde06cec5 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 11:26:05 -0400 Subject: [PATCH 661/769] Decaf cleanup: remove unused variables --- .../test/unit/js/HttpController/HttpControllerTests.js | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index c913a107cd..091addc78a 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -1,17 +1,10 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/HttpController.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors.js') describe('HttpController', function () { beforeEach(function () { - let Timer this.HttpController = SandboxedModule.require(modulePath, { requires: { './DocumentManager': (this.DocumentManager = {}), @@ -27,7 +20,7 @@ describe('HttpController', function () { } }) this.Metrics.Timer = class Timer {} - Timer.prototype.done = sinon.stub() + this.Metrics.Timer.prototype.done = sinon.stub() this.project_id = 'project-id-123' this.doc_id = 'doc-id-123' From 16c0ed23db1fd88736d15bfd6ad22658f8a7e650 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 11 May 2020 11:37:59 -0400 Subject: [PATCH 662/769] Fix tests after decaf cleanup * Camel casing in logs * The Express request object always has query and body properties --- .../js/HttpController/HttpControllerTests.js | 65 ++++++++++++------- 1 file changed, 41 insertions(+), 24 deletions(-) diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 091addc78a..64751e55db 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -44,7 +44,9 @@ describe('HttpController', function () { params: { project_id: this.project_id, doc_id: this.doc_id - } + }, + query: {}, + body: {} } }) @@ -86,7 +88,7 @@ describe('HttpController', function () { it('should log the request', function () { this.logger.log .calledWith( - { doc_id: this.doc_id, project_id: this.project_id }, + { docId: this.doc_id, projectId: this.project_id }, 'getting doc via http' ) .should.equal(true) @@ -136,7 +138,7 @@ describe('HttpController', function () { it('should log the request', function () { this.logger.log .calledWith( - { doc_id: this.doc_id, 
project_id: this.project_id }, + { docId: this.doc_id, projectId: this.project_id }, 'getting doc via http' ) .should.equal(true) @@ -187,6 +189,7 @@ describe('HttpController', function () { project_id: this.project_id, doc_id: this.doc_id }, + query: {}, body: { lines: this.lines, source: this.source, @@ -223,11 +226,11 @@ describe('HttpController', function () { this.logger.log .calledWith( { - doc_id: this.doc_id, - project_id: this.project_id, + docId: this.doc_id, + projectId: this.project_id, lines: this.lines, source: this.source, - user_id: this.user_id, + userId: this.user_id, undoing: this.undoing }, 'setting doc via http' @@ -279,7 +282,9 @@ describe('HttpController', function () { this.req = { params: { project_id: this.project_id - } + }, + query: {}, + body: {} } }) @@ -302,7 +307,7 @@ describe('HttpController', function () { it('should log the request', function () { this.logger.log .calledWith( - { project_id: this.project_id }, + { projectId: this.project_id }, 'flushing project via http' ) .should.equal(true) @@ -335,7 +340,9 @@ describe('HttpController', function () { params: { project_id: this.project_id, doc_id: this.doc_id - } + }, + query: {}, + body: {} } }) @@ -360,7 +367,7 @@ describe('HttpController', function () { it('should log the request', function () { this.logger.log .calledWith( - { doc_id: this.doc_id, project_id: this.project_id }, + { docId: this.doc_id, projectId: this.project_id }, 'flushing doc via http' ) .should.equal(true) @@ -392,7 +399,8 @@ describe('HttpController', function () { project_id: this.project_id, doc_id: this.doc_id }, - query: {} + query: {}, + body: {} } }) @@ -425,7 +433,7 @@ describe('HttpController', function () { it('should log the request', function () { this.logger.log .calledWith( - { doc_id: this.doc_id, project_id: this.project_id }, + { docId: this.doc_id, projectId: this.project_id }, 'deleting doc via http' ) .should.equal(true) @@ -479,7 +487,9 @@ describe('HttpController', function () { this.req = { params: { project_id: this.project_id - } + }, + query: {}, + body: {} } }) @@ -504,7 +514,7 @@ describe('HttpController', function () { it('should log the request', function () { this.logger.log .calledWith( - { project_id: this.project_id }, + { projectId: this.project_id }, 'deleting project via http' ) .should.equal(true) @@ -552,7 +562,9 @@ describe('HttpController', function () { project_id: this.project_id, doc_id: this.doc_id, change_id: (this.change_id = 'mock-change-od-1') - } + }, + query: {}, + body: {} } }) @@ -577,7 +589,7 @@ describe('HttpController', function () { it('should log the request', function () { this.logger.log .calledWith( - { project_id: this.project_id, doc_id: this.doc_id }, + { projectId: this.project_id, docId: this.doc_id }, 'accepting 1 changes via http' ) .should.equal(true) @@ -612,7 +624,7 @@ describe('HttpController', function () { it('should log the request with the correct number of changes', function () { this.logger.log .calledWith( - { project_id: this.project_id, doc_id: this.doc_id }, + { projectId: this.project_id, docId: this.doc_id }, `accepting ${this.change_ids.length} changes via http` ) .should.equal(true) @@ -640,7 +652,9 @@ describe('HttpController', function () { project_id: this.project_id, doc_id: this.doc_id, comment_id: (this.comment_id = 'mock-comment-id') - } + }, + query: {}, + body: {} } }) @@ -666,9 +680,9 @@ describe('HttpController', function () { this.logger.log .calledWith( { - project_id: this.project_id, - doc_id: this.doc_id, - comment_id: 
this.comment_id + projectId: this.project_id, + docId: this.doc_id, + commentId: this.comment_id }, 'deleting comment via http' ) @@ -707,7 +721,8 @@ describe('HttpController', function () { }, query: { state: this.state - } + }, + body: {} } }) @@ -736,7 +751,7 @@ describe('HttpController', function () { it('should log the request', function () { this.logger.log .calledWith( - { project_id: this.project_id, exclude: [] }, + { projectId: this.project_id, exclude: [] }, 'getting docs via http' ) .should.equal(true) @@ -745,7 +760,7 @@ describe('HttpController', function () { it('should log the response', function () { this.logger.log .calledWith( - { project_id: this.project_id, result: ['1234:23', '4567:45'] }, + { projectId: this.project_id, result: ['1234:23', '4567:45'] }, 'got docs via http' ) .should.equal(true) @@ -802,6 +817,7 @@ describe('HttpController', function () { this.fileUpdates = sinon.stub() this.version = 1234567 this.req = { + query: {}, body: { projectHistoryId: this.projectHistoryId, userId: this.userId, @@ -866,6 +882,7 @@ describe('HttpController', function () { this.files = sinon.stub() this.fileUpdates = sinon.stub() this.req = { + query: {}, body: { projectHistoryId: this.projectHistoryId, docs: this.docs, From 819aa378d99bb749804dd190d613159a3bbce56b Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 May 2020 16:32:05 -0400 Subject: [PATCH 663/769] Decaf cleanup: remove Array.from() --- .../document-updater/app/js/ProjectManager.js | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index 2955904adb..d48cdef0d2 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -7,7 +7,6 @@ // Fix any style issues and re-enable lint. 
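Patch 663 targets decaffeinate's DS101 artifact: to reproduce CoffeeScript's null-tolerant comprehensions, it wraps every loop target in Array.from(x || []) and spreads callback arguments the same way (_callback(...Array.from(args || []))). Since rest parameters are always arrays and getDocIdsInProject always yields an array, the guards are dead code. A minimal sketch of the pattern; flushAllBefore, flushAllAfter, and flushOne are hypothetical names for illustration only:

// Before (decaffeinate output): guard against null or non-array values,
// mirroring CoffeeScript's forgiving loop semantics.
function flushAllBefore(docIds, flushOne) {
  for (const docId of Array.from(docIds || [])) {
    flushOne(docId)
  }
}

// After: callers are known to pass a real array, so iterate it directly.
// The same reasoning turns `_callback(...Array.from(args || []))` into
// `_callback(...args)`, because rest parameters are always arrays.
function flushAllAfter(docIds, flushOne) {
  for (const docId of docIds) {
    flushOne(docId)
  }
}
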
/* * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md @@ -30,7 +29,7 @@ module.exports = ProjectManager = { const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') const callback = function (...args) { timer.done() - return _callback(...Array.from(args || [])) + return _callback(...args) } return RedisManager.getDocIdsInProject(project_id, function ( @@ -42,7 +41,7 @@ module.exports = ProjectManager = { } const jobs = [] const errors = [] - for (const doc_id of Array.from(doc_ids || [])) { + for (const doc_id of doc_ids) { ;((doc_id) => jobs.push((callback) => DocumentManager.flushDocIfLoadedWithLock( @@ -92,7 +91,7 @@ module.exports = ProjectManager = { ) const callback = function (...args) { timer.done() - return _callback(...Array.from(args || [])) + return _callback(...args) } return RedisManager.getDocIdsInProject(project_id, function ( @@ -104,7 +103,7 @@ module.exports = ProjectManager = { } const jobs = [] const errors = [] - for (const doc_id of Array.from(doc_ids || [])) { + for (const doc_id of doc_ids) { ;((doc_id) => jobs.push((callback) => DocumentManager.flushAndDeleteDocWithLock( @@ -210,7 +209,7 @@ module.exports = ProjectManager = { ) const callback = function (...args) { timer.done() - return _callback(...Array.from(args || [])) + return _callback(...args) } return RedisManager.checkOrSetProjectState( @@ -243,7 +242,7 @@ module.exports = ProjectManager = { return callback(error) } const jobs = [] - for (const doc_id of Array.from(doc_ids || [])) { + for (const doc_id of doc_ids) { ;((doc_id) => jobs.push(( cb // get the doc lines from redis @@ -298,7 +297,7 @@ module.exports = ProjectManager = { const timer = new Metrics.Timer('projectManager.updateProject') const callback = function (...args) { timer.done() - return _callback(...Array.from(args || [])) + return _callback(...args) } const project_version = version @@ -389,5 +388,5 @@ module.exports = ProjectManager = { return callback() }) }) - } + }, } From 2bff83137c8ea61abf4fc76579396425f86b0b3d Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 May 2020 16:35:10 -0400 Subject: [PATCH 664/769] Decaf cleanup: unnecessary returns --- .../document-updater/app/js/ProjectManager.js | 183 ++++++++---------- 1 file changed, 80 insertions(+), 103 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index d48cdef0d2..cc39f3f399 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -7,7 +7,6 @@ // Fix any style issues and re-enable lint. 
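Patch 664 addresses DS102: CoffeeScript implicitly returns the last expression of every function, so decaffeinate prefixes trailing calls with `return` even where the value is discarded. The cleanup keeps `return` only as an early exit, the usual convention in Node-style callback code (the `error != null` checks are left for the later null-check patch). A sketch of the transformation; the wrapper functions and the redis parameter are hypothetical stand-ins for the getDocTimestamps code path:

// Before (decaffeinate output): every tail call carries a `return`.
function getTimestampsBefore(redis, docIds, callback) {
  return redis.getDocTimestamps(docIds, function (error, timestamps) {
    if (error != null) {
      return callback(error)
    }
    return callback(null, timestamps)
  })
}

// After: `return` survives only where it short-circuits on error.
function getTimestampsAfter(redis, docIds, callback) {
  redis.getDocTimestamps(docIds, function (error, timestamps) {
    if (error != null) {
      return callback(error)
    }
    callback(null, timestamps)
  })
}
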
/* * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ @@ -29,13 +28,10 @@ module.exports = ProjectManager = { const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') const callback = function (...args) { timer.done() - return _callback(...args) + _callback(...args) } - return RedisManager.getDocIdsInProject(project_id, function ( - error, - doc_ids - ) { + RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { if (error != null) { return callback(error) } @@ -53,16 +49,16 @@ module.exports = ProjectManager = { { err: error, project_id, doc_id }, 'found deleted doc when flushing' ) - return callback() + callback() } else if (error != null) { logger.error( { err: error, project_id, doc_id }, 'error flushing doc' ) errors.push(error) - return callback() + callback() } else { - return callback() + callback() } } ) @@ -70,13 +66,11 @@ module.exports = ProjectManager = { } logger.log({ project_id, doc_ids }, 'flushing docs') - return async.series(jobs, function () { + async.series(jobs, function () { if (errors.length > 0) { - return callback( - new Error('Errors flushing docs. See log for details') - ) + callback(new Error('Errors flushing docs. See log for details')) } else { - return callback(null) + callback(null) } }) }) @@ -91,13 +85,10 @@ module.exports = ProjectManager = { ) const callback = function (...args) { timer.done() - return _callback(...args) + _callback(...args) } - return RedisManager.getDocIdsInProject(project_id, function ( - error, - doc_ids - ) { + RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { if (error != null) { return callback(error) } @@ -118,14 +109,14 @@ module.exports = ProjectManager = { ) errors.push(error) } - return callback() + callback() } ) ))(doc_id) } logger.log({ project_id, doc_ids }, 'deleting docs') - return async.series(jobs, () => + async.series(jobs, () => // When deleting the project here we want to ensure that project // history is completely flushed because the project may be // deleted in web after this call completes, and so further @@ -134,13 +125,11 @@ module.exports = ProjectManager = { error ) { if (errors.length > 0) { - return callback( - new Error('Errors deleting docs. See log for details') - ) + callback(new Error('Errors deleting docs. See log for details')) } else if (error != null) { - return callback(error) + callback(error) } else { - return callback(null) + callback(null) } }) ) @@ -151,9 +140,7 @@ module.exports = ProjectManager = { if (callback == null) { callback = function (error) {} } - return RedisManager.queueFlushAndDeleteProject(project_id, function ( - error - ) { + RedisManager.queueFlushAndDeleteProject(project_id, function (error) { if (error != null) { logger.error( { project_id, error }, @@ -162,7 +149,7 @@ module.exports = ProjectManager = { return callback(error) } Metrics.inc('queued-delete') - return callback() + callback() }) }, @@ -170,24 +157,18 @@ module.exports = ProjectManager = { if (callback == null) { callback = function (error) {} } - return RedisManager.getDocIdsInProject(project_id, function ( - error, - doc_ids - ) { + RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { if (error != null) { return callback(error) } if (!(doc_ids != null ? 
doc_ids.length : undefined)) { return callback(null, []) } - return RedisManager.getDocTimestamps(doc_ids, function ( - error, - timestamps - ) { + RedisManager.getDocTimestamps(doc_ids, function (error, timestamps) { if (error != null) { return callback(error) } - return callback(null, timestamps) + callback(null, timestamps) }) }) }, @@ -209,77 +190,73 @@ module.exports = ProjectManager = { ) const callback = function (...args) { timer.done() - return _callback(...args) + _callback(...args) } - return RedisManager.checkOrSetProjectState( - project_id, - projectStateHash, - function (error, projectStateChanged) { + RedisManager.checkOrSetProjectState(project_id, projectStateHash, function ( + error, + projectStateChanged + ) { + if (error != null) { + logger.error( + { err: error, project_id }, + 'error getting/setting project state in getProjectDocsAndFlushIfOld' + ) + return callback(error) + } + // we can't return docs if project structure has changed + if (projectStateChanged) { + return callback( + Errors.ProjectStateChangedError('project state changed') + ) + } + // project structure hasn't changed, return doc content from redis + RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { if (error != null) { logger.error( { err: error, project_id }, - 'error getting/setting project state in getProjectDocsAndFlushIfOld' + 'error getting doc ids in getProjectDocs' ) return callback(error) } - // we can't return docs if project structure has changed - if (projectStateChanged) { - return callback( - Errors.ProjectStateChangedError('project state changed') - ) + const jobs = [] + for (const doc_id of doc_ids) { + ;((doc_id) => + jobs.push(( + cb // get the doc lines from redis + ) => + DocumentManager.getDocAndFlushIfOldWithLock( + project_id, + doc_id, + function (err, lines, version) { + if (err != null) { + logger.error( + { err, project_id, doc_id }, + 'error getting project doc lines in getProjectDocsAndFlushIfOld' + ) + return cb(err) + } + const doc = { _id: doc_id, lines, v: version } // create a doc object to return + cb(null, doc) + } + ) + ))(doc_id) } - // project structure hasn't changed, return doc content from redis - return RedisManager.getDocIdsInProject(project_id, function ( - error, - doc_ids - ) { + async.series(jobs, function (error, docs) { if (error != null) { - logger.error( - { err: error, project_id }, - 'error getting doc ids in getProjectDocs' - ) return callback(error) } - const jobs = [] - for (const doc_id of doc_ids) { - ;((doc_id) => - jobs.push(( - cb // get the doc lines from redis - ) => - DocumentManager.getDocAndFlushIfOldWithLock( - project_id, - doc_id, - function (err, lines, version) { - if (err != null) { - logger.error( - { err, project_id, doc_id }, - 'error getting project doc lines in getProjectDocsAndFlushIfOld' - ) - return cb(err) - } - const doc = { _id: doc_id, lines, v: version } // create a doc object to return - return cb(null, doc) - } - ) - ))(doc_id) - } - return async.series(jobs, function (error, docs) { - if (error != null) { - return callback(error) - } - return callback(null, docs) - }) + callback(null, docs) }) - } - ) + }) + }) }, clearProjectState(project_id, callback) { if (callback == null) { callback = function (error) {} } - return RedisManager.clearProjectState(project_id, callback) + RedisManager.clearProjectState(project_id, callback) }, updateProjectWithLocks( @@ -297,7 +274,7 @@ module.exports = ProjectManager = { const timer = new Metrics.Timer('projectManager.updateProject') const callback = 
function (...args) { timer.done() - return _callback(...args) + _callback(...args) } const project_version = version @@ -309,7 +286,7 @@ module.exports = ProjectManager = { const doc_id = projectUpdate.id projectUpdate.version = `${project_version}.${project_subversion++}` if (projectUpdate.docLines != null) { - return ProjectHistoryRedisManager.queueAddEntity( + ProjectHistoryRedisManager.queueAddEntity( project_id, projectHistoryId, 'doc', @@ -318,11 +295,11 @@ module.exports = ProjectManager = { projectUpdate, function (error, count) { project_ops_length = count - return cb(error) + cb(error) } ) } else { - return DocumentManager.renameDocWithLock( + DocumentManager.renameDocWithLock( project_id, doc_id, user_id, @@ -330,7 +307,7 @@ module.exports = ProjectManager = { projectHistoryId, function (error, count) { project_ops_length = count - return cb(error) + cb(error) } ) } @@ -340,7 +317,7 @@ module.exports = ProjectManager = { const file_id = projectUpdate.id projectUpdate.version = `${project_version}.${project_subversion++}` if (projectUpdate.url != null) { - return ProjectHistoryRedisManager.queueAddEntity( + ProjectHistoryRedisManager.queueAddEntity( project_id, projectHistoryId, 'file', @@ -349,11 +326,11 @@ module.exports = ProjectManager = { projectUpdate, function (error, count) { project_ops_length = count - return cb(error) + cb(error) } ) } else { - return ProjectHistoryRedisManager.queueRenameEntity( + ProjectHistoryRedisManager.queueRenameEntity( project_id, projectHistoryId, 'file', @@ -362,17 +339,17 @@ module.exports = ProjectManager = { projectUpdate, function (error, count) { project_ops_length = count - return cb(error) + cb(error) } ) } } - return async.eachSeries(docUpdates, handleDocUpdate, function (error) { + async.eachSeries(docUpdates, handleDocUpdate, function (error) { if (error != null) { return callback(error) } - return async.eachSeries(fileUpdates, handleFileUpdate, function (error) { + async.eachSeries(fileUpdates, handleFileUpdate, function (error) { if (error != null) { return callback(error) } @@ -385,7 +362,7 @@ module.exports = ProjectManager = { ) { HistoryManager.flushProjectChangesAsync(project_id) } - return callback() + callback() }) }) }, From f2c67b66fa236e7e26b81bc10fea65914278c85f Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 May 2020 16:50:04 -0400 Subject: [PATCH 665/769] Decaf cleanup: remove default callbacks --- .../document-updater/app/js/ProjectManager.js | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index cc39f3f399..4c06ffdcdf 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -22,9 +22,6 @@ const Errors = require('./Errors') module.exports = ProjectManager = { flushProjectWithLocks(project_id, _callback) { - if (_callback == null) { - _callback = function (error) {} - } const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') const callback = function (...args) { timer.done() @@ -77,9 +74,6 @@ module.exports = ProjectManager = { }, flushAndDeleteProjectWithLocks(project_id, options, _callback) { - if (_callback == null) { - _callback = function (error) {} - } const timer = new Metrics.Timer( 'projectManager.flushAndDeleteProjectWithLocks' ) @@ -137,9 +131,6 @@ module.exports = ProjectManager = { }, queueFlushAndDeleteProject(project_id, callback) { - if (callback == null) { - callback = function (error) 
{} - } RedisManager.queueFlushAndDeleteProject(project_id, function (error) { if (error != null) { logger.error( @@ -154,9 +145,6 @@ module.exports = ProjectManager = { }, getProjectDocsTimestamps(project_id, callback) { - if (callback == null) { - callback = function (error) {} - } RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { if (error != null) { return callback(error) @@ -179,12 +167,6 @@ module.exports = ProjectManager = { excludeVersions, _callback ) { - if (excludeVersions == null) { - excludeVersions = {} - } - if (_callback == null) { - _callback = function (error, docs) {} - } const timer = new Metrics.Timer( 'projectManager.getProjectDocsAndFlushIfOld' ) @@ -253,9 +235,6 @@ module.exports = ProjectManager = { }, clearProjectState(project_id, callback) { - if (callback == null) { - callback = function (error) {} - } RedisManager.clearProjectState(project_id, callback) }, @@ -268,9 +247,6 @@ module.exports = ProjectManager = { version, _callback ) { - if (_callback == null) { - _callback = function (error) {} - } const timer = new Metrics.Timer('projectManager.updateProject') const callback = function (...args) { timer.done() From 6b5760ca282619573446d1b209e308765d8f55f4 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 May 2020 16:53:22 -0400 Subject: [PATCH 666/769] Decaf cleanup: simplify null checks --- .../document-updater/app/js/ProjectManager.js | 39 ++++++++----------- 1 file changed, 16 insertions(+), 23 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index 4c06ffdcdf..c83710cdb4 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -3,13 +3,6 @@ handle-callback-err, no-unused-vars, */ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ let ProjectManager const RedisManager = require('./RedisManager') const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') @@ -29,7 +22,7 @@ module.exports = ProjectManager = { } RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { - if (error != null) { + if (error) { return callback(error) } const jobs = [] @@ -41,13 +34,13 @@ module.exports = ProjectManager = { project_id, doc_id, function (error) { - if (error != null && error instanceof Errors.NotFoundError) { + if (error instanceof Errors.NotFoundError) { logger.warn( { err: error, project_id, doc_id }, 'found deleted doc when flushing' ) callback() - } else if (error != null) { + } else if (error) { logger.error( { err: error, project_id, doc_id }, 'error flushing doc' @@ -83,7 +76,7 @@ module.exports = ProjectManager = { } RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { - if (error != null) { + if (error) { return callback(error) } const jobs = [] @@ -96,7 +89,7 @@ module.exports = ProjectManager = { doc_id, {}, function (error) { - if (error != null) { + if (error) { logger.error( { err: error, project_id, doc_id }, 'error deleting doc' @@ -120,7 +113,7 @@ module.exports = ProjectManager = { ) { if (errors.length > 0) { callback(new Error('Errors deleting docs. 
See log for details')) - } else if (error != null) { + } else if (error) { callback(error) } else { callback(null) @@ -132,7 +125,7 @@ module.exports = ProjectManager = { queueFlushAndDeleteProject(project_id, callback) { RedisManager.queueFlushAndDeleteProject(project_id, function (error) { - if (error != null) { + if (error) { logger.error( { project_id, error }, 'error adding project to flush and delete queue' @@ -146,14 +139,14 @@ module.exports = ProjectManager = { getProjectDocsTimestamps(project_id, callback) { RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { - if (error != null) { + if (error) { return callback(error) } - if (!(doc_ids != null ? doc_ids.length : undefined)) { + if (doc_ids.length === 0) { return callback(null, []) } RedisManager.getDocTimestamps(doc_ids, function (error, timestamps) { - if (error != null) { + if (error) { return callback(error) } callback(null, timestamps) @@ -179,7 +172,7 @@ module.exports = ProjectManager = { error, projectStateChanged ) { - if (error != null) { + if (error) { logger.error( { err: error, project_id }, 'error getting/setting project state in getProjectDocsAndFlushIfOld' @@ -194,7 +187,7 @@ module.exports = ProjectManager = { } // project structure hasn't changed, return doc content from redis RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { - if (error != null) { + if (error) { logger.error( { err: error, project_id }, 'error getting doc ids in getProjectDocs' @@ -211,7 +204,7 @@ module.exports = ProjectManager = { project_id, doc_id, function (err, lines, version) { - if (err != null) { + if (err) { logger.error( { err, project_id, doc_id }, 'error getting project doc lines in getProjectDocsAndFlushIfOld' @@ -225,7 +218,7 @@ module.exports = ProjectManager = { ))(doc_id) } async.series(jobs, function (error, docs) { - if (error != null) { + if (error) { return callback(error) } callback(null, docs) @@ -322,11 +315,11 @@ module.exports = ProjectManager = { } async.eachSeries(docUpdates, handleDocUpdate, function (error) { - if (error != null) { + if (error) { return callback(error) } async.eachSeries(fileUpdates, handleFileUpdate, function (error) { - if (error != null) { + if (error) { return callback(error) } if ( From ceb77f4c937ac7f6d085db2997ee702e0880300f Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 May 2020 16:54:08 -0400 Subject: [PATCH 667/769] Decaf cleanup: remove unused variables --- services/document-updater/app/js/ProjectManager.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index c83710cdb4..3da062995b 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -1,9 +1,7 @@ /* eslint-disable camelcase, handle-callback-err, - no-unused-vars, */ -let ProjectManager const RedisManager = require('./RedisManager') const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') const DocumentManager = require('./DocumentManager') @@ -13,7 +11,7 @@ const logger = require('logger-sharelatex') const Metrics = require('./Metrics') const Errors = require('./Errors') -module.exports = ProjectManager = { +module.exports = { flushProjectWithLocks(project_id, _callback) { const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') const callback = function (...args) { From 7a5a782dc72b21d512b3f235b70efea05c9812ca Mon Sep 17 00:00:00 2001 From: Eric Mc Sween 
Date: Thu, 14 May 2020 16:58:57 -0400 Subject: [PATCH 668/769] Decaf cleanup: camel case variables --- .../document-updater/app/js/ProjectManager.js | 140 +++++++++--------- .../flushAndDeleteProjectTests.js | 2 +- .../js/ProjectManager/flushProjectTests.js | 2 +- .../js/ProjectManager/getProjectDocsTests.js | 2 +- 4 files changed, 71 insertions(+), 75 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index 3da062995b..d3dc0b14f7 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -1,7 +1,3 @@ -/* eslint-disable - camelcase, - handle-callback-err, -*/ const RedisManager = require('./RedisManager') const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') const DocumentManager = require('./DocumentManager') @@ -12,35 +8,35 @@ const Metrics = require('./Metrics') const Errors = require('./Errors') module.exports = { - flushProjectWithLocks(project_id, _callback) { + flushProjectWithLocks(projectId, _callback) { const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') const callback = function (...args) { timer.done() _callback(...args) } - RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { + RedisManager.getDocIdsInProject(projectId, function (error, docIds) { if (error) { return callback(error) } const jobs = [] const errors = [] - for (const doc_id of doc_ids) { - ;((doc_id) => + for (const docId of docIds) { + ;((docId) => jobs.push((callback) => DocumentManager.flushDocIfLoadedWithLock( - project_id, - doc_id, + projectId, + docId, function (error) { if (error instanceof Errors.NotFoundError) { logger.warn( - { err: error, project_id, doc_id }, + { err: error, projectId, docId }, 'found deleted doc when flushing' ) callback() } else if (error) { logger.error( - { err: error, project_id, doc_id }, + { err: error, projectId, docId }, 'error flushing doc' ) errors.push(error) @@ -50,10 +46,10 @@ module.exports = { } } ) - ))(doc_id) + ))(docId) } - logger.log({ project_id, doc_ids }, 'flushing docs') + logger.log({ projectId, docIds }, 'flushing docs') async.series(jobs, function () { if (errors.length > 0) { callback(new Error('Errors flushing docs. 
See log for details')) @@ -64,7 +60,7 @@ module.exports = { }) }, - flushAndDeleteProjectWithLocks(project_id, options, _callback) { + flushAndDeleteProjectWithLocks(projectId, options, _callback) { const timer = new Metrics.Timer( 'projectManager.flushAndDeleteProjectWithLocks' ) @@ -73,23 +69,23 @@ module.exports = { _callback(...args) } - RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { + RedisManager.getDocIdsInProject(projectId, function (error, docIds) { if (error) { return callback(error) } const jobs = [] const errors = [] - for (const doc_id of doc_ids) { - ;((doc_id) => + for (const docId of docIds) { + ;((docId) => jobs.push((callback) => DocumentManager.flushAndDeleteDocWithLock( - project_id, - doc_id, + projectId, + docId, {}, function (error) { if (error) { logger.error( - { err: error, project_id, doc_id }, + { err: error, projectId, docId }, 'error deleting doc' ) errors.push(error) @@ -97,16 +93,16 @@ module.exports = { callback() } ) - ))(doc_id) + ))(docId) } - logger.log({ project_id, doc_ids }, 'deleting docs') + logger.log({ projectId, docIds }, 'deleting docs') async.series(jobs, () => // When deleting the project here we want to ensure that project // history is completely flushed because the project may be // deleted in web after this call completes, and so further // attempts to flush would fail after that. - HistoryManager.flushProjectChanges(project_id, options, function ( + HistoryManager.flushProjectChanges(projectId, options, function ( error ) { if (errors.length > 0) { @@ -121,11 +117,11 @@ module.exports = { }) }, - queueFlushAndDeleteProject(project_id, callback) { - RedisManager.queueFlushAndDeleteProject(project_id, function (error) { + queueFlushAndDeleteProject(projectId, callback) { + RedisManager.queueFlushAndDeleteProject(projectId, function (error) { if (error) { logger.error( - { project_id, error }, + { projectId, error }, 'error adding project to flush and delete queue' ) return callback(error) @@ -135,15 +131,15 @@ module.exports = { }) }, - getProjectDocsTimestamps(project_id, callback) { - RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { + getProjectDocsTimestamps(projectId, callback) { + RedisManager.getDocIdsInProject(projectId, function (error, docIds) { if (error) { return callback(error) } - if (doc_ids.length === 0) { + if (docIds.length === 0) { return callback(null, []) } - RedisManager.getDocTimestamps(doc_ids, function (error, timestamps) { + RedisManager.getDocTimestamps(docIds, function (error, timestamps) { if (error) { return callback(error) } @@ -153,7 +149,7 @@ module.exports = { }, getProjectDocsAndFlushIfOld( - project_id, + projectId, projectStateHash, excludeVersions, _callback @@ -166,13 +162,13 @@ module.exports = { _callback(...args) } - RedisManager.checkOrSetProjectState(project_id, projectStateHash, function ( + RedisManager.checkOrSetProjectState(projectId, projectStateHash, function ( error, projectStateChanged ) { if (error) { logger.error( - { err: error, project_id }, + { err: error, projectId }, 'error getting/setting project state in getProjectDocsAndFlushIfOld' ) return callback(error) @@ -184,36 +180,36 @@ module.exports = { ) } // project structure hasn't changed, return doc content from redis - RedisManager.getDocIdsInProject(project_id, function (error, doc_ids) { + RedisManager.getDocIdsInProject(projectId, function (error, docIds) { if (error) { logger.error( - { err: error, project_id }, + { err: error, projectId }, 'error getting doc ids in 
getProjectDocs' ) return callback(error) } const jobs = [] - for (const doc_id of doc_ids) { - ;((doc_id) => + for (const docId of docIds) { + ;((docId) => jobs.push(( cb // get the doc lines from redis ) => DocumentManager.getDocAndFlushIfOldWithLock( - project_id, - doc_id, + projectId, + docId, function (err, lines, version) { if (err) { logger.error( - { err, project_id, doc_id }, + { err, projectId, docId }, 'error getting project doc lines in getProjectDocsAndFlushIfOld' ) return cb(err) } - const doc = { _id: doc_id, lines, v: version } // create a doc object to return + const doc = { _id: docId, lines, v: version } // create a doc object to return cb(null, doc) } ) - ))(doc_id) + ))(docId) } async.series(jobs, function (error, docs) { if (error) { @@ -225,14 +221,14 @@ module.exports = { }) }, - clearProjectState(project_id, callback) { - RedisManager.clearProjectState(project_id, callback) + clearProjectState(projectId, callback) { + RedisManager.clearProjectState(projectId, callback) }, updateProjectWithLocks( - project_id, + projectId, projectHistoryId, - user_id, + userId, docUpdates, fileUpdates, version, @@ -244,36 +240,36 @@ module.exports = { _callback(...args) } - const project_version = version - let project_subversion = 0 // project versions can have multiple operations + const projectVersion = version + let projectSubversion = 0 // project versions can have multiple operations - let project_ops_length = 0 + let projectOpsLength = 0 const handleDocUpdate = function (projectUpdate, cb) { - const doc_id = projectUpdate.id - projectUpdate.version = `${project_version}.${project_subversion++}` + const docId = projectUpdate.id + projectUpdate.version = `${projectVersion}.${projectSubversion++}` if (projectUpdate.docLines != null) { ProjectHistoryRedisManager.queueAddEntity( - project_id, + projectId, projectHistoryId, 'doc', - doc_id, - user_id, + docId, + userId, projectUpdate, function (error, count) { - project_ops_length = count + projectOpsLength = count cb(error) } ) } else { DocumentManager.renameDocWithLock( - project_id, - doc_id, - user_id, + projectId, + docId, + userId, projectUpdate, projectHistoryId, function (error, count) { - project_ops_length = count + projectOpsLength = count cb(error) } ) @@ -281,31 +277,31 @@ module.exports = { } const handleFileUpdate = function (projectUpdate, cb) { - const file_id = projectUpdate.id - projectUpdate.version = `${project_version}.${project_subversion++}` + const fileId = projectUpdate.id + projectUpdate.version = `${projectVersion}.${projectSubversion++}` if (projectUpdate.url != null) { ProjectHistoryRedisManager.queueAddEntity( - project_id, + projectId, projectHistoryId, 'file', - file_id, - user_id, + fileId, + userId, projectUpdate, function (error, count) { - project_ops_length = count + projectOpsLength = count cb(error) } ) } else { ProjectHistoryRedisManager.queueRenameEntity( - project_id, + projectId, projectHistoryId, 'file', - file_id, - user_id, + fileId, + userId, projectUpdate, function (error, count) { - project_ops_length = count + projectOpsLength = count cb(error) } ) @@ -322,12 +318,12 @@ module.exports = { } if ( HistoryManager.shouldFlushHistoryOps( - project_ops_length, + projectOpsLength, docUpdates.length + fileUpdates.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS ) ) { - HistoryManager.flushProjectChangesAsync(project_id) + HistoryManager.flushProjectChangesAsync(projectId) } callback() }) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js 
b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js index 7afa407e7c..266d1d1a2e 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js @@ -140,7 +140,7 @@ describe('ProjectManager - flushAndDeleteProject', function () { it('should record the error', function () { return this.logger.error .calledWith( - { err: this.error, project_id: this.project_id, doc_id: 'doc-id-1' }, + { err: this.error, projectId: this.project_id, docId: 'doc-id-1' }, 'error deleting doc' ) .should.equal(true) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js index 110a827e64..1907a26228 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js @@ -129,7 +129,7 @@ describe('ProjectManager - flushProject', function () { it('should record the error', function () { return this.logger.error .calledWith( - { err: this.error, project_id: this.project_id, doc_id: 'doc-id-1' }, + { err: this.error, projectId: this.project_id, docId: 'doc-id-1' }, 'error flushing doc' ) .should.equal(true) diff --git a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js index 72ffe39ec1..a10b328864 100644 --- a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js @@ -187,7 +187,7 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { it('should record the error', function () { return this.logger.error .calledWith( - { err: this.error, project_id: this.project_id, doc_id: 'doc-id-2' }, + { err: this.error, projectId: this.project_id, docId: 'doc-id-2' }, 'error getting project doc lines in getProjectDocsAndFlushIfOld' ) .should.equal(true) From 751d9ea718082b8791a4a8a662292e4bfac628a0 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 May 2020 17:03:14 -0400 Subject: [PATCH 669/769] Decaf cleanup: simplify loops --- .../document-updater/app/js/ProjectManager.js | 124 ++++++++---------- 1 file changed, 53 insertions(+), 71 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index d3dc0b14f7..16997df3cf 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -19,35 +19,26 @@ module.exports = { if (error) { return callback(error) } - const jobs = [] const errors = [] - for (const docId of docIds) { - ;((docId) => - jobs.push((callback) => - DocumentManager.flushDocIfLoadedWithLock( - projectId, - docId, - function (error) { - if (error instanceof Errors.NotFoundError) { - logger.warn( - { err: error, projectId, docId }, - 'found deleted doc when flushing' - ) - callback() - } else if (error) { - logger.error( - { err: error, projectId, docId }, - 'error flushing doc' - ) - errors.push(error) - callback() - } else { - callback() - } - } + const jobs = docIds.map((docId) => (callback) => { + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, function ( + error + ) { + if (error instanceof Errors.NotFoundError) { + logger.warn( + { err: error, projectId, docId }, + 'found deleted 
doc when flushing' ) - ))(docId) - } + callback() + } else if (error) { + logger.error({ err: error, projectId, docId }, 'error flushing doc') + errors.push(error) + callback() + } else { + callback() + } + }) + }) logger.log({ projectId, docIds }, 'flushing docs') async.series(jobs, function () { @@ -73,28 +64,24 @@ module.exports = { if (error) { return callback(error) } - const jobs = [] const errors = [] - for (const docId of docIds) { - ;((docId) => - jobs.push((callback) => - DocumentManager.flushAndDeleteDocWithLock( - projectId, - docId, - {}, - function (error) { - if (error) { - logger.error( - { err: error, projectId, docId }, - 'error deleting doc' - ) - errors.push(error) - } - callback() - } - ) - ))(docId) - } + const jobs = docIds.map((docId) => (callback) => { + DocumentManager.flushAndDeleteDocWithLock( + projectId, + docId, + {}, + function (error) { + if (error) { + logger.error( + { err: error, projectId, docId }, + 'error deleting doc' + ) + errors.push(error) + } + callback() + } + ) + }) logger.log({ projectId, docIds }, 'deleting docs') async.series(jobs, () => @@ -188,29 +175,24 @@ module.exports = { ) return callback(error) } - const jobs = [] - for (const docId of docIds) { - ;((docId) => - jobs.push(( - cb // get the doc lines from redis - ) => - DocumentManager.getDocAndFlushIfOldWithLock( - projectId, - docId, - function (err, lines, version) { - if (err) { - logger.error( - { err, projectId, docId }, - 'error getting project doc lines in getProjectDocsAndFlushIfOld' - ) - return cb(err) - } - const doc = { _id: docId, lines, v: version } // create a doc object to return - cb(null, doc) - } - ) - ))(docId) - } + // get the doc lines from redis + const jobs = docIds.map((docId) => (cb) => { + DocumentManager.getDocAndFlushIfOldWithLock( + projectId, + docId, + function (err, lines, version) { + if (err) { + logger.error( + { err, projectId, docId }, + 'error getting project doc lines in getProjectDocsAndFlushIfOld' + ) + return cb(err) + } + const doc = { _id: docId, lines, v: version } // create a doc object to return + cb(null, doc) + } + ) + }) async.series(jobs, function (error, docs) { if (error) { return callback(error) From 3d000bcbe67603aa22b44b6f33f59de8b450b594 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 May 2020 17:05:59 -0400 Subject: [PATCH 670/769] Decaf cleanup: use arrow functions for callbacks --- .../document-updater/app/js/ProjectManager.js | 117 +++++++++--------- 1 file changed, 57 insertions(+), 60 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index 16997df3cf..25de54f3d1 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -15,15 +15,13 @@ module.exports = { _callback(...args) } - RedisManager.getDocIdsInProject(projectId, function (error, docIds) { + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { if (error) { return callback(error) } const errors = [] const jobs = docIds.map((docId) => (callback) => { - DocumentManager.flushDocIfLoadedWithLock(projectId, docId, function ( - error - ) { + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => { if (error instanceof Errors.NotFoundError) { logger.warn( { err: error, projectId, docId }, @@ -41,7 +39,7 @@ module.exports = { }) logger.log({ projectId, docIds }, 'flushing docs') - async.series(jobs, function () { + async.series(jobs, () => { if (errors.length > 0) { callback(new Error('Errors 
flushing docs. See log for details')) } else { @@ -60,7 +58,7 @@ module.exports = { _callback(...args) } - RedisManager.getDocIdsInProject(projectId, function (error, docIds) { + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { if (error) { return callback(error) } @@ -70,7 +68,7 @@ module.exports = { projectId, docId, {}, - function (error) { + (error) => { if (error) { logger.error( { err: error, projectId, docId }, @@ -89,9 +87,7 @@ module.exports = { // history is completely flushed because the project may be // deleted in web after this call completes, and so further // attempts to flush would fail after that. - HistoryManager.flushProjectChanges(projectId, options, function ( - error - ) { + HistoryManager.flushProjectChanges(projectId, options, (error) => { if (errors.length > 0) { callback(new Error('Errors deleting docs. See log for details')) } else if (error) { @@ -105,7 +101,7 @@ module.exports = { }, queueFlushAndDeleteProject(projectId, callback) { - RedisManager.queueFlushAndDeleteProject(projectId, function (error) { + RedisManager.queueFlushAndDeleteProject(projectId, (error) => { if (error) { logger.error( { projectId, error }, @@ -119,14 +115,14 @@ module.exports = { }, getProjectDocsTimestamps(projectId, callback) { - RedisManager.getDocIdsInProject(projectId, function (error, docIds) { + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { if (error) { return callback(error) } if (docIds.length === 0) { return callback(null, []) } - RedisManager.getDocTimestamps(docIds, function (error, timestamps) { + RedisManager.getDocTimestamps(docIds, (error, timestamps) => { if (error) { return callback(error) } @@ -149,58 +145,59 @@ module.exports = { _callback(...args) } - RedisManager.checkOrSetProjectState(projectId, projectStateHash, function ( - error, - projectStateChanged - ) { - if (error) { - logger.error( - { err: error, projectId }, - 'error getting/setting project state in getProjectDocsAndFlushIfOld' - ) - return callback(error) - } - // we can't return docs if project structure has changed - if (projectStateChanged) { - return callback( - Errors.ProjectStateChangedError('project state changed') - ) - } - // project structure hasn't changed, return doc content from redis - RedisManager.getDocIdsInProject(projectId, function (error, docIds) { + RedisManager.checkOrSetProjectState( + projectId, + projectStateHash, + (error, projectStateChanged) => { if (error) { logger.error( { err: error, projectId }, - 'error getting doc ids in getProjectDocs' + 'error getting/setting project state in getProjectDocsAndFlushIfOld' ) return callback(error) } - // get the doc lines from redis - const jobs = docIds.map((docId) => (cb) => { - DocumentManager.getDocAndFlushIfOldWithLock( - projectId, - docId, - function (err, lines, version) { - if (err) { - logger.error( - { err, projectId, docId }, - 'error getting project doc lines in getProjectDocsAndFlushIfOld' - ) - return cb(err) - } - const doc = { _id: docId, lines, v: version } // create a doc object to return - cb(null, doc) - } + // we can't return docs if project structure has changed + if (projectStateChanged) { + return callback( + Errors.ProjectStateChangedError('project state changed') ) - }) - async.series(jobs, function (error, docs) { + } + // project structure hasn't changed, return doc content from redis + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { if (error) { + logger.error( + { err: error, projectId }, + 'error getting doc ids in getProjectDocs' + ) return 
callback(error) } - callback(null, docs) + // get the doc lines from redis + const jobs = docIds.map((docId) => (cb) => { + DocumentManager.getDocAndFlushIfOldWithLock( + projectId, + docId, + (err, lines, version) => { + if (err) { + logger.error( + { err, projectId, docId }, + 'error getting project doc lines in getProjectDocsAndFlushIfOld' + ) + return cb(err) + } + const doc = { _id: docId, lines, v: version } // create a doc object to return + cb(null, doc) + } + ) + }) + async.series(jobs, (error, docs) => { + if (error) { + return callback(error) + } + callback(null, docs) + }) }) - }) - }) + } + ) }, clearProjectState(projectId, callback) { @@ -238,7 +235,7 @@ module.exports = { docId, userId, projectUpdate, - function (error, count) { + (error, count) => { projectOpsLength = count cb(error) } @@ -250,7 +247,7 @@ module.exports = { userId, projectUpdate, projectHistoryId, - function (error, count) { + (error, count) => { projectOpsLength = count cb(error) } @@ -269,7 +266,7 @@ module.exports = { fileId, userId, projectUpdate, - function (error, count) { + (error, count) => { projectOpsLength = count cb(error) } @@ -282,7 +279,7 @@ module.exports = { fileId, userId, projectUpdate, - function (error, count) { + (error, count) => { projectOpsLength = count cb(error) } @@ -290,11 +287,11 @@ module.exports = { } } - async.eachSeries(docUpdates, handleDocUpdate, function (error) { + async.eachSeries(docUpdates, handleDocUpdate, (error) => { if (error) { return callback(error) } - async.eachSeries(fileUpdates, handleFileUpdate, function (error) { + async.eachSeries(fileUpdates, handleFileUpdate, (error) => { if (error) { return callback(error) } From 6269ace987a5dd5e8872f79635a748aeeacff1cd Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 May 2020 17:09:01 -0400 Subject: [PATCH 671/769] Decaf cleanup: move functions to top level --- .../document-updater/app/js/ProjectManager.js | 519 +++++++++--------- 1 file changed, 261 insertions(+), 258 deletions(-) diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index 25de54f3d1..768ab3e759 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -8,304 +8,307 @@ const Metrics = require('./Metrics') const Errors = require('./Errors') module.exports = { - flushProjectWithLocks(projectId, _callback) { - const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') - const callback = function (...args) { - timer.done() - _callback(...args) + flushProjectWithLocks, + flushAndDeleteProjectWithLocks, + queueFlushAndDeleteProject, + getProjectDocsTimestamps, + getProjectDocsAndFlushIfOld, + clearProjectState, + updateProjectWithLocks +} + +function flushProjectWithLocks(projectId, _callback) { + const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') + const callback = function (...args) { + timer.done() + _callback(...args) + } + + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { + if (error) { + return callback(error) } - - RedisManager.getDocIdsInProject(projectId, (error, docIds) => { - if (error) { - return callback(error) - } - const errors = [] - const jobs = docIds.map((docId) => (callback) => { - DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => { - if (error instanceof Errors.NotFoundError) { - logger.warn( - { err: error, projectId, docId }, - 'found deleted doc when flushing' - ) - callback() - } else if (error) { - logger.error({ err: error, 
projectId, docId }, 'error flushing doc') - errors.push(error) - callback() - } else { - callback() - } - }) + const errors = [] + const jobs = docIds.map((docId) => (callback) => { + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => { + if (error instanceof Errors.NotFoundError) { + logger.warn( + { err: error, projectId, docId }, + 'found deleted doc when flushing' + ) + callback() + } else if (error) { + logger.error({ err: error, projectId, docId }, 'error flushing doc') + errors.push(error) + callback() + } else { + callback() + } }) + }) - logger.log({ projectId, docIds }, 'flushing docs') - async.series(jobs, () => { + logger.log({ projectId, docIds }, 'flushing docs') + async.series(jobs, () => { + if (errors.length > 0) { + callback(new Error('Errors flushing docs. See log for details')) + } else { + callback(null) + } + }) + }) +} + +function flushAndDeleteProjectWithLocks(projectId, options, _callback) { + const timer = new Metrics.Timer( + 'projectManager.flushAndDeleteProjectWithLocks' + ) + const callback = function (...args) { + timer.done() + _callback(...args) + } + + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { + if (error) { + return callback(error) + } + const errors = [] + const jobs = docIds.map((docId) => (callback) => { + DocumentManager.flushAndDeleteDocWithLock( + projectId, + docId, + {}, + (error) => { + if (error) { + logger.error({ err: error, projectId, docId }, 'error deleting doc') + errors.push(error) + } + callback() + } + ) + }) + + logger.log({ projectId, docIds }, 'deleting docs') + async.series(jobs, () => + // When deleting the project here we want to ensure that project + // history is completely flushed because the project may be + // deleted in web after this call completes, and so further + // attempts to flush would fail after that. + HistoryManager.flushProjectChanges(projectId, options, (error) => { if (errors.length > 0) { - callback(new Error('Errors flushing docs. See log for details')) + callback(new Error('Errors deleting docs. See log for details')) + } else if (error) { + callback(error) } else { callback(null) } }) - }) - }, - - flushAndDeleteProjectWithLocks(projectId, options, _callback) { - const timer = new Metrics.Timer( - 'projectManager.flushAndDeleteProjectWithLocks' ) - const callback = function (...args) { - timer.done() - _callback(...args) - } + }) +} - RedisManager.getDocIdsInProject(projectId, (error, docIds) => { - if (error) { - return callback(error) - } - const errors = [] - const jobs = docIds.map((docId) => (callback) => { - DocumentManager.flushAndDeleteDocWithLock( - projectId, - docId, - {}, - (error) => { - if (error) { - logger.error( - { err: error, projectId, docId }, - 'error deleting doc' - ) - errors.push(error) - } - callback() - } - ) - }) - - logger.log({ projectId, docIds }, 'deleting docs') - async.series(jobs, () => - // When deleting the project here we want to ensure that project - // history is completely flushed because the project may be - // deleted in web after this call completes, and so further - // attempts to flush would fail after that. - HistoryManager.flushProjectChanges(projectId, options, (error) => { - if (errors.length > 0) { - callback(new Error('Errors deleting docs. 
See log for details')) - } else if (error) { - callback(error) - } else { - callback(null) - } - }) +function queueFlushAndDeleteProject(projectId, callback) { + RedisManager.queueFlushAndDeleteProject(projectId, (error) => { + if (error) { + logger.error( + { projectId, error }, + 'error adding project to flush and delete queue' ) - }) - }, + return callback(error) + } + Metrics.inc('queued-delete') + callback() + }) +} - queueFlushAndDeleteProject(projectId, callback) { - RedisManager.queueFlushAndDeleteProject(projectId, (error) => { - if (error) { - logger.error( - { projectId, error }, - 'error adding project to flush and delete queue' - ) - return callback(error) - } - Metrics.inc('queued-delete') - callback() - }) - }, - - getProjectDocsTimestamps(projectId, callback) { - RedisManager.getDocIdsInProject(projectId, (error, docIds) => { +function getProjectDocsTimestamps(projectId, callback) { + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { + if (error) { + return callback(error) + } + if (docIds.length === 0) { + return callback(null, []) + } + RedisManager.getDocTimestamps(docIds, (error, timestamps) => { if (error) { return callback(error) } - if (docIds.length === 0) { - return callback(null, []) - } - RedisManager.getDocTimestamps(docIds, (error, timestamps) => { - if (error) { - return callback(error) - } - callback(null, timestamps) - }) + callback(null, timestamps) }) - }, + }) +} - getProjectDocsAndFlushIfOld( +function getProjectDocsAndFlushIfOld( + projectId, + projectStateHash, + excludeVersions, + _callback +) { + const timer = new Metrics.Timer('projectManager.getProjectDocsAndFlushIfOld') + const callback = function (...args) { + timer.done() + _callback(...args) + } + + RedisManager.checkOrSetProjectState( projectId, projectStateHash, - excludeVersions, - _callback - ) { - const timer = new Metrics.Timer( - 'projectManager.getProjectDocsAndFlushIfOld' - ) - const callback = function (...args) { - timer.done() - _callback(...args) - } - - RedisManager.checkOrSetProjectState( - projectId, - projectStateHash, - (error, projectStateChanged) => { + (error, projectStateChanged) => { + if (error) { + logger.error( + { err: error, projectId }, + 'error getting/setting project state in getProjectDocsAndFlushIfOld' + ) + return callback(error) + } + // we can't return docs if project structure has changed + if (projectStateChanged) { + return callback( + Errors.ProjectStateChangedError('project state changed') + ) + } + // project structure hasn't changed, return doc content from redis + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { if (error) { logger.error( { err: error, projectId }, - 'error getting/setting project state in getProjectDocsAndFlushIfOld' + 'error getting doc ids in getProjectDocs' ) return callback(error) } - // we can't return docs if project structure has changed - if (projectStateChanged) { - return callback( - Errors.ProjectStateChangedError('project state changed') + // get the doc lines from redis + const jobs = docIds.map((docId) => (cb) => { + DocumentManager.getDocAndFlushIfOldWithLock( + projectId, + docId, + (err, lines, version) => { + if (err) { + logger.error( + { err, projectId, docId }, + 'error getting project doc lines in getProjectDocsAndFlushIfOld' + ) + return cb(err) + } + const doc = { _id: docId, lines, v: version } // create a doc object to return + cb(null, doc) + } ) - } - // project structure hasn't changed, return doc content from redis - RedisManager.getDocIdsInProject(projectId, (error, docIds) 
=> { + }) + async.series(jobs, (error, docs) => { if (error) { - logger.error( - { err: error, projectId }, - 'error getting doc ids in getProjectDocs' - ) return callback(error) } - // get the doc lines from redis - const jobs = docIds.map((docId) => (cb) => { - DocumentManager.getDocAndFlushIfOldWithLock( - projectId, - docId, - (err, lines, version) => { - if (err) { - logger.error( - { err, projectId, docId }, - 'error getting project doc lines in getProjectDocsAndFlushIfOld' - ) - return cb(err) - } - const doc = { _id: docId, lines, v: version } // create a doc object to return - cb(null, doc) - } - ) - }) - async.series(jobs, (error, docs) => { - if (error) { - return callback(error) - } - callback(null, docs) - }) + callback(null, docs) }) - } - ) - }, - - clearProjectState(projectId, callback) { - RedisManager.clearProjectState(projectId, callback) - }, - - updateProjectWithLocks( - projectId, - projectHistoryId, - userId, - docUpdates, - fileUpdates, - version, - _callback - ) { - const timer = new Metrics.Timer('projectManager.updateProject') - const callback = function (...args) { - timer.done() - _callback(...args) + }) } + ) +} - const projectVersion = version - let projectSubversion = 0 // project versions can have multiple operations +function clearProjectState(projectId, callback) { + RedisManager.clearProjectState(projectId, callback) +} - let projectOpsLength = 0 +function updateProjectWithLocks( + projectId, + projectHistoryId, + userId, + docUpdates, + fileUpdates, + version, + _callback +) { + const timer = new Metrics.Timer('projectManager.updateProject') + const callback = function (...args) { + timer.done() + _callback(...args) + } - const handleDocUpdate = function (projectUpdate, cb) { - const docId = projectUpdate.id - projectUpdate.version = `${projectVersion}.${projectSubversion++}` - if (projectUpdate.docLines != null) { - ProjectHistoryRedisManager.queueAddEntity( - projectId, - projectHistoryId, - 'doc', - docId, - userId, - projectUpdate, - (error, count) => { - projectOpsLength = count - cb(error) - } - ) - } else { - DocumentManager.renameDocWithLock( - projectId, - docId, - userId, - projectUpdate, - projectHistoryId, - (error, count) => { - projectOpsLength = count - cb(error) - } - ) - } + const projectVersion = version + let projectSubversion = 0 // project versions can have multiple operations + + let projectOpsLength = 0 + + const handleDocUpdate = function (projectUpdate, cb) { + const docId = projectUpdate.id + projectUpdate.version = `${projectVersion}.${projectSubversion++}` + if (projectUpdate.docLines != null) { + ProjectHistoryRedisManager.queueAddEntity( + projectId, + projectHistoryId, + 'doc', + docId, + userId, + projectUpdate, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + } else { + DocumentManager.renameDocWithLock( + projectId, + docId, + userId, + projectUpdate, + projectHistoryId, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) } + } - const handleFileUpdate = function (projectUpdate, cb) { - const fileId = projectUpdate.id - projectUpdate.version = `${projectVersion}.${projectSubversion++}` - if (projectUpdate.url != null) { - ProjectHistoryRedisManager.queueAddEntity( - projectId, - projectHistoryId, - 'file', - fileId, - userId, - projectUpdate, - (error, count) => { - projectOpsLength = count - cb(error) - } - ) - } else { - ProjectHistoryRedisManager.queueRenameEntity( - projectId, - projectHistoryId, - 'file', - fileId, - userId, - projectUpdate, - (error, count) => { - 
projectOpsLength = count - cb(error) - } - ) - } + const handleFileUpdate = function (projectUpdate, cb) { + const fileId = projectUpdate.id + projectUpdate.version = `${projectVersion}.${projectSubversion++}` + if (projectUpdate.url != null) { + ProjectHistoryRedisManager.queueAddEntity( + projectId, + projectHistoryId, + 'file', + fileId, + userId, + projectUpdate, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + } else { + ProjectHistoryRedisManager.queueRenameEntity( + projectId, + projectHistoryId, + 'file', + fileId, + userId, + projectUpdate, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) } + } - async.eachSeries(docUpdates, handleDocUpdate, (error) => { + async.eachSeries(docUpdates, handleDocUpdate, (error) => { + if (error) { + return callback(error) + } + async.eachSeries(fileUpdates, handleFileUpdate, (error) => { if (error) { return callback(error) } - async.eachSeries(fileUpdates, handleFileUpdate, (error) => { - if (error) { - return callback(error) - } - if ( - HistoryManager.shouldFlushHistoryOps( - projectOpsLength, - docUpdates.length + fileUpdates.length, - HistoryManager.FLUSH_PROJECT_EVERY_N_OPS - ) - ) { - HistoryManager.flushProjectChangesAsync(projectId) - } - callback() - }) + if ( + HistoryManager.shouldFlushHistoryOps( + projectOpsLength, + docUpdates.length + fileUpdates.length, + HistoryManager.FLUSH_PROJECT_EVERY_N_OPS + ) + ) { + HistoryManager.flushProjectChangesAsync(projectId) + } + callback() }) - }, + }) } From 9e4280916a434f9edb2e478c334b53973cc922dc Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 15 May 2020 08:31:06 -0400 Subject: [PATCH 672/769] Decaf cleanup: unnecessary returns --- .../js/ProjectManager/updateProjectTests.js | 79 +++++++++---------- 1 file changed, 38 insertions(+), 41 deletions(-) diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index 5b439ef839..8c16c950d3 100644 --- a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -6,7 +6,6 @@ // Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ @@ -50,10 +49,10 @@ describe('ProjectManager', function () { this.version = 1234567 this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(false) this.HistoryManager.flushProjectChangesAsync = sinon.stub() - return (this.callback = sinon.stub()) + this.callback = sinon.stub() }) - return describe('updateProjectWithLocks', function () { + describe('updateProjectWithLocks', function () { describe('rename operations', function () { beforeEach(function () { this.firstDocUpdate = { @@ -74,14 +73,14 @@ describe('ProjectManager', function () { } this.fileUpdates = [this.firstFileUpdate] this.DocumentManager.renameDocWithLock = sinon.stub().yields() - return (this.ProjectHistoryRedisManager.queueRenameEntity = sinon + this.ProjectHistoryRedisManager.queueRenameEntity = sinon .stub() - .yields()) + .yields() }) describe('successfully', function () { beforeEach(function () { - return this.ProjectManager.updateProjectWithLocks( + this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, this.user_id, @@ -110,7 +109,7 @@ describe('ProjectManager', function () { this.projectHistoryId ) .should.equal(true) - return this.DocumentManager.renameDocWithLock + this.DocumentManager.renameDocWithLock .calledWith( this.project_id, this.secondDocUpdate.id, @@ -127,7 +126,7 @@ describe('ProjectManager', function () { this.firstFileUpdate, { version: `${this.version}.2` } ) - return this.ProjectHistoryRedisManager.queueRenameEntity + this.ProjectHistoryRedisManager.queueRenameEntity .calledWith( this.project_id, this.projectHistoryId, @@ -140,13 +139,13 @@ describe('ProjectManager', function () { }) it('should not flush the history', function () { - return this.HistoryManager.flushProjectChangesAsync + this.HistoryManager.flushProjectChangesAsync .calledWith(this.project_id) .should.equal(false) }) - return it('should call the callback', function () { - return this.callback.called.should.equal(true) + it('should call the callback', function () { + this.callback.called.should.equal(true) }) }) @@ -156,7 +155,7 @@ describe('ProjectManager', function () { this.DocumentManager.renameDocWithLock = sinon .stub() .yields(this.error) - return this.ProjectManager.updateProjectWithLocks( + this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, this.user_id, @@ -167,8 +166,8 @@ describe('ProjectManager', function () { ) }) - return it('should call the callback with the error', function () { - return this.callback.calledWith(this.error).should.equal(true) + it('should call the callback with the error', function () { + this.callback.calledWith(this.error).should.equal(true) }) }) @@ -178,7 +177,7 @@ describe('ProjectManager', function () { this.ProjectHistoryRedisManager.queueRenameEntity = sinon .stub() .yields(this.error) - return this.ProjectManager.updateProjectWithLocks( + this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, this.user_id, @@ -189,15 +188,15 @@ describe('ProjectManager', function () { ) }) - return it('should call the callback with the error', function () { - return this.callback.calledWith(this.error).should.equal(true) + it('should call the callback with the error', function () { + this.callback.calledWith(this.error).should.equal(true) }) }) - return 
describe('with enough ops to flush', function () { + describe('with enough ops to flush', function () { beforeEach(function () { this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - return this.ProjectManager.updateProjectWithLocks( + this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, this.user_id, @@ -208,15 +207,15 @@ describe('ProjectManager', function () { ) }) - return it('should flush the history', function () { - return this.HistoryManager.flushProjectChangesAsync + it('should flush the history', function () { + this.HistoryManager.flushProjectChangesAsync .calledWith(this.project_id) .should.equal(true) }) }) }) - return describe('add operations', function () { + describe('add operations', function () { beforeEach(function () { this.firstDocUpdate = { id: 1, @@ -236,14 +235,12 @@ describe('ProjectManager', function () { url: 'filestore.example.com/3' } this.fileUpdates = [this.firstFileUpdate, this.secondFileUpdate] - return (this.ProjectHistoryRedisManager.queueAddEntity = sinon - .stub() - .yields()) + this.ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields() }) describe('successfully', function () { beforeEach(function () { - return this.ProjectManager.updateProjectWithLocks( + this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, this.user_id, @@ -274,7 +271,7 @@ describe('ProjectManager', function () { firstDocUpdateWithVersion ) .should.equal(true) - return this.ProjectHistoryRedisManager.queueAddEntity + this.ProjectHistoryRedisManager.queueAddEntity .getCall(1) .calledWith( this.project_id, @@ -309,7 +306,7 @@ describe('ProjectManager', function () { firstFileUpdateWithVersion ) .should.equal(true) - return this.ProjectHistoryRedisManager.queueAddEntity + this.ProjectHistoryRedisManager.queueAddEntity .getCall(3) .calledWith( this.project_id, @@ -323,13 +320,13 @@ describe('ProjectManager', function () { }) it('should not flush the history', function () { - return this.HistoryManager.flushProjectChangesAsync + this.HistoryManager.flushProjectChangesAsync .calledWith(this.project_id) .should.equal(false) }) - return it('should call the callback', function () { - return this.callback.called.should.equal(true) + it('should call the callback', function () { + this.callback.called.should.equal(true) }) }) @@ -339,7 +336,7 @@ describe('ProjectManager', function () { this.ProjectHistoryRedisManager.queueAddEntity = sinon .stub() .yields(this.error) - return this.ProjectManager.updateProjectWithLocks( + this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, this.user_id, @@ -350,8 +347,8 @@ describe('ProjectManager', function () { ) }) - return it('should call the callback with the error', function () { - return this.callback.calledWith(this.error).should.equal(true) + it('should call the callback with the error', function () { + this.callback.calledWith(this.error).should.equal(true) }) }) @@ -361,7 +358,7 @@ describe('ProjectManager', function () { this.ProjectHistoryRedisManager.queueAddEntity = sinon .stub() .yields(this.error) - return this.ProjectManager.updateProjectWithLocks( + this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, this.user_id, @@ -372,15 +369,15 @@ describe('ProjectManager', function () { ) }) - return it('should call the callback with the error', function () { - return this.callback.calledWith(this.error).should.equal(true) + it('should call the callback with the error', function () { + 
this.callback.calledWith(this.error).should.equal(true) }) }) - return describe('with enough ops to flush', function () { + describe('with enough ops to flush', function () { beforeEach(function () { this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) - return this.ProjectManager.updateProjectWithLocks( + this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, this.user_id, @@ -391,8 +388,8 @@ describe('ProjectManager', function () { ) }) - return it('should flush the history', function () { - return this.HistoryManager.flushProjectChangesAsync + it('should flush the history', function () { + this.HistoryManager.flushProjectChangesAsync .calledWith(this.project_id) .should.equal(true) }) From 86747ba64f0947f7196c98957a3a44a33d342c96 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 15 May 2020 08:40:11 -0400 Subject: [PATCH 673/769] Decaf cleanup: stub setup in tests --- .../js/ProjectManager/updateProjectTests.js | 82 ++++++++----------- 1 file changed, 34 insertions(+), 48 deletions(-) diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index 8c16c950d3..94d132f962 100644 --- a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -2,13 +2,6 @@ no-return-assign, no-unused-vars, */ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS206: Consider reworking classes to avoid initClass - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const sinon = require('sinon') const chai = require('chai') const should = chai.should() @@ -18,28 +11,36 @@ const _ = require('lodash') describe('ProjectManager', function () { beforeEach(function () { - let Timer + this.RedisManager = {} + this.ProjectHistoryRedisManager = { + queueRenameEntity: sinon.stub().yields(), + queueAddEntity: sinon.stub().yields() + } + this.DocumentManager = { + renameDocWithLock: sinon.stub().yields() + } + this.HistoryManager = { + flushProjectChangesAsync: sinon.stub(), + shouldFlushHistoryOps: sinon.stub().returns(false) + } + this.Metrics = { + Timer: class Timer {} + } + this.Metrics.Timer.prototype.done = sinon.stub() + + this.logger = { + log: sinon.stub(), + error: sinon.stub() + } + this.ProjectManager = SandboxedModule.require(modulePath, { requires: { - './RedisManager': (this.RedisManager = {}), - './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), - './DocumentManager': (this.DocumentManager = {}), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - error: sinon.stub() - }), - './HistoryManager': (this.HistoryManager = {}), - './Metrics': (this.Metrics = { - Timer: (Timer = (function () { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub() - } - } - Timer.initClass() - return Timer - })()) - }) + './RedisManager': this.RedisManager, + './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager, + './DocumentManager': this.DocumentManager, + 'logger-sharelatex': this.logger, + './HistoryManager': this.HistoryManager, + './Metrics': this.Metrics } }) @@ -47,8 +48,6 @@ describe('ProjectManager', function () { this.projectHistoryId = 'history-id-123' this.user_id = 'user-id-123' this.version = 1234567 - this.HistoryManager.shouldFlushHistoryOps = 
sinon.stub().returns(false) - this.HistoryManager.flushProjectChangesAsync = sinon.stub() this.callback = sinon.stub() }) @@ -72,10 +71,6 @@ describe('ProjectManager', function () { newPathname: 'bar2' } this.fileUpdates = [this.firstFileUpdate] - this.DocumentManager.renameDocWithLock = sinon.stub().yields() - this.ProjectHistoryRedisManager.queueRenameEntity = sinon - .stub() - .yields() }) describe('successfully', function () { @@ -152,9 +147,7 @@ describe('ProjectManager', function () { describe('when renaming a doc fails', function () { beforeEach(function () { this.error = new Error('error') - this.DocumentManager.renameDocWithLock = sinon - .stub() - .yields(this.error) + this.DocumentManager.renameDocWithLock.yields(this.error) this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, @@ -174,9 +167,7 @@ describe('ProjectManager', function () { describe('when renaming a file fails', function () { beforeEach(function () { this.error = new Error('error') - this.ProjectHistoryRedisManager.queueRenameEntity = sinon - .stub() - .yields(this.error) + this.ProjectHistoryRedisManager.queueRenameEntity.yields(this.error) this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, @@ -195,7 +186,7 @@ describe('ProjectManager', function () { describe('with enough ops to flush', function () { beforeEach(function () { - this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) + this.HistoryManager.shouldFlushHistoryOps.returns(true) this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, @@ -235,7 +226,6 @@ describe('ProjectManager', function () { url: 'filestore.example.com/3' } this.fileUpdates = [this.firstFileUpdate, this.secondFileUpdate] - this.ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields() }) describe('successfully', function () { @@ -333,9 +323,7 @@ describe('ProjectManager', function () { describe('when adding a doc fails', function () { beforeEach(function () { this.error = new Error('error') - this.ProjectHistoryRedisManager.queueAddEntity = sinon - .stub() - .yields(this.error) + this.ProjectHistoryRedisManager.queueAddEntity.yields(this.error) this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, @@ -355,9 +343,7 @@ describe('ProjectManager', function () { describe('when adding a file fails', function () { beforeEach(function () { this.error = new Error('error') - this.ProjectHistoryRedisManager.queueAddEntity = sinon - .stub() - .yields(this.error) + this.ProjectHistoryRedisManager.queueAddEntity.yields(this.error) this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, @@ -376,7 +362,7 @@ describe('ProjectManager', function () { describe('with enough ops to flush', function () { beforeEach(function () { - this.HistoryManager.shouldFlushHistoryOps = sinon.stub().returns(true) + this.HistoryManager.shouldFlushHistoryOps.returns(true) this.ProjectManager.updateProjectWithLocks( this.project_id, this.projectHistoryId, From 759d385be8a1c6d5550c1b56e5fdf86939403f6c Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 15 May 2020 08:41:18 -0400 Subject: [PATCH 674/769] Decaf cleanup: unused variables --- .../test/unit/js/ProjectManager/updateProjectTests.js | 6 ------ 1 file changed, 6 deletions(-) diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index 94d132f962..d17f80d44b 100644 --- 
a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -1,10 +1,4 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') const _ = require('lodash') From 65fe74fd2dde6f36abeef9ac6de96807a21bf838 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 15 May 2020 14:29:49 -0400 Subject: [PATCH 675/769] Upgrade Sinon --- services/document-updater/package-lock.json | 445 +++++++++++------- services/document-updater/package.json | 2 +- .../test/acceptance/js/FlushingDocsTests.js | 38 +- .../acceptance/js/GettingADocumentTests.js | 28 +- .../acceptance/js/SettingADocumentTests.js | 36 +- .../DocumentManager/DocumentManagerTests.js | 17 +- .../HistoryRedisManagerTests.js | 2 +- .../js/HttpController/HttpControllerTests.js | 24 +- .../unit/js/LockManager/ReleasingTheLock.js | 2 +- .../test/unit/js/LockManager/getLockTests.js | 10 +- .../test/unit/js/LockManager/tryLockTests.js | 5 +- .../PersistenceManagerTests.js | 17 +- .../flushAndDeleteProjectTests.js | 4 +- .../js/ProjectManager/flushProjectTests.js | 4 +- .../js/ProjectManager/getProjectDocsTests.js | 11 +- .../RealTimeRedisManagerTests.js | 2 +- .../unit/js/RedisManager/RedisManagerTests.js | 24 +- .../test/unit/js/ShareJsDB/ShareJsDBTests.js | 5 +- .../ShareJsUpdateManagerTests.js | 4 +- 19 files changed, 404 insertions(+), 276 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index e3d34fc959..6d838e6217 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -160,7 +160,7 @@ "@google-cloud/debug-agent": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", - "integrity": "sha512-fP87kYbS6aeDna08BivwQ1J260mwJGchRi99XdWCgqbRwuFac8ul0OT5i2wEeDSc5QaDX8ZuWQQ0igZvh1rTyQ==", + "integrity": "sha1-2qdjWhaYpWY31dxXzhED536uKdM=", "requires": { "@google-cloud/common": "^0.32.0", "@sindresorhus/is": "^0.15.0", @@ -393,7 +393,7 @@ "@google-cloud/profiler": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", - "integrity": "sha512-rNvtrFtIebIxZEJ/O0t8n7HciZGIXBo8DvHxWqAmsCaeLvkTtsaL6HmPkwxrNQ1IhbYWAxF+E/DwCiHyhKmgTg==", + "integrity": "sha1-Fj3738Mwuug1X+RuHlvgZTV7H1w=", "requires": { "@google-cloud/common": "^0.26.0", "@types/console-log-level": "^1.4.0", @@ -415,7 +415,7 @@ "@google-cloud/common": { "version": "0.26.2", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz", - "integrity": "sha512-xJ2M/q3MrUbnYZuFlpF01caAlEhAUoRn0NXp93Hn3pkFpfSOG8YfbKbpBAHvcKVbBOAKVIwPsleNtuyuabUwLQ==", + "integrity": "sha1-nFTiRxqEqgMelaJIJJduCA8lVkU=", "requires": { "@google-cloud/projectify": "^0.3.2", "@google-cloud/promisify": "^0.3.0", @@ -439,7 +439,7 @@ "@google-cloud/promisify": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", - "integrity": "sha512-QzB0/IMvB0eFxFK7Eqh+bfC8NLv3E9ScjWQrPOk6GgfNroxcVITdTlT8NRsRrcp5+QQJVPLkRqKG0PUdaWXmHw==" + "integrity": "sha1-9kHm2USo4KBe4MsQkd+mAIm+zbo=" }, "agent-base": { "version": "4.3.0", @@ -452,7 +452,7 @@ "arrify": { "version": "1.0.1", "resolved": 
"https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" }, "debug": { "version": "3.2.6", @@ -476,7 +476,7 @@ "gcp-metadata": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", - "integrity": "sha512-caV4S84xAjENtpezLCT/GILEAF5h/bC4cNqZFmt/tjTn8t+JBtTkQrgBrJu3857YdsnlM8rxX/PMcKGtE8hUlw==", + "integrity": "sha1-H510lfdGChRSZIHynhFZbdVj3SY=", "requires": { "gaxios": "^1.0.2", "json-bigint": "^0.3.0" @@ -485,7 +485,7 @@ "google-auth-library": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", - "integrity": "sha512-FURxmo1hBVmcfLauuMRKOPYAPKht3dGuI2wjeJFalDUThO0HoYVjr4yxt5cgYSFm1dgUpmN9G/poa7ceTFAIiA==", + "integrity": "sha1-ejFdIDZ0Svavyth7IQ7mY4tA9Xs=", "requires": { "axios": "^0.18.0", "gcp-metadata": "^0.7.0", @@ -499,7 +499,7 @@ "gcp-metadata": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz", - "integrity": "sha512-ffjC09amcDWjh3VZdkDngIo7WoluyC5Ag9PAYxZbmQLOLNI8lvPtoKTSCyU54j2gwy5roZh6sSMTfkY2ct7K3g==", + "integrity": "sha1-bDXbtSvaMqQnu5yY9UI33dG1QG8=", "requires": { "axios": "^0.18.0", "extend": "^3.0.1", @@ -602,7 +602,7 @@ "@google-cloud/trace-agent": { "version": "3.6.1", "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", - "integrity": "sha512-KDo85aPN4gSxJ7oEIOlKd7aGENZFXAM1kbIn1Ds+61gh/K1CQWSyepgJo3nUpAwH6D1ezDWV7Iaf8ueoITc8Uw==", + "integrity": "sha1-W+dEE5TQ6ldY8o25IqUAT/PwO+w=", "requires": { "@google-cloud/common": "^0.32.1", "builtin-modules": "^3.0.0", @@ -829,27 +829,27 @@ "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" }, "@protobufjs/base64": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", - "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + "integrity": "sha1-TIVzDlm5ofHzSQR9vyQpYDS7JzU=" }, "@protobufjs/codegen": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", - "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + "integrity": "sha1-fvN/DQEPsCitGtWXIuUG2SYoFcs=" }, "@protobufjs/eventemitter": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" }, "@protobufjs/fetch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", "requires": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" @@ -858,32 +858,93 @@ "@protobufjs/float": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": 
"sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" }, "@protobufjs/inquire": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" }, "@protobufjs/path": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" }, "@protobufjs/pool": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" }, "@protobufjs/utf8": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" }, "@sindresorhus/is": { "version": "0.15.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", - "integrity": "sha512-lu8BpxjAtRCAo5ifytTpCPCj99LF7o/2Myn+NXyNCBqvPYn7Pjd76AMmUB5l7XF1U6t0hcWrlEM5ESufW7wAeA==" + "integrity": "sha1-lpFbqgXmpqHRN7rfSYTT/AWCC7Y=" + }, + "@sinonjs/commons": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.7.2.tgz", + "integrity": "sha512-+DUO6pnp3udV/v2VfUWgaY5BIE1IfT7lLfeDzPVeMT1XKkaAp9LgSI9x5RtrFQoZ9Oi0PgXQQHPaoKu7dCjVxw==", + "dev": true, + "requires": { + "type-detect": "4.0.8" + }, + "dependencies": { + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + } + } + }, + "@sinonjs/fake-timers": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz", + "integrity": "sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.7.0" + } + }, + "@sinonjs/formatio": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-5.0.1.tgz", + "integrity": "sha512-KaiQ5pBf1MpS09MuA0kp6KBQt2JUOQycqVG1NZXvzeaXe5LGFqAKueIS0bw4w0P9r7KuBSVdUk5QjXsUdu2CxQ==", + "dev": true, + "requires": { + "@sinonjs/commons": "^1", + "@sinonjs/samsam": "^5.0.2" + } + }, + "@sinonjs/samsam": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-5.0.3.tgz", + "integrity": "sha512-QucHkc2uMJ0pFGjJUDP3F9dq5dx8QIaqISl9QgwLOh6P9yv877uONPGXh/OH/0zmM3tW1JjuJltAZV2l7zU+uQ==", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.6.0", + "lodash.get": "^4.4.2", + "type-detect": "^4.0.8" + }, + "dependencies": { + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + } + } + }, + "@sinonjs/text-encoding": { + "version": "0.7.1", + "resolved": 
"https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz", + "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==", + "dev": true }, "@tootallnate/once": { "version": "1.0.0", @@ -893,7 +954,7 @@ "@types/caseless": { "version": "0.12.2", "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", - "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==" + "integrity": "sha1-9l09Y4ngHutFi9VNyPUrlalGO8g=" }, "@types/color-name": { "version": "1.1.1", @@ -904,12 +965,12 @@ "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" + "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" }, "@types/duplexify": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz", - "integrity": "sha512-5zOA53RUlzN74bvrSGwjudssD9F3a797sDZQkiYpUOxW+WHaXTCPz4/d5Dgi6FKnOqZ2CpaTo0DhgIfsXAOE/A==", + "integrity": "sha1-38grZL06IWj1vSZESvFlvwI33Ng=", "requires": { "@types/node": "*" } @@ -970,7 +1031,7 @@ "@types/semver": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", - "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" + "integrity": "sha1-FGwqKe59O65L8vyydGNuJkyBPEU=" }, "@types/tough-cookie": { "version": "2.3.6", @@ -1033,7 +1094,7 @@ "abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "integrity": "sha1-6vVNU7YrrkE46AnKIlyEOabvs5I=", "requires": { "event-target-shim": "^5.0.0" } @@ -1182,7 +1243,7 @@ "arrify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", - "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" + "integrity": "sha1-yWVekzHgq81YjSp8rX6ZVvZnAfo=" }, "asn1": { "version": "0.2.4", @@ -1200,7 +1261,7 @@ "assertion-error": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "integrity": "sha1-5gtrDo8wG9l+U3UhW9pAbIURjAs=", "dev": true }, "ast-types-flow": { @@ -1226,7 +1287,7 @@ "async-listener": { "version": "0.6.10", "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", - "integrity": "sha512-gpuo6xOyF4D5DE5WvyqZdPA3NGhiT6Qf07l7DCB0wwDEsLvDIbCr6j9S5aj5Ch96dLace5tXVzWBZkxU/c5ohw==", + "integrity": "sha1-p8l6vlcLpgLXgic8DeYKUePhfLw=", "requires": { "semver": "^5.3.0", "shimmer": "^1.1.0" @@ -1242,12 +1303,12 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==" + "integrity": 
"sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" }, "aws4": { "version": "1.9.1", @@ -1257,7 +1318,7 @@ "axios": { "version": "0.18.1", "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", - "integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==", + "integrity": "sha1-/z8N4ue10YDnV62YAA8Qgbh7zqM=", "requires": { "follow-redirects": "1.5.10", "is-buffer": "^2.0.2" @@ -1286,7 +1347,7 @@ "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha512-9Y0g0Q8rmSt+H33DfKv7FOc3v+iRI+o1lbzt8jGcIosYW37IIW/2XVYq5NPdmaD5NQ59Nk26Kl/vZbwW9Fr8vg==" + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, "base64-js": { "version": "1.3.1", @@ -1304,12 +1365,12 @@ "bignumber.js": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", - "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" + "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" }, "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", "requires": { "file-uri-to-path": "1.0.0" } @@ -1317,7 +1378,7 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" + "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, "bl": { "version": "2.2.0", @@ -1379,7 +1440,7 @@ "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", "dev": true }, "bson": { @@ -1390,17 +1451,17 @@ "buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" }, "builtin-modules": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", - "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==" + "integrity": "sha1-qtl8FRMet2tltQ7yCOdYTNdqdIQ=" }, "bunyan": { "version": "0.22.3", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-0.22.3.tgz", - "integrity": "sha512-v9dd5qmd6nJHEi7fiNo1fR2pMpE8AiB47Ap984p4iJKj+dEA69jSccmq6grFQn6pxIh0evvKpC5XO1SKfiaRoQ==", + "integrity": "sha1-ehncG0yMZF90AkGnQPIkUUfGfsI=", "requires": { "dtrace-provider": "0.2.8", "mv": "~2" @@ -1414,21 +1475,6 @@ } } }, - "buster-core": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/buster-core/-/buster-core-0.6.4.tgz", - "integrity": "sha512-WxitPqvzr2J7AA2eLEddv72XYaunQUDI0ICZhd1ucT/HhQI6JqfA7WQtoBoVZSgUn1+7uf9r7Plhh8PdO2+Kjg==", - "dev": true - }, - "buster-format": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/buster-format/-/buster-format-0.5.6.tgz", - "integrity": "sha512-AiH2uZZSwyhgtM7l8/A/7HaqUBJnXwcj9oUPIrSvTKTOo/Go0KJiqnGnRRl4zGqIf3IK3pC/KfkgJMgn6lpyzw==", - "dev": 
true, - "requires": { - "buster-core": "=0.6.4" - } - }, "bytes": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", @@ -1470,12 +1516,12 @@ "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" }, "chai": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", - "integrity": "sha512-eRYY0vPS2a9zt5w5Z0aCeWbrXTEyvk7u/Xf71EzNObrjSCPgMm1Nku/D/u2tiqHBX5j40wWhj54YJLtgn8g55A==", + "integrity": "sha1-TQJjewZ/6Vi9v906QOxW/vc3Mkc=", "dev": true, "requires": { "assertion-error": "^1.0.1", @@ -1486,7 +1532,7 @@ "chai-spies": { "version": "0.7.1", "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", - "integrity": "sha512-ezo+u5DUDjPhOYkgsjbbVhtdzsnVr6n2CL/juJA89YnBsWO4ocL14Ake0txlGrGZo/HwcfhFGaV0czdunr3tHA==", + "integrity": "sha1-ND2Z9RJEIS6LF+ZLk5lv97LCqbE=", "dev": true }, "chalk": { @@ -1576,12 +1622,12 @@ "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" + "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" }, "contains-path": { "version": "0.1.0", @@ -1605,7 +1651,7 @@ "continuation-local-storage": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz", - "integrity": "sha512-jx44cconVqkCEEyLSKWwkvUXwO561jXMa3LPjTPsm5QR22PA0/mhe33FT4Xb5y74JDvt/Cq+5lm8S8rskLv9ZA==", + "integrity": "sha1-EfYT906RT+mzTJKtLSj+auHbf/s=", "requires": { "async-listener": "^0.6.0", "emitter-listener": "^1.1.1" @@ -1644,7 +1690,7 @@ "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "cp-file": { "version": "6.2.0", @@ -1697,7 +1743,7 @@ "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", "requires": { "assert-plus": "^1.0.0" } @@ -1718,7 +1764,7 @@ "deep-eql": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", - "integrity": "sha512-6sEotTRGBFiNcqVoeHwnfopbSpi5NbH1VWJmYCVkmxMmaVTT0bUTrNaGyBwhgP4MZL012W/mkzIn3Da+iDYweg==", + "integrity": "sha1-71WKyrjeJSBs1xOQbXTlaTDrafI=", "dev": true, "requires": { "type-detect": "0.1.1" @@ -1727,7 +1773,7 @@ "type-detect": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", - "integrity": "sha512-5rqszGVwYgBoDkIm2oUtvkfZMQ0vk29iDMU0W2qCa3rG0vPDNczCMT4hV/bLBgLg8k8ri6+u3Zbt+S/14eMzlA==", + "integrity": "sha1-C6XsKohWQORw6k6FBZcZANrFiCI=", "dev": true } } @@ -1758,7 +1804,7 @@ "delay": { "version": "4.3.0", 
"resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", - "integrity": "sha512-Lwaf3zVFDMBop1yDuFZ19F9WyGcZcGacsbdlZtWjQmM50tOcMntm1njF/Nb/Vjij3KaSvCF+sEYGKrrjObu2NA==" + "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" }, "delayed-stream": { "version": "1.0.0", @@ -1768,7 +1814,7 @@ "denque": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", - "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==" + "integrity": "sha1-Z0T/dkHBSMP4ppwwflEjXB9KN88=" }, "depd": { "version": "1.1.2", @@ -1783,7 +1829,7 @@ "diff": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", "dev": true }, "dlv": { @@ -1821,7 +1867,7 @@ "duplexify": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", - "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", + "integrity": "sha1-Kk31MX9sz9kfhtb9JdjYoQO4gwk=", "requires": { "end-of-stream": "^1.0.0", "inherits": "^2.0.1", @@ -1861,7 +1907,7 @@ "each-series": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz", - "integrity": "sha512-4MQloCGGCmT5GJZK5ibgJSvTK1c1QSrNlDvLk6fEyRxjZnXjl+NNFfzhfXpmnWh33Owc9D9klrdzCUi7yc9r4Q==" + "integrity": "sha1-+Ibmxm39sl7x/nNWQUbuXLR4r8s=" }, "ecc-jsbn": { "version": "0.1.2", @@ -1875,7 +1921,7 @@ "ecdsa-sig-formatter": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "integrity": "sha1-rg8PothQRe8UqBfao86azQSJ5b8=", "requires": { "safe-buffer": "^5.0.1" } @@ -1883,7 +1929,7 @@ "emitter-listener": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", - "integrity": "sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ==", + "integrity": "sha1-VrFA6PaZI3Wz18ssqxzHQy2WMug=", "requires": { "shimmer": "^1.2.0" } @@ -1909,7 +1955,7 @@ "ent": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", - "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" + "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" }, "error-ex": { "version": "1.3.2", @@ -1957,12 +2003,12 @@ "es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" + "integrity": "sha1-TrIVlMlyvEBVPSduUQU5FD21Pgo=" }, "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", + "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", "requires": { "es6-promise": "^4.0.3" } @@ -1975,7 +2021,7 @@ "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + 
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "eslint": { "version": "6.6.0", @@ -2565,7 +2611,7 @@ "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" + "integrity": "sha1-XU0+vflYPWOlMzzi3rdICrKwV4k=" }, "eventid": { "version": "1.0.0", @@ -2632,7 +2678,7 @@ "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" }, "fast-deep-equal": { "version": "3.1.1", @@ -2659,7 +2705,7 @@ "fast-text-encoding": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", - "integrity": "sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==" + "integrity": "sha1-PlzoKTQJz6pxd6cbnKhOGx5vJe8=" }, "figures": { "version": "3.2.0", @@ -2682,7 +2728,7 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" }, "finalhandler": { "version": "1.1.2", @@ -2719,7 +2765,7 @@ "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" + "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, "flat-cache": { "version": "2.0.1", @@ -2766,7 +2812,7 @@ "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", - "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", + "integrity": "sha1-e3qfmuov3/NnhqlP9kPtB/T/Xio=", "requires": { "debug": "=3.1.0" }, @@ -2828,7 +2874,7 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "function-bind": { "version": "1.1.1", @@ -2877,7 +2923,7 @@ "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", "requires": { "assert-plus": "^1.0.0" } @@ -2885,7 +2931,7 @@ "glob": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", "optional": true, "requires": { "inflight": "^1.0.4", @@ -3009,7 +3055,7 @@ "har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==" + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" }, "har-validator": { "version": "5.1.3", @@ -3075,13 +3121,13 @@ 
"he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==", + "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", "dev": true }, "hex2dec": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", - "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" + "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" }, "hosted-git-info": { "version": "2.8.8", @@ -3217,7 +3263,7 @@ "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", "requires": { "once": "^1.3.0", "wrappy": "1" @@ -3389,7 +3435,7 @@ "is": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz", - "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==" + "integrity": "sha1-Yc/23TxBk9uUo9YlggcrROVkXXk=" }, "is-arrayish": { "version": "0.2.1", @@ -3475,7 +3521,7 @@ "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" }, "isarray": { "version": "1.0.0", @@ -3490,7 +3536,7 @@ "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, "istanbul-lib-coverage": { "version": "2.0.5", @@ -3612,7 +3658,7 @@ "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" }, "jsesc": { "version": "2.5.2", @@ -3622,7 +3668,7 @@ "json-bigint": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", - "integrity": "sha512-u+c/u/F+JNPUekHCFyGVycRPyh9UHD5iUhSyIAn10kxbDTJxijwAbT6XHaONEOXuGGfmWUSroheXgHcml4gLgg==", + "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", "requires": { "bignumber.js": "^7.0.0" } @@ -3635,7 +3681,7 @@ "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha512-a3xHnILGMtk+hDOqNwHzF6e2fNbiMrXZvxKQiEv2MlgQP+pjIOzqAmKYD2mDpXYE/44M7g+n9p2bKkYWDUcXCQ==" + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" }, "json-schema-traverse": { "version": "0.4.1", @@ -3651,12 +3697,12 @@ "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha512-4Dj8Rf+fQ+/Pn7C5qeEX02op1WfOss3PKTE9Nsop3Dx+6UPxlm1dr/og7o2cRa5hNN07CACr4NFzRLtj/rjWog==", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", "requires": { "assert-plus": 
"1.0.0", "extsprintf": "1.3.0", @@ -3674,6 +3720,12 @@ "object.assign": "^4.1.0" } }, + "just-extend": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.1.0.tgz", + "integrity": "sha512-ApcjaOdVTJ7y4r08xI5wIqpvwS48Q0PBG4DJROcEkH1f8MdAiNFyFxz3xoL0LWAVwjrwPYZdVHHxhRHcx/uGLA==", + "dev": true + }, "jwa": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", @@ -3748,18 +3800,24 @@ "lodash.defaults": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", - "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==" + "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" }, "lodash.flatten": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", - "integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==" + "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=" }, "lodash.flattendeep": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=" }, + "lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", + "dev": true + }, "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", @@ -3780,7 +3838,7 @@ "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" + "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" }, "lodash.unescape": { "version": "4.0.1", @@ -3912,7 +3970,7 @@ "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", - "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" + "integrity": "sha1-mntxz7fTYaGU6lVSQckvdGjVvyg=" }, "loose-envify": { "version": "1.4.0", @@ -3926,7 +3984,7 @@ "lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "integrity": "sha1-HaJ+ZxAnGUdpXa9oSOhH8B2EuSA=", "requires": { "yallist": "^3.0.2" } @@ -3934,7 +3992,7 @@ "lsmod": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha512-Y+6V75r+mGWzWEPr9h6PFmStielICu5JBHLUg18jCsD2VFmEfgHbq/EgnY4inElsUD9eKL9id1qp34w46rSIKQ==" + "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" }, "lynx": { "version": "0.1.1", @@ -4011,7 +4069,7 @@ "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" + "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" }, "messageformat": { "version": "2.3.0", @@ -4090,7 +4148,7 @@ "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", "requires": { "brace-expansion": "^1.1.7" } @@ -4098,17 +4156,17 @@ 
"minimist": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha512-miQKw5Hv4NS1Psg2517mV4e4dYNaO3++hjAvLOAzKqZ61rH8NS1SK+vbfBWZ5PY/Me/bEWhUwqMghEW5Fb9T7Q==" + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, "mkdirp": { "version": "0.3.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", - "integrity": "sha512-8OCq0De/h9ZxseqzCH8Kw/Filf5pF/vMI6+BH7Lu0jXz2pqYCjTAQRolSxRIi+Ax+oCCjlxoJMP0YQ4XlrQNHg==" + "integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=" }, "mocha": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": "sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", + "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", "dev": true, "requires": { "browser-stdout": "1.3.1", @@ -4127,7 +4185,7 @@ "commander": { "version": "2.15.1", "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", + "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=", "dev": true }, "debug": { @@ -4142,7 +4200,7 @@ "glob": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -4156,7 +4214,7 @@ "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "dev": true, "requires": { "minimist": "0.0.8" @@ -4173,12 +4231,12 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" + "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { "version": "2.24.0", "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", - "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==", + "integrity": "sha1-DQVdU/UFKqZTyfbraLtdEr9cK1s=", "optional": true }, "mongodb": { @@ -4223,7 +4281,7 @@ "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==", + "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", "optional": true, "requires": { "mkdirp": "~0.5.1", @@ -4234,7 +4292,7 @@ "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "optional": true, "requires": { "minimist": "0.0.8" @@ -4256,7 +4314,7 @@ "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==", + "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", "optional": true }, "negotiator": { @@ -4275,10 +4333,40 @@ "integrity": 
"sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", "dev": true }, + "nise": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/nise/-/nise-4.0.3.tgz", + "integrity": "sha512-EGlhjm7/4KvmmE6B/UFsKh7eHykRl9VH+au8dduHLCyWUO/hr7+N+WtTvDUwc9zHuM1IaIJs/0lQ6Ag1jDkQSg==", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.7.0", + "@sinonjs/fake-timers": "^6.0.0", + "@sinonjs/text-encoding": "^0.7.1", + "just-extend": "^4.0.2", + "path-to-regexp": "^1.7.0" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", + "dev": true + }, + "path-to-regexp": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", + "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", + "dev": true, + "requires": { + "isarray": "0.0.1" + } + } + } + }, "node-fetch": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" + "integrity": "sha1-5jNFY4bUqlWGP2dqerDaqP3ssP0=" }, "node-forge": { "version": "0.9.1", @@ -4452,7 +4540,7 @@ "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "requires": { "wrappy": "1" } @@ -4510,7 +4598,7 @@ "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=" }, "package-hash": { "version": "3.0.0", @@ -4549,12 +4637,12 @@ "parse-mongo-url": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz", - "integrity": "sha512-7bZUusQIrFLwvsLHBnCz2WKYQ5LKO/LwKPnvQxbMIh9gDx8H5ZsknRmLjZdn6GVdrgVOwqDrZKsY0qDLNmRgcw==" + "integrity": "sha1-ZiON9fjnwMjKTNlw1KtqE3PrdbU=" }, "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" + "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" }, "parseurl": { "version": "1.3.3", @@ -4569,7 +4657,7 @@ "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-is-inside": { "version": "1.0.2", @@ -4586,7 +4674,7 @@ "path-parse": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + "integrity": "sha1-1i27VnlAXXLEc37FhgDp3c8G0kw=" }, "path-to-regexp": { "version": "0.1.7", @@ -4611,12 +4699,12 @@ "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": 
"sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "pify": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" + "integrity": "sha1-SyzSXFDVmHNcUCkiJP2MbfQeMjE=" }, "pkg-dir": { "version": "3.0.0", @@ -5282,7 +5370,7 @@ "pretty-ms": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", - "integrity": "sha512-qG66ahoLCwpLXD09ZPHSCbUWYTqdosB7SMP4OffgTgL2PBKXMuUsrk5Bwg8q4qPkjTXsKBMr+YK3Ltd/6F9s/Q==", + "integrity": "sha1-Mbr0G5T9AiJwmKqgO9YmCOsNbpI=", "requires": { "parse-ms": "^2.0.0" } @@ -5320,7 +5408,7 @@ "protobufjs": { "version": "6.8.8", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", - "integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==", + "integrity": "sha1-yLTxKC/XqQ5vWxCe0RyEr4KQjnw=", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -5396,7 +5484,7 @@ "q": { "version": "0.9.2", "resolved": "https://registry.npmjs.org/q/-/q-0.9.2.tgz", - "integrity": "sha512-ZOxMuWPMJnsUdYhuQ9glpZwKhB4cm8ubYFy1nNCY8TkSAuZun5fd8jCDTlf2ykWnK8x9HGn1stNtLeG179DebQ==" + "integrity": "sha1-I8BsRsgTKGFqrhaNPuI6Vr1D2vY=" }, "qs": { "version": "6.7.0", @@ -5423,7 +5511,7 @@ "raven": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha512-RYov4wAaflZasWiCrZuizd3jNXxCOkW1WrXgWsGVb8kRpdHNZ+vPY27R6RhVtqzWp+DG9a5l6iP0QUPK4EgzaQ==", + "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", "requires": { "cookie": "0.3.1", "json-stringify-safe": "5.0.1", @@ -5435,7 +5523,7 @@ "cookie": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==" + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, "uuid": { "version": "3.0.0", @@ -5493,22 +5581,22 @@ "redis": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz", - "integrity": "sha512-wkgzIZ9HuxJ6Sul1IW/6FG13Ecv6q8kmdHb5xo09Hu6bgWzz5qsnM06SVMpDxFNbyApaRjy8CwnmVaRMMhAMWg==" + "integrity": "sha1-/cAdSrTL5LO7LLKByP5WnDhX9XE=" }, "redis-commands": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==" + "integrity": "sha1-gNLiBpj+aI8icSf/nlFkp90X54U=" }, "redis-errors": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", - "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==" + "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60=" }, "redis-parser": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", - "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", + "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=", "requires": { "redis-errors": "^1.0.0" } @@ -5516,7 +5604,7 @@ "redis-sentinel": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz", - "integrity": 
"sha512-cKtLSUzDsKmsB50J1eIV/SH11DSMiHgsm/gDPRCU5lXz5OyTSuLKWg9oc8d5n74kZwtAyRkfJP0x8vYXvlPjFQ==", + "integrity": "sha1-Vj3TQduZMgMfSX+v3Td+hkj/s+U=", "requires": { "q": "0.9.2", "redis": "0.11.x" @@ -5673,7 +5761,7 @@ "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", + "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=", "dev": true }, "require-main-filename": { @@ -5690,7 +5778,7 @@ "require_optional": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", - "integrity": "sha512-qhM/y57enGWHAe3v/NcwML6a3/vfESLe/sGM2dII+gEO0BpKRUkWZow/tyloNqJyN6kXSl3RyyM8Ll5D/sJP8g==", + "integrity": "sha1-TPNaQkf2TKPfjC7yCMxJSxyo/C4=", "requires": { "resolve-from": "^2.0.0", "semver": "^5.1.0" @@ -5714,7 +5802,7 @@ "resolve-from": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", - "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==" + "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" }, "restore-cursor": { "version": "3.1.0", @@ -5729,7 +5817,7 @@ "retry-axios": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", - "integrity": "sha512-jp4YlI0qyDFfXiXGhkCOliBN1G7fRH03Nqy8YdShzGqbY5/9S2x/IR6C88ls2DFkbWuL3ASkP7QD3pVrNpPgwQ==" + "integrity": "sha1-V1fID1hbTMTEmGqi/9R6YMbTXhM=" }, "retry-request": { "version": "4.1.1", @@ -5758,7 +5846,7 @@ "rimraf": { "version": "2.4.5", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==", + "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", "optional": true, "requires": { "glob": "^6.0.1" @@ -5787,7 +5875,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", + "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", "optional": true }, "safer-buffer": { @@ -5798,7 +5886,7 @@ "sandboxed-module": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", - "integrity": "sha512-1QAd90eCdAnqVn2sLkRCCeFphH/TKLfoTcdyI6h9h2E+YEY+aKovggwzWWWi5IMObafl0W1wr+dQ5F6LFmjpzA==", + "integrity": "sha1-bL3sghOAx31FdcjIeDi5ET5kulA=", "dev": true, "requires": { "require-like": "0.1.2", @@ -5808,7 +5896,7 @@ "stack-trace": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", - "integrity": "sha512-5/6uZt7RYjjAl8z2j1mXWAewz+I4Hk2/L/3n6NRLIQ31+uQ7nMd9O6G69QCdrrufHv0QGRRHl/jwUEGTqhelTA==", + "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=", "dev": true } } @@ -5878,7 +5966,7 @@ "settings-sharelatex": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", - "integrity": "sha512-f7D+0lnlohoteSn6IKTH72NE+JnAdMWTKwQglAuimZWTID2FRRItZSGeYMTRpvEnaQApkoVwRp//WRMsiddnqw==", + "integrity": "sha1-Tv4vUpPbjxwVlnEEx5BfqHD/mS0=", "requires": { "coffee-script": "1.6.0" }, @@ -5886,7 +5974,7 @@ "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": 
"sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" } } }, @@ -5908,7 +5996,7 @@ "shimmer": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", - "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" + "integrity": "sha1-YQhZ994ye1h+/r9QH7QxF/mv8zc=" }, "side-channel": { "version": "1.0.2", @@ -5926,12 +6014,41 @@ "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" }, "sinon": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-1.5.2.tgz", - "integrity": "sha512-4I5YC02+PBQpCCPUydFuUpH4X4+t4IpFmKbP1gHthoFiD7yyLPx179im5jgUPw/O2BytFYnl6NLL4ijh585uiA==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.2.tgz", + "integrity": "sha512-0uF8Q/QHkizNUmbK3LRFqx5cpTttEVXudywY9Uwzy8bTfZUhljZ7ARzSxnRHWYWtVTeh4Cw+tTb3iU21FQVO9A==", "dev": true, "requires": { - "buster-format": "~0.5" + "@sinonjs/commons": "^1.7.2", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.0.3", + "diff": "^4.0.2", + "nise": "^4.0.1", + "supports-color": "^7.1.0" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "slice-ansi": { @@ -5957,7 +6074,7 @@ "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" }, "sparse-bitfield": { "version": "3.0.3", @@ -6048,7 +6165,7 @@ "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", "requires": { "through": "2" } @@ -6077,17 +6194,17 @@ "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==" + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" }, "standard-as-callback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" + "integrity": "sha1-7YuyVkjhWDF1m2Ajvbh+a2CzgSY=" }, "statsd-parser": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": 
"sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" + "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" }, "statuses": { "version": "1.5.0", @@ -6228,7 +6345,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", + "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", "requires": { "bintrees": "1.0.1" } @@ -6287,7 +6404,7 @@ "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, "through2": { "version": "3.0.1", @@ -6350,7 +6467,7 @@ "to-mongodb-core": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz", - "integrity": "sha512-vfXXcGYFP8+0L5IPOtUzzVIvPE/G3GN0TKa/PRBlzPqYyhm+UxhPmvv634EQgO4Ot8dHbBFihOslMJQclY8Z9A==" + "integrity": "sha1-NZbsdhOsmtO5ioncua77pWnNJ+s=" }, "to-no-case": { "version": "1.0.2", @@ -6411,7 +6528,7 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" }, "type-check": { "version": "0.3.2", @@ -6451,7 +6568,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" + "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" }, "unpipe": { "version": "1.0.0", @@ -6461,7 +6578,7 @@ "uri-js": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "integrity": "sha1-lMVA4f93KVbiKZUHwBCupsiDjrA=", "requires": { "punycode": "^2.1.0" } @@ -6469,7 +6586,7 @@ "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" }, "utils-merge": { "version": "1.0.1", @@ -6504,7 +6621,7 @@ "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", @@ -6593,7 +6710,7 @@ "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", - "integrity": "sha512-5cZ7mecD3eYcMiCH4wtRPA5iFJZ50BJYDfckI5RRpQiktMiYTcn0ccLTZOvcbBume+1304fQztxeNzNS9Gvrnw==" + "integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I=" }, "which": { "version": "1.3.1", @@ -6627,7 +6744,7 @@ "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "write": { "version": "1.0.3", diff --git 
a/services/document-updater/package.json b/services/document-updater/package.json index af5f3ff6ac..3f0b1e2de0 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -55,7 +55,7 @@ "prettier": "^2.0.5", "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "~0.2.0", - "sinon": "~1.5.2", + "sinon": "^9.0.2", "timekeeper": "^2.0.0" } } diff --git a/services/document-updater/test/acceptance/js/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js index 5556870f8e..109f89d434 100644 --- a/services/document-updater/test/acceptance/js/FlushingDocsTests.js +++ b/services/document-updater/test/acceptance/js/FlushingDocsTests.js @@ -121,26 +121,26 @@ describe('Flushing a doc to Mongo', function () { version: this.version }) let t = 30000 - sinon.stub( - MockWebApi, - 'setDocument', - ( - project_id, - doc_id, - lines, - version, - ranges, - lastUpdatedAt, - lastUpdatedBy, - callback - ) => { - if (callback == null) { - callback = function (error) {} + sinon + .stub(MockWebApi, 'setDocument') + .callsFake( + ( + project_id, + doc_id, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy, + callback + ) => { + if (callback == null) { + callback = function (error) {} + } + setTimeout(callback, t) + return (t = 0) } - setTimeout(callback, t) - return (t = 0) - } - ) + ) return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, done) }) diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js index fc97c8d825..a0b9de5773 100644 --- a/services/document-updater/test/acceptance/js/GettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js @@ -216,12 +216,14 @@ describe('Getting a document', function () { DocUpdaterClient.randomId(), DocUpdaterClient.randomId() ]) - sinon.stub(MockWebApi, 'getDocument', (project_id, doc_id, callback) => { - if (callback == null) { - callback = function (error, doc) {} - } - return callback(new Error('oops')) - }) + sinon + .stub(MockWebApi, 'getDocument') + .callsFake((project_id, doc_id, callback) => { + if (callback == null) { + callback = function (error, doc) {} + } + return callback(new Error('oops')) + }) return DocUpdaterClient.getDoc( this.project_id, this.doc_id, @@ -248,12 +250,14 @@ describe('Getting a document', function () { DocUpdaterClient.randomId(), DocUpdaterClient.randomId() ]) - sinon.stub(MockWebApi, 'getDocument', (project_id, doc_id, callback) => { - if (callback == null) { - callback = function (error, doc) {} - } - return setTimeout(callback, 30000) - }) + sinon + .stub(MockWebApi, 'getDocument') + .callsFake((project_id, doc_id, callback) => { + if (callback == null) { + callback = function (error, doc) {} + } + return setTimeout(callback, 30000) + }) return done() }) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 484d51b57c..54ca9d00d0 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -88,9 +88,9 @@ describe('Setting a document', function () { }) after(function () { - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + MockTrackChangesApi.flushDoc.resetHistory() + MockProjectHistoryApi.flushProject.resetHistory() + 
MockWebApi.setDocument.resetHistory() }) it('should return a 204 status code', function () { @@ -171,9 +171,9 @@ describe('Setting a document', function () { }) after(function () { - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + MockTrackChangesApi.flushDoc.resetHistory() + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() }) it('should return a 204 status code', function () { @@ -254,9 +254,9 @@ describe('Setting a document', function () { }) after(function () { - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + MockTrackChangesApi.flushDoc.resetHistory() + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() }) it(`should return a ${testCase.expectedStatusCode} status code`, function () { @@ -310,9 +310,9 @@ describe('Setting a document', function () { }) after(function () { - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + MockTrackChangesApi.flushDoc.resetHistory() + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() }) it('should return a 204 status code', function () { @@ -388,9 +388,9 @@ describe('Setting a document', function () { }) after(function () { - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + MockTrackChangesApi.flushDoc.resetHistory() + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() }) it('should undo the tracked changes', function (done) { @@ -451,9 +451,9 @@ describe('Setting a document', function () { }) after(function () { - MockTrackChangesApi.flushDoc.reset() - MockProjectHistoryApi.flushProject.reset() - MockWebApi.setDocument.reset() + MockTrackChangesApi.flushDoc.resetHistory() + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() }) it('should not undo the tracked changes', function (done) { diff --git a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js index 4baa5cee8d..295a643cee 100644 --- a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js +++ b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js @@ -52,7 +52,8 @@ describe('DocumentManager', function () { './RealTimeRedisManager': (this.RealTimeRedisManager = {}), './DiffCodec': (this.DiffCodec = {}), './UpdateManager': (this.UpdateManager = {}), - './RangesManager': (this.RangesManager = {}) + './RangesManager': (this.RangesManager = {}), + './Errors': Errors } }) this.project_id = 'project-id-123' @@ -765,10 +766,9 @@ describe('DocumentManager', function () { }) return it('should call the callback with a not found error', function () { - const error = new Errors.NotFoundError( - `document not found: ${this.doc_id}` - ) - return this.callback.calledWith(error).should.equal(true) + return this.callback + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) + .should.equal(true) }) }) }) @@ -848,10 +848,9 @@ describe('DocumentManager', function () { }) return it('should call the callback with a not found error', function () { - const error = new Errors.NotFoundError( - `document not found: ${this.doc_id}` - ) - return this.callback.calledWith(error).should.equal(true) + 
return this.callback + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) + .should.equal(true) }) }) }) diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js index 68321533b3..c578a6be22 100644 --- a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js @@ -95,7 +95,7 @@ describe('HistoryRedisManager', function () { return it('should call the callback with an error', function () { return this.callback - .calledWith(new Error('cannot push no ops')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 64751e55db..36cf990724 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -159,7 +159,7 @@ describe('HttpController', function () { it('should call next with NotFoundError', function () { this.next - .calledWith(new Errors.NotFoundError('not found')) + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) .should.equal(true) }) }) @@ -173,7 +173,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -252,7 +252,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) @@ -327,7 +327,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -387,7 +387,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -477,7 +477,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -550,7 +550,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -640,7 +640,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -703,7 +703,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -804,7 +804,7 @@ describe('HttpController', function 
() { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -870,7 +870,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) @@ -925,7 +925,7 @@ describe('HttpController', function () { }) it('should call next with the error', function () { - this.next.calledWith(new Error('oops')).should.equal(true) + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) }) }) }) diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index d0c88940cc..82b0bc7da7 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -92,7 +92,7 @@ describe('LockManager - releasing the lock', function () { return it('should return an error if the lock has expired', function () { return this.callback - .calledWith(new Error('tried to release timed out lock')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) diff --git a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js index 1c41f2b0b4..cbe805040b 100644 --- a/services/document-updater/test/unit/js/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js @@ -119,9 +119,13 @@ describe('LockManager - getting the lock', function () { }) return it('should return the callback with an error', function () { - const e = new Error('Timeout') - e.doc_id = this.doc_id - return this.callback.calledWith(e).should.equal(true) + return this.callback + .calledWith( + sinon.match + .instanceOf(Error) + .and(sinon.match.has('doc_id', this.doc_id)) + ) + .should.equal(true) }) }) }) diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js index 77aecd87d1..36be6321bd 100644 --- a/services/document-updater/test/unit/js/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -142,8 +142,9 @@ describe('LockManager - trying the lock', function () { }) return it('should return the callback with an error', function () { - const e = new Error('tried to release timed out lock') - return this.callback.calledWith(e).should.equal(true) + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) }) }) }) diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js index 98039d7823..645ee8a59b 100644 --- a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -41,7 +41,8 @@ describe('PersistenceManager', function () { 'logger-sharelatex': (this.logger = { log: sinon.stub(), err: sinon.stub() - }) + }), + './Errors': Errors } }) this.project_id = 'project-id-123' @@ -171,7 +172,7 @@ describe('PersistenceManager', function () { it('should return a NotFoundError', function () { return 
this.callback - .calledWith(new Errors.NotFoundError('not found')) + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) .should.equal(true) }) @@ -198,7 +199,7 @@ describe('PersistenceManager', function () { it('should return an error', function () { return this.callback - .calledWith(new Error('web api error')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) @@ -231,7 +232,7 @@ describe('PersistenceManager', function () { return it('should return and error', function () { return this.callback - .calledWith(new Error('web API response had no doc lines')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -254,7 +255,7 @@ describe('PersistenceManager', function () { return it('should return and error', function () { return this.callback - .calledWith(new Error('web API response had no valid doc version')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -277,7 +278,7 @@ describe('PersistenceManager', function () { return it('should return and error', function () { return this.callback - .calledWith(new Error('web API response had no valid doc pathname')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -386,7 +387,7 @@ describe('PersistenceManager', function () { it('should return a NotFoundError', function () { return this.callback - .calledWith(new Errors.NotFoundError('not found')) + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) .should.equal(true) }) @@ -418,7 +419,7 @@ describe('PersistenceManager', function () { it('should return an error', function () { return this.callback - .calledWith(new Error('web api error')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js index 266d1d1a2e..9589d42054 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js @@ -147,7 +147,9 @@ describe('ProjectManager - flushAndDeleteProject', function () { }) it('should call the callback with an error', function () { - return this.callback.calledWith(new Error()).should.equal(true) + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) }) return it('should time the execution', function () { diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js index 1907a26228..c0bb668f49 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js @@ -136,7 +136,9 @@ describe('ProjectManager - flushProject', function () { }) it('should call the callback with an error', function () { - return this.callback.calledWith(new Error()).should.equal(true) + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) }) return it('should time the execution', function () { diff --git a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js index a10b328864..db9f31e4ad 100644 --- a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js +++ 
b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js @@ -40,7 +40,8 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { Timer.initClass() return Timer })()) - }) + }), + './Errors': Errors } }) this.project_id = 'project-id-123' @@ -146,9 +147,7 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { it('should call the callback with an error', function () { return this.callback - .calledWith( - new Errors.ProjectStateChangedError('project state changed') - ) + .calledWith(sinon.match.instanceOf(Errors.ProjectStateChangedError)) .should.equal(true) }) @@ -194,7 +193,9 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { }) it('should call the callback with an error', function () { - return this.callback.calledWith(new Error('oops')).should.equal(true) + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) }) return it('should time the execution', function () { diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js index 9e2f2e270f..cda00ca9d8 100644 --- a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -118,7 +118,7 @@ describe('RealTimeRedisManager', function () { return it('should return an error to the callback', function () { return this.callback - .calledWith(new Error('JSON parse error')) + .calledWith(sinon.match.has('name', 'SyntaxError')) .should.equal(true) }) }) diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index e36df094f1..467c9a3c2f 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -398,7 +398,7 @@ describe('RedisManager', function () { return it('should return an error', function () { return this.callback - .calledWith(new Error('redis getDoc exceeded timeout')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -426,7 +426,7 @@ describe('RedisManager', function () { return it('should return an error', function () { return this.callback - .calledWith(new Errors.NotFoundError('not found')) + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) .should.equal(true) }) }) @@ -543,11 +543,7 @@ describe('RedisManager', function () { it('should return an error', function () { return this.callback - .calledWith( - new Errors.OpRangeNotAvailableError( - 'doc ops range is not loaded in redis' - ) - ) + .calledWith(sinon.match.instanceOf(Errors.OpRangeNotAvailableError)) .should.equal(true) }) @@ -588,7 +584,7 @@ describe('RedisManager', function () { return it('should return an error', function () { return this.callback - .calledWith(new Error('redis getPreviousDocOps exceeded timeout')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -854,9 +850,7 @@ describe('RedisManager', function () { return it('should call the callback with an error', function () { return this.callback - .calledWith( - new Error(`Version mismatch. 
'${this.doc_id}' is corrupted.`) - ) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -954,7 +948,7 @@ describe('RedisManager', function () { return it('should call the callback with an error', function () { return this.callback - .calledWith(new Error('null bytes found in doc lines')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -985,7 +979,7 @@ describe('RedisManager', function () { return it('should call the callback with the error', function () { return this.callback - .calledWith(new Error('ranges are too large')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -1157,7 +1151,7 @@ describe('RedisManager', function () { return it('should call the callback with an error', function () { return this.callback - .calledWith(new Error('null bytes found in doc lines')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) @@ -1185,7 +1179,7 @@ describe('RedisManager', function () { return it('should call the callback with the error', function () { return this.callback - .calledWith(new Error('ranges are too large')) + .calledWith(sinon.match.instanceOf(Error)) .should.equal(true) }) }) diff --git a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js index 7f7d377c1d..ddf98775d8 100644 --- a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js +++ b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js @@ -26,7 +26,8 @@ describe('ShareJsDB', function () { this.callback = sinon.stub() this.ShareJsDB = SandboxedModule.require(modulePath, { requires: { - './RedisManager': (this.RedisManager = {}) + './RedisManager': (this.RedisManager = {}), + './Errors': Errors } }) @@ -68,7 +69,7 @@ describe('ShareJsDB', function () { return it('should return the callback with a NotFoundError', function () { return this.callback - .calledWith(new Errors.NotFoundError('not found')) + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) .should.equal(true) }) }) diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js index 56afe4c584..ff8b74b8e6 100644 --- a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js +++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js @@ -194,7 +194,9 @@ describe('ShareJsUpdateManager', function () { }) return it('should call the callback with the error', function () { - return this.callback.calledWith(this.error).should.equal(true) + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) }) }) }) From 9799b94752e1b53975039921dfedb42ea80e554d Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 15 May 2020 15:54:31 -0400 Subject: [PATCH 676/769] Accept ordered doc and file updates Add an `updates` parameter to the project update endpoint. It can be used instead of `docUpdates` and `fileUpdates` to provide a single list of updates in the order they should be processed. 
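For illustration, a request using the new interface might look like the sketch below. This is only a sketch: the field values echo the unit tests added in this patch rather than a real project, and the localhost URL is the one the test helpers use.

    POST http://localhost:3003/project/:project_id
    {
      "projectHistoryId": "history-id-123",
      "userId": "user-id-123",
      "version": 1234567,
      "updates": [
        { "type": "rename-doc", "id": 1, "pathname": "thesis.tex", "newPathname": "book.tex" },
        { "type": "add-doc", "id": 2, "pathname": "article.tex", "docLines": "hello" },
        { "type": "rename-file", "id": 3, "pathname": "apple.png", "newPathname": "banana.png" },
        { "type": "add-file", "id": 4, "url": "filestore.example.com/4" }
      ]
    }

The `type` field is one of `add-doc`, `rename-doc`, `add-file` or `rename-file`. When a client still sends the legacy `docUpdates`/`fileUpdates` parameters, the controller merges them into one ordered list, inferring `add-doc`/`add-file` from the presence of `docLines`/`url` and treating the rest as renames.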
--- services/document-updater/.eslintrc | 2 +- .../document-updater/app/js/HttpController.js | 32 +++- .../document-updater/app/js/ProjectManager.js | 154 +++++++++--------- .../js/HttpController/HttpControllerTests.js | 113 ++++++++++++- .../js/ProjectManager/updateProjectTests.js | 64 +++++--- 5 files changed, 250 insertions(+), 115 deletions(-) diff --git a/services/document-updater/.eslintrc b/services/document-updater/.eslintrc index 2e945d6ffb..76dad1561d 100644 --- a/services/document-updater/.eslintrc +++ b/services/document-updater/.eslintrc @@ -8,7 +8,7 @@ "prettier/standard" ], "parserOptions": { - "ecmaVersion": 2017 + "ecmaVersion": 2018 }, "plugins": [ "mocha", diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 646a8578df..d2904eb898 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -330,19 +330,23 @@ function updateProject(req, res, next) { userId, docUpdates, fileUpdates, + updates, version } = req.body logger.log( - { projectId, docUpdates, fileUpdates, version }, + { projectId, updates, docUpdates, fileUpdates, version }, 'updating project via http' ) - + const allUpdates = _mergeUpdates( + docUpdates || [], + fileUpdates || [], + updates || [] + ) ProjectManager.updateProjectWithLocks( projectId, projectHistoryId, userId, - docUpdates, - fileUpdates, + allUpdates, version, (error) => { timer.done() @@ -412,3 +416,23 @@ function flushQueuedProjects(req, res, next) { } }) } + +/** + * Merge updates from the previous project update interface (docUpdates + + * fileUpdates) and the new update interface (updates). + */ +function _mergeUpdates(docUpdates, fileUpdates, updates) { + const mergedUpdates = [] + for (const update of docUpdates) { + const type = update.docLines != null ? 'add-doc' : 'rename-doc' + mergedUpdates.push({ type, ...update }) + } + for (const update of fileUpdates) { + const type = update.url != null ? 
'add-file' : 'rename-file' + mergedUpdates.push({ type, ...update }) + } + for (const update of updates) { + mergedUpdates.push(update) + } + return mergedUpdates +} diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index 768ab3e759..07284a692a 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -212,9 +212,8 @@ function updateProjectWithLocks( projectId, projectHistoryId, userId, - docUpdates, - fileUpdates, - version, + updates, + projectVersion, _callback ) { const timer = new Metrics.Timer('projectManager.updateProject') @@ -223,92 +222,85 @@ function updateProjectWithLocks( _callback(...args) } - const projectVersion = version let projectSubversion = 0 // project versions can have multiple operations - let projectOpsLength = 0 - const handleDocUpdate = function (projectUpdate, cb) { - const docId = projectUpdate.id - projectUpdate.version = `${projectVersion}.${projectSubversion++}` - if (projectUpdate.docLines != null) { - ProjectHistoryRedisManager.queueAddEntity( - projectId, - projectHistoryId, - 'doc', - docId, - userId, - projectUpdate, - (error, count) => { - projectOpsLength = count - cb(error) - } - ) - } else { - DocumentManager.renameDocWithLock( - projectId, - docId, - userId, - projectUpdate, - projectHistoryId, - (error, count) => { - projectOpsLength = count - cb(error) - } - ) + function handleUpdate(update, cb) { + update.version = `${projectVersion}.${projectSubversion++}` + switch (update.type) { + case 'add-doc': + ProjectHistoryRedisManager.queueAddEntity( + projectId, + projectHistoryId, + 'doc', + update.id, + userId, + update, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + break + case 'rename-doc': + DocumentManager.renameDocWithLock( + projectId, + update.id, + userId, + update, + projectHistoryId, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + break + case 'add-file': + ProjectHistoryRedisManager.queueAddEntity( + projectId, + projectHistoryId, + 'file', + update.id, + userId, + update, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + break + case 'rename-file': + ProjectHistoryRedisManager.queueRenameEntity( + projectId, + projectHistoryId, + 'file', + update.id, + userId, + update, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + break + default: + cb(new Error(`Unknown update type: ${update.type}`)) } } - const handleFileUpdate = function (projectUpdate, cb) { - const fileId = projectUpdate.id - projectUpdate.version = `${projectVersion}.${projectSubversion++}` - if (projectUpdate.url != null) { - ProjectHistoryRedisManager.queueAddEntity( - projectId, - projectHistoryId, - 'file', - fileId, - userId, - projectUpdate, - (error, count) => { - projectOpsLength = count - cb(error) - } - ) - } else { - ProjectHistoryRedisManager.queueRenameEntity( - projectId, - projectHistoryId, - 'file', - fileId, - userId, - projectUpdate, - (error, count) => { - projectOpsLength = count - cb(error) - } - ) - } - } - - async.eachSeries(docUpdates, handleDocUpdate, (error) => { + async.eachSeries(updates, handleUpdate, (error) => { if (error) { return callback(error) } - async.eachSeries(fileUpdates, handleFileUpdate, (error) => { - if (error) { - return callback(error) - } - if ( - HistoryManager.shouldFlushHistoryOps( - projectOpsLength, - docUpdates.length + fileUpdates.length, - HistoryManager.FLUSH_PROJECT_EVERY_N_OPS - ) - ) { - 
HistoryManager.flushProjectChangesAsync(projectId) - } - callback() - }) + if ( + HistoryManager.shouldFlushHistoryOps( + projectOpsLength, + updates.length, + HistoryManager.FLUSH_PROJECT_EVERY_N_OPS + ) + ) { + HistoryManager.flushProjectChangesAsync(projectId) + } + callback() }) } diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 36cf990724..4d9790926a 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -809,12 +809,34 @@ describe('HttpController', function () { }) }) - describe('updateProject', function () { + describe('updateProject (split doc and file updates)', function () { beforeEach(function () { this.projectHistoryId = 'history-id-123' this.userId = 'user-id-123' - this.docUpdates = sinon.stub() - this.fileUpdates = sinon.stub() + this.docUpdates = [ + { id: 1, pathname: 'thesis.tex', newPathname: 'book.tex' }, + { id: 2, pathname: 'article.tex', docLines: 'hello' } + ] + this.fileUpdates = [ + { id: 3, pathname: 'apple.png', newPathname: 'banana.png' }, + { id: 4, url: 'filestore.example.com/4' } + ] + this.expectedUpdates = [ + { + type: 'rename-doc', + id: 1, + pathname: 'thesis.tex', + newPathname: 'book.tex' + }, + { type: 'add-doc', id: 2, pathname: 'article.tex', docLines: 'hello' }, + { + type: 'rename-file', + id: 3, + pathname: 'apple.png', + newPathname: 'banana.png' + }, + { type: 'add-file', id: 4, url: 'filestore.example.com/4' } + ] this.version = 1234567 this.req = { query: {}, @@ -833,9 +855,7 @@ describe('HttpController', function () { describe('successfully', function () { beforeEach(function () { - this.ProjectManager.updateProjectWithLocks = sinon - .stub() - .callsArgWith(6) + this.ProjectManager.updateProjectWithLocks = sinon.stub().yields() this.HttpController.updateProject(this.req, this.res, this.next) }) @@ -845,8 +865,7 @@ describe('HttpController', function () { this.project_id, this.projectHistoryId, this.userId, - this.docUpdates, - this.fileUpdates, + this.expectedUpdates, this.version ) .should.equal(true) @@ -865,7 +884,83 @@ describe('HttpController', function () { beforeEach(function () { this.ProjectManager.updateProjectWithLocks = sinon .stub() - .callsArgWith(6, new Error('oops')) + .yields(new Error('oops')) + this.HttpController.updateProject(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('updateProject (single updates parameter)', function () { + beforeEach(function () { + this.projectHistoryId = 'history-id-123' + this.userId = 'user-id-123' + this.updates = [ + { + type: 'rename-doc', + id: 1, + pathname: 'thesis.tex', + newPathname: 'book.tex' + }, + { type: 'add-doc', id: 2, pathname: 'article.tex', docLines: 'hello' }, + { + type: 'rename-file', + id: 3, + pathname: 'apple.png', + newPathname: 'banana.png' + }, + { type: 'add-file', id: 4, url: 'filestore.example.com/4' } + ] + this.version = 1234567 + this.req = { + query: {}, + body: { + projectHistoryId: this.projectHistoryId, + userId: this.userId, + updates: this.updates, + version: this.version + }, + params: { + project_id: this.project_id + } + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.ProjectManager.updateProjectWithLocks = 
sinon.stub().yields() + this.HttpController.updateProject(this.req, this.res, this.next) + }) + + it('should accept the change', function () { + this.ProjectManager.updateProjectWithLocks + .calledWith( + this.project_id, + this.projectHistoryId, + this.userId, + this.updates, + this.version + ) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when an error occurs', function () { + beforeEach(function () { + this.ProjectManager.updateProjectWithLocks = sinon + .stub() + .yields(new Error('oops')) this.HttpController.updateProject(this.req, this.res, this.next) }) diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index d17f80d44b..aa3db813a0 100644 --- a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -49,22 +49,28 @@ describe('ProjectManager', function () { describe('rename operations', function () { beforeEach(function () { this.firstDocUpdate = { + type: 'rename-doc', id: 1, pathname: 'foo', newPathname: 'foo' } this.secondDocUpdate = { + type: 'rename-doc', id: 2, pathname: 'bar', newPathname: 'bar2' } - this.docUpdates = [this.firstDocUpdate, this.secondDocUpdate] this.firstFileUpdate = { + type: 'rename-file', id: 2, pathname: 'bar', newPathname: 'bar2' } - this.fileUpdates = [this.firstFileUpdate] + this.updates = [ + this.firstDocUpdate, + this.secondDocUpdate, + this.firstFileUpdate + ] }) describe('successfully', function () { @@ -73,8 +79,7 @@ this.project_id, this.projectHistoryId, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, this.callback ) @@ -146,8 +151,7 @@ this.project_id, this.projectHistoryId, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, this.callback ) @@ -166,8 +170,7 @@ this.project_id, this.projectHistoryId, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, this.callback ) @@ -185,8 +188,7 @@ this.project_id, this.projectHistoryId, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, this.callback ) @@ -203,23 +205,31 @@ describe('add operations', function () { beforeEach(function () { this.firstDocUpdate = { + type: 'add-doc', id: 1, docLines: 'a\nb' } this.secondDocUpdate = { + type: 'add-doc', id: 2, docLines: 'a\nb' } - this.docUpdates = [this.firstDocUpdate, this.secondDocUpdate] this.firstFileUpdate = { + type: 'add-file', id: 3, url: 'filestore.example.com/2' } this.secondFileUpdate = { + type: 'add-file', id: 4, url: 'filestore.example.com/3' } - this.fileUpdates = [this.firstFileUpdate, this.secondFileUpdate] + this.updates = [ + this.firstDocUpdate, + this.secondDocUpdate, + this.firstFileUpdate, + this.secondFileUpdate + ] }) describe('successfully', function () { @@ -228,8 +238,7 @@ this.project_id, this.projectHistoryId, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, 
this.callback ) @@ -322,8 +331,7 @@ describe('ProjectManager', function () { this.project_id, this.projectHistoryId, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, this.callback ) @@ -342,8 +350,7 @@ describe('ProjectManager', function () { this.project_id, this.projectHistoryId, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, this.callback ) @@ -361,8 +368,7 @@ describe('ProjectManager', function () { this.project_id, this.projectHistoryId, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, this.callback ) @@ -375,5 +381,23 @@ describe('ProjectManager', function () { }) }) }) + + describe('when given an unknown operation type', function () { + beforeEach(function () { + this.updates = [{ type: 'brew-coffee' }] + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.callback + ) + }) + + it('should call back with an error', function () { + this.callback.calledWith(sinon.match.instanceOf(Error)).should.be.true + }) + }) }) }) From 924cc0bf7390b0a297595bfe9f9033c6d351b8b4 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 20 May 2020 15:31:41 -0400 Subject: [PATCH 677/769] Decaf cleanup: simplify null checks --- .../ApplyingUpdatesToProjectStructureTests.js | 51 +++++++------------ 1 file changed, 18 insertions(+), 33 deletions(-) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index 793a9fa5a8..c43ad15ae6 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -8,7 +8,6 @@ /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') @@ -41,7 +40,7 @@ describe("Applying updates to a project's structure", function () { } this.fileUpdates = [this.fileUpdate] return DocUpdaterApp.ensureRunning((error) => { - if (error != null) { + if (error) { throw error } return DocUpdaterClient.sendProjectUpdate( @@ -51,7 +50,7 @@ describe("Applying updates to a project's structure", function () { this.fileUpdates, this.version, (error) => { - if (error != null) { + if (error) { throw error } return setTimeout(done, 200) @@ -66,7 +65,7 @@ describe("Applying updates to a project's structure", function () { 0, -1, (error, updates) => { - if (error != null) { + if (error) { throw error } @@ -81,7 +80,6 @@ describe("Applying updates to a project's structure", function () { return done() } ) - return null }) }) @@ -105,13 +103,12 @@ describe("Applying updates to a project's structure", function () { [], this.version, (error) => { - if (error != null) { + if (error) { throw error } return setTimeout(done, 200) } ) - return null }) return it('should push the applied doc renames to the project history api', function (done) { @@ -120,7 +117,7 @@ describe("Applying updates to a project's structure", function () { 0, -1, (error, updates) => { - if (error != null) { + if (error) { throw error } @@ -135,7 +132,6 @@ describe("Applying updates to a project's structure", function () { return done() } ) - return null }) }) @@ -147,7 +143,7 @@ 
describe("Applying updates to a project's structure", function () { this.project_id, this.docUpdate.id, (error) => { - if (error != null) { + if (error) { throw error } sinon.spy(MockWebApi, 'getDocument') @@ -158,7 +154,7 @@ describe("Applying updates to a project's structure", function () { [], this.version, (error) => { - if (error != null) { + if (error) { throw error } return setTimeout(done, 200) @@ -166,7 +162,6 @@ describe("Applying updates to a project's structure", function () { ) } ) - return null }) after(function () { @@ -182,7 +177,6 @@ describe("Applying updates to a project's structure", function () { return done() } ) - return null }) return it('should push the applied doc renames to the project history api', function (done) { @@ -191,7 +185,7 @@ describe("Applying updates to a project's structure", function () { 0, -1, (error, updates) => { - if (error != null) { + if (error) { throw error } @@ -206,7 +200,6 @@ describe("Applying updates to a project's structure", function () { return done() } ) - return null }) }) }) @@ -247,13 +240,12 @@ describe("Applying updates to a project's structure", function () { this.fileUpdates, this.version, (error) => { - if (error != null) { + if (error) { throw error } return setTimeout(done, 200) } ) - return null }) return it('should push the applied doc renames to the project history api', function (done) { @@ -262,7 +254,7 @@ describe("Applying updates to a project's structure", function () { 0, -1, (error, updates) => { - if (error != null) { + if (error) { throw error } @@ -301,7 +293,6 @@ describe("Applying updates to a project's structure", function () { return done() } ) - return null }) }) }) @@ -322,13 +313,12 @@ describe("Applying updates to a project's structure", function () { this.fileUpdates, this.version, (error) => { - if (error != null) { + if (error) { throw error } return setTimeout(done, 200) } ) - return null }) return it('should push the file addition to the project history api', function (done) { @@ -337,7 +327,7 @@ describe("Applying updates to a project's structure", function () { 0, -1, (error, updates) => { - if (error != null) { + if (error) { throw error } @@ -352,7 +342,6 @@ describe("Applying updates to a project's structure", function () { return done() } ) - return null }) }) @@ -372,13 +361,12 @@ describe("Applying updates to a project's structure", function () { [], this.version, (error) => { - if (error != null) { + if (error) { throw error } return setTimeout(done, 200) } ) - return null }) return it('should push the doc addition to the project history api', function (done) { @@ -387,7 +375,7 @@ describe("Applying updates to a project's structure", function () { 0, -1, (error, updates) => { - if (error != null) { + if (error) { throw error } @@ -402,7 +390,6 @@ describe("Applying updates to a project's structure", function () { return done() } ) - return null }) }) @@ -434,7 +421,7 @@ describe("Applying updates to a project's structure", function () { [], this.version0, function (error) { - if (error != null) { + if (error) { throw error } return DocUpdaterClient.sendProjectUpdate( @@ -444,7 +431,7 @@ describe("Applying updates to a project's structure", function () { [], this.version1, (error) => { - if (error != null) { + if (error) { throw error } return setTimeout(done, 2000) @@ -452,7 +439,6 @@ describe("Applying updates to a project's structure", function () { ) } ) - return null }) after(function () { @@ -495,7 +481,7 @@ describe("Applying updates to a project's structure", function () { [], 
this.version0, function (error) { - if (error != null) { + if (error) { throw error } return DocUpdaterClient.sendProjectUpdate( @@ -505,7 +491,7 @@ describe("Applying updates to a project's structure", function () { [], this.version1, (error) => { - if (error != null) { + if (error) { throw error } return setTimeout(done, 2000) @@ -513,7 +499,6 @@ describe("Applying updates to a project's structure", function () { ) } ) - return null }) after(function () { From cfc0d45ccd46f78530272f571b3d4f81f965471a Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 20 May 2020 15:32:57 -0400 Subject: [PATCH 678/769] Decaf cleanup: unnecessary returns --- .../ApplyingUpdatesToProjectStructureTests.js | 85 +++++++++---------- 1 file changed, 39 insertions(+), 46 deletions(-) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index c43ad15ae6..4efa4c63e6 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -3,13 +3,6 @@ handle-callback-err, no-return-assign, */ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const sinon = require('sinon') const chai = require('chai') chai.should() @@ -27,7 +20,7 @@ const DocUpdaterApp = require('./helpers/DocUpdaterApp') describe("Applying updates to a project's structure", function () { before(function () { this.user_id = 'user-id-123' - return (this.version = 1234) + this.version = 1234 }) describe('renaming a file', function () { @@ -39,11 +32,11 @@ describe("Applying updates to a project's structure", function () { newPathname: '/new-file-path' } this.fileUpdates = [this.fileUpdate] - return DocUpdaterApp.ensureRunning((error) => { + DocUpdaterApp.ensureRunning((error) => { if (error) { throw error } - return DocUpdaterClient.sendProjectUpdate( + DocUpdaterClient.sendProjectUpdate( this.project_id, this.user_id, [], @@ -53,13 +46,13 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return setTimeout(done, 200) + setTimeout(done, 200) } ) }) }) - return it('should push the applied file renames to the project history api', function (done) { + it('should push the applied file renames to the project history api', function (done) { rclient_project_history.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, @@ -77,7 +70,7 @@ describe("Applying updates to a project's structure", function () { update.meta.ts.should.be.a('string') update.version.should.equal(`${this.version}.0`) - return done() + done() } ) }) @@ -90,7 +83,7 @@ describe("Applying updates to a project's structure", function () { pathname: '/doc-path', newPathname: '/new-doc-path' } - return (this.docUpdates = [this.docUpdate]) + this.docUpdates = [this.docUpdate] }) describe('when the document is not loaded', function () { @@ -106,12 +99,12 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return setTimeout(done, 200) + setTimeout(done, 200) } ) }) - return it('should push the applied doc renames to the project history api', function (done) { + it('should 
push the applied doc renames to the project history api', function (done) { rclient_project_history.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, @@ -129,13 +122,13 @@ describe("Applying updates to a project's structure", function () { update.meta.ts.should.be.a('string') update.version.should.equal(`${this.version}.0`) - return done() + done() } ) }) }) - return describe('when the document is loaded', function () { + describe('when the document is loaded', function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.docUpdate.id, {}) @@ -147,7 +140,7 @@ describe("Applying updates to a project's structure", function () { throw error } sinon.spy(MockWebApi, 'getDocument') - return DocUpdaterClient.sendProjectUpdate( + DocUpdaterClient.sendProjectUpdate( this.project_id, this.user_id, this.docUpdates, @@ -157,7 +150,7 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return setTimeout(done, 200) + setTimeout(done, 200) } ) } @@ -165,7 +158,7 @@ describe("Applying updates to a project's structure", function () { }) after(function () { - return MockWebApi.getDocument.restore() + MockWebApi.getDocument.restore() }) it('should update the doc', function (done) { @@ -174,12 +167,12 @@ describe("Applying updates to a project's structure", function () { this.docUpdate.id, (error, res, doc) => { doc.pathname.should.equal(this.docUpdate.newPathname) - return done() + done() } ) }) - return it('should push the applied doc renames to the project history api', function (done) { + it('should push the applied doc renames to the project history api', function (done) { rclient_project_history.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, @@ -197,7 +190,7 @@ describe("Applying updates to a project's structure", function () { update.meta.ts.should.be.a('string') update.version.should.equal(`${this.version}.0`) - return done() + done() } ) }) @@ -227,10 +220,10 @@ describe("Applying updates to a project's structure", function () { pathname: '/file-path1', newPathname: '/new-file-path1' } - return (this.fileUpdates = [this.fileUpdate0, this.fileUpdate1]) + this.fileUpdates = [this.fileUpdate0, this.fileUpdate1] }) - return describe('when the documents are not loaded', function () { + describe('when the documents are not loaded', function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() DocUpdaterClient.sendProjectUpdate( @@ -243,12 +236,12 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return setTimeout(done, 200) + setTimeout(done, 200) } ) }) - return it('should push the applied doc renames to the project history api', function (done) { + it('should push the applied doc renames to the project history api', function (done) { rclient_project_history.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, @@ -290,7 +283,7 @@ describe("Applying updates to a project's structure", function () { update.meta.ts.should.be.a('string') update.version.should.equal(`${this.version}.3`) - return done() + done() } ) }) @@ -316,12 +309,12 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return setTimeout(done, 200) + setTimeout(done, 200) } ) }) - return it('should push the file addition to the project history api', function (done) { + it('should push the file addition to the project 
history api', function (done) { rclient_project_history.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, @@ -339,7 +332,7 @@ describe("Applying updates to a project's structure", function () { update.meta.ts.should.be.a('string') update.version.should.equal(`${this.version}.0`) - return done() + done() } ) }) @@ -364,12 +357,12 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return setTimeout(done, 200) + setTimeout(done, 200) } ) }) - return it('should push the doc addition to the project history api', function (done) { + it('should push the doc addition to the project history api', function (done) { rclient_project_history.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, @@ -387,7 +380,7 @@ describe("Applying updates to a project's structure", function () { update.meta.ts.should.be.a('string') update.version.should.equal(`${this.version}.0`) - return done() + done() } ) }) @@ -424,7 +417,7 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return DocUpdaterClient.sendProjectUpdate( + DocUpdaterClient.sendProjectUpdate( projectId, userId, updates.slice(250), @@ -434,7 +427,7 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return setTimeout(done, 2000) + setTimeout(done, 2000) } ) } @@ -442,17 +435,17 @@ describe("Applying updates to a project's structure", function () { }) after(function () { - return MockProjectHistoryApi.flushProject.restore() + MockProjectHistoryApi.flushProject.restore() }) - return it('should flush project history', function () { - return MockProjectHistoryApi.flushProject + it('should flush project history', function () { + MockProjectHistoryApi.flushProject .calledWith(this.project_id) .should.equal(true) }) }) - return describe('with too few updates to flush to the history service', function () { + describe('with too few updates to flush to the history service', function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() this.user_id = DocUpdaterClient.randomId() @@ -484,7 +477,7 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return DocUpdaterClient.sendProjectUpdate( + DocUpdaterClient.sendProjectUpdate( projectId, userId, updates.slice(10), @@ -494,7 +487,7 @@ describe("Applying updates to a project's structure", function () { if (error) { throw error } - return setTimeout(done, 2000) + setTimeout(done, 2000) } ) } @@ -502,11 +495,11 @@ describe("Applying updates to a project's structure", function () { }) after(function () { - return MockProjectHistoryApi.flushProject.restore() + MockProjectHistoryApi.flushProject.restore() }) - return it('should not flush project history', function () { - return MockProjectHistoryApi.flushProject + it('should not flush project history', function () { + MockProjectHistoryApi.flushProject .calledWith(this.project_id) .should.equal(false) }) From e41836028a632fdd18cab8cf323d6af24b4df1e1 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 20 May 2020 15:34:28 -0400 Subject: [PATCH 679/769] Decaf cleanup: error handling --- .../ApplyingUpdatesToProjectStructureTests.js | 41 ++++++++++--------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index 
4efa4c63e6..d9e1365a5a 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -1,7 +1,5 @@ /* eslint-disable camelcase, - handle-callback-err, - no-return-assign, */ const sinon = require('sinon') const chai = require('chai') @@ -34,7 +32,7 @@ describe("Applying updates to a project's structure", function () { this.fileUpdates = [this.fileUpdate] DocUpdaterApp.ensureRunning((error) => { if (error) { - throw error + return done(error) } DocUpdaterClient.sendProjectUpdate( this.project_id, @@ -44,7 +42,7 @@ describe("Applying updates to a project's structure", function () { this.version, (error) => { if (error) { - throw error + return done(error) } setTimeout(done, 200) } @@ -59,7 +57,7 @@ describe("Applying updates to a project's structure", function () { -1, (error, updates) => { if (error) { - throw error + return done(error) } const update = JSON.parse(updates[0]) @@ -97,7 +95,7 @@ describe("Applying updates to a project's structure", function () { this.version, (error) => { if (error) { - throw error + return done(error) } setTimeout(done, 200) } @@ -111,7 +109,7 @@ describe("Applying updates to a project's structure", function () { -1, (error, updates) => { if (error) { - throw error + return done(error) } const update = JSON.parse(updates[0]) @@ -137,7 +135,7 @@ describe("Applying updates to a project's structure", function () { this.docUpdate.id, (error) => { if (error) { - throw error + return done(error) } sinon.spy(MockWebApi, 'getDocument') DocUpdaterClient.sendProjectUpdate( @@ -148,7 +146,7 @@ describe("Applying updates to a project's structure", function () { this.version, (error) => { if (error) { - throw error + return done(error) } setTimeout(done, 200) } @@ -166,6 +164,9 @@ describe("Applying updates to a project's structure", function () { this.project_id, this.docUpdate.id, (error, res, doc) => { + if (error) { + return done(error) + } doc.pathname.should.equal(this.docUpdate.newPathname) done() } @@ -179,7 +180,7 @@ describe("Applying updates to a project's structure", function () { -1, (error, updates) => { if (error) { - throw error + return done(error) } const update = JSON.parse(updates[0]) @@ -234,7 +235,7 @@ describe("Applying updates to a project's structure", function () { this.version, (error) => { if (error) { - throw error + return done(error) } setTimeout(done, 200) } @@ -248,7 +249,7 @@ describe("Applying updates to a project's structure", function () { -1, (error, updates) => { if (error) { - throw error + return done(error) } let update = JSON.parse(updates[0]) @@ -307,7 +308,7 @@ describe("Applying updates to a project's structure", function () { this.version, (error) => { if (error) { - throw error + return done(error) } setTimeout(done, 200) } @@ -321,7 +322,7 @@ describe("Applying updates to a project's structure", function () { -1, (error, updates) => { if (error) { - throw error + return done(error) } const update = JSON.parse(updates[0]) @@ -355,7 +356,7 @@ describe("Applying updates to a project's structure", function () { this.version, (error) => { if (error) { - throw error + return done(error) } setTimeout(done, 200) } @@ -369,7 +370,7 @@ describe("Applying updates to a project's structure", function () { -1, (error, updates) => { if (error) { - throw error + return done(error) } const update = JSON.parse(updates[0]) @@ -415,7 +416,7 @@ describe("Applying updates to a project's structure", function () { 
this.version0, function (error) { if (error) { - throw error + return done(error) } DocUpdaterClient.sendProjectUpdate( projectId, @@ -425,7 +426,7 @@ describe("Applying updates to a project's structure", function () { this.version1, (error) => { if (error) { - throw error + return done(error) } setTimeout(done, 2000) } @@ -475,7 +476,7 @@ describe("Applying updates to a project's structure", function () { this.version0, function (error) { if (error) { - throw error + return done(error) } DocUpdaterClient.sendProjectUpdate( projectId, @@ -485,7 +486,7 @@ describe("Applying updates to a project's structure", function () { this.version1, (error) => { if (error) { - throw error + return done(error) } setTimeout(done, 2000) } From 3830b8029ad2c46e3eb7bd60a4409ab2b99cf5b1 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 20 May 2020 15:35:12 -0400 Subject: [PATCH 680/769] Decaf cleanup: camel case variables --- .../ApplyingUpdatesToProjectStructureTests.js | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index d9e1365a5a..8be9291fdf 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -1,11 +1,8 @@ -/* eslint-disable - camelcase, -*/ const sinon = require('sinon') const chai = require('chai') chai.should() const Settings = require('settings-sharelatex') -const rclient_project_history = require('redis-sharelatex').createClient( +const rclientProjectHistory = require('redis-sharelatex').createClient( Settings.redis.project_history ) const ProjectHistoryKeys = Settings.redis.project_history.key_schema @@ -51,7 +48,7 @@ describe("Applying updates to a project's structure", function () { }) it('should push the applied file renames to the project history api', function (done) { - rclient_project_history.lrange( + rclientProjectHistory.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, -1, @@ -103,7 +100,7 @@ describe("Applying updates to a project's structure", function () { }) it('should push the applied doc renames to the project history api', function (done) { - rclient_project_history.lrange( + rclientProjectHistory.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, -1, @@ -174,7 +171,7 @@ describe("Applying updates to a project's structure", function () { }) it('should push the applied doc renames to the project history api', function (done) { - rclient_project_history.lrange( + rclientProjectHistory.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, -1, @@ -243,7 +240,7 @@ describe("Applying updates to a project's structure", function () { }) it('should push the applied doc renames to the project history api', function (done) { - rclient_project_history.lrange( + rclientProjectHistory.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, -1, @@ -316,7 +313,7 @@ describe("Applying updates to a project's structure", function () { }) it('should push the file addition to the project history api', function (done) { - rclient_project_history.lrange( + rclientProjectHistory.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, -1, @@ -364,7 +361,7 @@ describe("Applying updates to a project's 
structure", function () { }) it('should push the doc addition to the project history api', function (done) { - rclient_project_history.lrange( + rclientProjectHistory.lrange( ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), 0, -1, From 8bbfd25d477c8a3c7d8bf2e3a26616d4762a10bb Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 20 May 2020 15:53:06 -0400 Subject: [PATCH 681/769] Decaf cleanup: simplify null checks --- .../acceptance/js/helpers/DocUpdaterClient.js | 58 +------------------ 1 file changed, 3 insertions(+), 55 deletions(-) diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 53793135eb..74e04733f5 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -8,7 +8,6 @@ * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let DocUpdaterClient @@ -35,26 +34,20 @@ module.exports = DocUpdaterClient = { }, subscribeToAppliedOps(callback) { - if (callback == null) { - callback = function (message) {} - } return rclient_sub.on('message', callback) }, sendUpdate(project_id, doc_id, update, callback) { - if (callback == null) { - callback = function (error) {} - } return rclient.rpush( keys.pendingUpdates({ doc_id }), JSON.stringify(update), (error) => { - if (error != null) { + if (error) { return callback(error) } const doc_key = `${project_id}:${doc_id}` return rclient.sadd('DocsWithPendingUpdates', doc_key, (error) => { - if (error != null) { + if (error) { return callback(error) } return rclient.rpush('pending-updates-list', doc_key, callback) @@ -64,11 +57,8 @@ module.exports = DocUpdaterClient = { }, sendUpdates(project_id, doc_id, updates, callback) { - if (callback == null) { - callback = function (error) {} - } return DocUpdaterClient.preloadDoc(project_id, doc_id, (error) => { - if (error != null) { + if (error) { return callback(error) } const jobs = [] @@ -100,9 +90,6 @@ module.exports = DocUpdaterClient = { }, getDoc(project_id, doc_id, callback) { - if (callback == null) { - callback = function (error, res, body) {} - } return request.get( `http://localhost:3003/project/${project_id}/doc/${doc_id}`, (error, res, body) => { @@ -115,9 +102,6 @@ module.exports = DocUpdaterClient = { }, getDocAndRecentOps(project_id, doc_id, fromVersion, callback) { - if (callback == null) { - callback = function (error, res, body) {} - } return request.get( `http://localhost:3003/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`, (error, res, body) => { @@ -130,16 +114,10 @@ module.exports = DocUpdaterClient = { }, preloadDoc(project_id, doc_id, callback) { - if (callback == null) { - callback = function (error) {} - } return DocUpdaterClient.getDoc(project_id, doc_id, callback) }, flushDoc(project_id, doc_id, callback) { - if (callback == null) { - callback = function (error) {} - } return request.post( `http://localhost:3003/project/${project_id}/doc/${doc_id}/flush`, (error, res, body) => callback(error, res, body) @@ -147,9 +125,6 @@ module.exports = DocUpdaterClient = { }, setDocLines(project_id, doc_id, lines, source, user_id, undoing, callback) { - if (callback == null) { - callback = function 
(error) {} - } return request.post( { url: `http://localhost:3003/project/${project_id}/doc/${doc_id}`, @@ -165,9 +140,6 @@ module.exports = DocUpdaterClient = { }, deleteDoc(project_id, doc_id, callback) { - if (callback == null) { - callback = function (error) {} - } return request.del( `http://localhost:3003/project/${project_id}/doc/${doc_id}`, (error, res, body) => callback(error, res, body) @@ -175,9 +147,6 @@ module.exports = DocUpdaterClient = { }, flushProject(project_id, callback) { - if (callback == null) { - callback = function () {} - } return request.post( `http://localhost:3003/project/${project_id}/flush`, callback @@ -185,16 +154,10 @@ module.exports = DocUpdaterClient = { }, deleteProject(project_id, callback) { - if (callback == null) { - callback = function () {} - } return request.del(`http://localhost:3003/project/${project_id}`, callback) }, deleteProjectOnShutdown(project_id, callback) { - if (callback == null) { - callback = function () {} - } return request.del( `http://localhost:3003/project/${project_id}?background=true&shutdown=true`, callback @@ -202,9 +165,6 @@ module.exports = DocUpdaterClient = { }, flushOldProjects(callback) { - if (callback == null) { - callback = function () {} - } return request.get( 'http://localhost:3003/flush_queued_projects?min_delete_age=1', callback @@ -212,9 +172,6 @@ module.exports = DocUpdaterClient = { }, acceptChange(project_id, doc_id, change_id, callback) { - if (callback == null) { - callback = function () {} - } return request.post( `http://localhost:3003/project/${project_id}/doc/${doc_id}/change/${change_id}/accept`, callback @@ -222,9 +179,6 @@ module.exports = DocUpdaterClient = { }, removeComment(project_id, doc_id, comment, callback) { - if (callback == null) { - callback = function () {} - } return request.del( `http://localhost:3003/project/${project_id}/doc/${doc_id}/comment/${comment}`, callback @@ -232,9 +186,6 @@ module.exports = DocUpdaterClient = { }, getProjectDocs(project_id, projectStateHash, callback) { - if (callback == null) { - callback = function () {} - } return request.get( `http://localhost:3003/project/${project_id}/doc?state=${projectStateHash}`, (error, res, body) => { @@ -254,9 +205,6 @@ module.exports = DocUpdaterClient = { version, callback ) { - if (callback == null) { - callback = function (error) {} - } return request.post( { url: `http://localhost:3003/project/${project_id}`, From e9df9714e59d2f6af1d2ab90fc52025877868a05 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 20 May 2020 15:54:36 -0400 Subject: [PATCH 682/769] Decaf cleanup: unnecessary returns --- .../acceptance/js/helpers/DocUpdaterClient.js | 56 +++++++++---------- 1 file changed, 26 insertions(+), 30 deletions(-) diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 74e04733f5..89cc9cd74c 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -7,7 +7,6 @@ /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let DocUpdaterClient @@ -34,11 +33,11 @@ module.exports = DocUpdaterClient = { }, subscribeToAppliedOps(callback) { - return rclient_sub.on('message', callback) + rclient_sub.on('message', callback) }, 
sendUpdate(project_id, doc_id, update, callback) { - return rclient.rpush( + rclient.rpush( keys.pendingUpdates({ doc_id }), JSON.stringify(update), (error) => { @@ -46,18 +45,18 @@ module.exports = DocUpdaterClient = { return callback(error) } const doc_key = `${project_id}:${doc_id}` - return rclient.sadd('DocsWithPendingUpdates', doc_key, (error) => { + rclient.sadd('DocsWithPendingUpdates', doc_key, (error) => { if (error) { return callback(error) } - return rclient.rpush('pending-updates-list', doc_key, callback) + rclient.rpush('pending-updates-list', doc_key, callback) }) } ) }, sendUpdates(project_id, doc_id, updates, callback) { - return DocUpdaterClient.preloadDoc(project_id, doc_id, (error) => { + DocUpdaterClient.preloadDoc(project_id, doc_id, (error) => { if (error) { return callback(error) } @@ -68,21 +67,21 @@ module.exports = DocUpdaterClient = { DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback) ))(update) } - return async.series(jobs, (err) => + async.series(jobs, (err) => DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback) ) }) }, waitForPendingUpdates(project_id, doc_id, callback) { - return async.retry( + async.retry( { times: 30, interval: 100 }, (cb) => rclient.llen(keys.pendingUpdates({ doc_id }), (err, length) => { if (length > 0) { - return cb(new Error('updates still pending')) + cb(new Error('updates still pending')) } else { - return cb() + cb() } }), callback @@ -90,42 +89,42 @@ module.exports = DocUpdaterClient = { }, getDoc(project_id, doc_id, callback) { - return request.get( + request.get( `http://localhost:3003/project/${project_id}/doc/${doc_id}`, (error, res, body) => { if (body != null && res.statusCode >= 200 && res.statusCode < 300) { body = JSON.parse(body) } - return callback(error, res, body) + callback(error, res, body) } ) }, getDocAndRecentOps(project_id, doc_id, fromVersion, callback) { - return request.get( + request.get( `http://localhost:3003/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`, (error, res, body) => { if (body != null && res.statusCode >= 200 && res.statusCode < 300) { body = JSON.parse(body) } - return callback(error, res, body) + callback(error, res, body) } ) }, preloadDoc(project_id, doc_id, callback) { - return DocUpdaterClient.getDoc(project_id, doc_id, callback) + DocUpdaterClient.getDoc(project_id, doc_id, callback) }, flushDoc(project_id, doc_id, callback) { - return request.post( + request.post( `http://localhost:3003/project/${project_id}/doc/${doc_id}/flush`, (error, res, body) => callback(error, res, body) ) }, setDocLines(project_id, doc_id, lines, source, user_id, undoing, callback) { - return request.post( + request.post( { url: `http://localhost:3003/project/${project_id}/doc/${doc_id}`, json: { @@ -140,59 +139,56 @@ module.exports = DocUpdaterClient = { }, deleteDoc(project_id, doc_id, callback) { - return request.del( + request.del( `http://localhost:3003/project/${project_id}/doc/${doc_id}`, (error, res, body) => callback(error, res, body) ) }, flushProject(project_id, callback) { - return request.post( - `http://localhost:3003/project/${project_id}/flush`, - callback - ) + request.post(`http://localhost:3003/project/${project_id}/flush`, callback) }, deleteProject(project_id, callback) { - return request.del(`http://localhost:3003/project/${project_id}`, callback) + request.del(`http://localhost:3003/project/${project_id}`, callback) }, deleteProjectOnShutdown(project_id, callback) { - return request.del( + request.del( 
`http://localhost:3003/project/${project_id}?background=true&shutdown=true`, callback ) }, flushOldProjects(callback) { - return request.get( + request.get( 'http://localhost:3003/flush_queued_projects?min_delete_age=1', callback ) }, acceptChange(project_id, doc_id, change_id, callback) { - return request.post( + request.post( `http://localhost:3003/project/${project_id}/doc/${doc_id}/change/${change_id}/accept`, callback ) }, removeComment(project_id, doc_id, comment, callback) { - return request.del( + request.del( `http://localhost:3003/project/${project_id}/doc/${doc_id}/comment/${comment}`, callback ) }, getProjectDocs(project_id, projectStateHash, callback) { - return request.get( + request.get( `http://localhost:3003/project/${project_id}/doc?state=${projectStateHash}`, (error, res, body) => { if (body != null && res.statusCode >= 200 && res.statusCode < 300) { body = JSON.parse(body) } - return callback(error, res, body) + callback(error, res, body) } ) }, @@ -205,7 +201,7 @@ module.exports = DocUpdaterClient = { version, callback ) { - return request.post( + request.post( { url: `http://localhost:3003/project/${project_id}`, json: { userId, docUpdates, fileUpdates, version } From 05a2cf829c8e5c88697ec3cc641d5c27768aa6db Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 20 May 2020 16:06:42 -0400 Subject: [PATCH 683/769] Decaf cleanup: simplify loops --- .../acceptance/js/helpers/DocUpdaterClient.js | 36 +++++-------------- 1 file changed, 8 insertions(+), 28 deletions(-) diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 89cc9cd74c..ebd1a5b1a0 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -2,13 +2,6 @@ camelcase, handle-callback-err, */ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ let DocUpdaterClient const Settings = require('settings-sharelatex') const rclient = require('redis-sharelatex').createClient( @@ -26,10 +19,11 @@ rclient_sub.setMaxListeners(0) module.exports = DocUpdaterClient = { randomId() { - const chars = __range__(1, 24, true).map( - (i) => Math.random().toString(16)[2] - ) - return chars.join('') + let str = '' + for (let i = 0; i < 24; i++) { + str += Math.floor(Math.random() * 16).toString(16) + } + return str }, subscribeToAppliedOps(callback) { @@ -60,13 +54,9 @@ module.exports = DocUpdaterClient = { if (error) { return callback(error) } - const jobs = [] - for (const update of Array.from(updates)) { - ;((update) => - jobs.push((callback) => - DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback) - ))(update) - } + const jobs = updates.map((update) => (callback) => { + DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback) + }) async.series(jobs, (err) => DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback) ) @@ -210,13 +200,3 @@ module.exports = DocUpdaterClient = { ) } } - -function __range__(left, right, inclusive) { - const range = [] - const ascending = left < right - const end = !inclusive ? right : ascending ? right + 1 : right - 1 - for (let i = left; ascending ? i < end : i > end; ascending ? 
---
 .../acceptance/js/helpers/DocUpdaterClient.js | 36 +++++--------
 1 file changed, 8 insertions(+), 28 deletions(-)

diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
index 89cc9cd74c..ebd1a5b1a0 100644
--- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
+++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
@@ -2,13 +2,6 @@
   camelcase,
   handle-callback-err,
 */
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS101: Remove unnecessary use of Array.from
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
 let DocUpdaterClient
 const Settings = require('settings-sharelatex')
 const rclient = require('redis-sharelatex').createClient(
@@ -26,10 +19,11 @@ rclient_sub.setMaxListeners(0)
 
 module.exports = DocUpdaterClient = {
   randomId() {
-    const chars = __range__(1, 24, true).map(
-      (i) => Math.random().toString(16)[2]
-    )
-    return chars.join('')
+    let str = ''
+    for (let i = 0; i < 24; i++) {
+      str += Math.floor(Math.random() * 16).toString(16)
+    }
+    return str
   },
 
   subscribeToAppliedOps(callback) {
@@ -60,13 +54,9 @@ module.exports = DocUpdaterClient = {
       if (error) {
         return callback(error)
       }
-      const jobs = []
-      for (const update of Array.from(updates)) {
-        ;((update) =>
-          jobs.push((callback) =>
-            DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback)
-          ))(update)
-      }
+      const jobs = updates.map((update) => (callback) => {
+        DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback)
+      })
       async.series(jobs, (err) =>
         DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback)
       )
     })
   },
@@ -210,13 +200,3 @@ module.exports = DocUpdaterClient = {
     )
   }
 }
-
-function __range__(left, right, inclusive) {
-  const range = []
-  const ascending = left < right
-  const end = !inclusive ? right : ascending ? right + 1 : right - 1
-  for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
-    range.push(i)
-  }
-  return range
-}

From abb7e8fa20908700c839b2600dd8b3489e43d56a Mon Sep 17 00:00:00 2001
From: Eric Mc Sween
Date: Wed, 20 May 2020 16:08:03 -0400
Subject: [PATCH 684/769] Decaf cleanup: error handling

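The pattern applied here, as a sketch: errors surfaced by the async callbacks
were previously ignored (the file relied on the handle-callback-err lint
suppression removed in this patch); they are now checked and propagated to the
caller instead of being silently dropped:

    async.series(jobs, (err) => {
      if (err) {
        return callback(err) // propagate instead of continuing regardless
      }
      DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback)
    })
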
---
 .../test/acceptance/js/helpers/DocUpdaterClient.js | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
index ebd1a5b1a0..a3b15942d5 100644
--- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
+++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
@@ -1,6 +1,5 @@
 /* eslint-disable
   camelcase,
-  handle-callback-err,
 */
 let DocUpdaterClient
 const Settings = require('settings-sharelatex')
@@ -57,9 +56,12 @@ module.exports = DocUpdaterClient = {
       const jobs = updates.map((update) => (callback) => {
         DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback)
       })
-      async.series(jobs, (err) =>
+      async.series(jobs, (err) => {
+        if (err) {
+          return callback(err)
+        }
         DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback)
-      )
+      })
     })
   },
 
@@ -68,6 +70,9 @@
       { times: 30, interval: 100 },
       (cb) =>
         rclient.llen(keys.pendingUpdates({ doc_id }), (err, length) => {
+          if (err) {
+            return cb(err)
+          }
           if (length > 0) {
             cb(new Error('updates still pending'))
           } else {

From c018fee72c192e58e9ac79cc5f21108154236b42 Mon Sep 17 00:00:00 2001
From: Eric Mc Sween
Date: Wed, 20 May 2020 16:12:27 -0400
Subject: [PATCH 685/769] Decaf cleanup: camel case variables

---
 .../acceptance/js/helpers/DocUpdaterClient.js | 87 +++++++++----------
 1 file changed, 42 insertions(+), 45 deletions(-)

diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
index a3b15942d5..10813e72fa 100644
--- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
+++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
@@ -1,6 +1,3 @@
-/* eslint-disable
-  camelcase,
-*/
 let DocUpdaterClient
 const Settings = require('settings-sharelatex')
 const rclient = require('redis-sharelatex').createClient(
@@ -10,11 +7,11 @@ const keys = Settings.redis.documentupdater.key_schema
 const request = require('request').defaults({ jar: false })
 const async = require('async')
 
-const rclient_sub = require('redis-sharelatex').createClient(
+const rclientSub = require('redis-sharelatex').createClient(
   Settings.redis.pubsub
 )
-rclient_sub.subscribe('applied-ops')
-rclient_sub.setMaxListeners(0)
+rclientSub.subscribe('applied-ops')
+rclientSub.setMaxListeners(0)
 
 module.exports = DocUpdaterClient = {
   randomId() {
@@ -26,50 +23,50 @@ module.exports = DocUpdaterClient = {
   },
 
   subscribeToAppliedOps(callback) {
-    rclient_sub.on('message', callback)
+    rclientSub.on('message', callback)
   },
 
-  sendUpdate(project_id, doc_id, update, callback) {
+  sendUpdate(projectId, docId, update, callback) {
     rclient.rpush(
-      keys.pendingUpdates({ doc_id }),
+      keys.pendingUpdates({ doc_id: docId }),
       JSON.stringify(update),
       (error) => {
         if (error) {
           return callback(error)
         }
-        const doc_key = `${project_id}:${doc_id}`
-        rclient.sadd('DocsWithPendingUpdates', doc_key, (error) => {
+        const docKey = `${projectId}:${docId}`
+        rclient.sadd('DocsWithPendingUpdates', docKey, (error) => {
           if (error) {
             return callback(error)
          }
-          rclient.rpush('pending-updates-list', doc_key, callback)
+          rclient.rpush('pending-updates-list', docKey, callback)
         })
       }
     )
   },
 
-  sendUpdates(project_id, doc_id, updates, callback) {
-    DocUpdaterClient.preloadDoc(project_id, doc_id, (error) => {
+  sendUpdates(projectId, docId, updates, callback) {
+    DocUpdaterClient.preloadDoc(projectId, docId, (error) => {
       if (error) {
         return callback(error)
       }
       const jobs = updates.map((update) => (callback) => {
-        DocUpdaterClient.sendUpdate(project_id, doc_id, update, callback)
+        DocUpdaterClient.sendUpdate(projectId, docId, update, callback)
       })
       async.series(jobs, (err) => {
         if (err) {
           return callback(err)
         }
-        DocUpdaterClient.waitForPendingUpdates(project_id, doc_id, callback)
+        DocUpdaterClient.waitForPendingUpdates(projectId, docId, callback)
       })
     })
   },
 
-  waitForPendingUpdates(project_id, doc_id, callback) {
+  waitForPendingUpdates(projectId, docId, callback) {
     async.retry(
       { times: 30, interval: 100 },
       (cb) =>
-        rclient.llen(keys.pendingUpdates({ doc_id }), (err, length) => {
+        rclient.llen(keys.pendingUpdates({ doc_id: docId }), (err, length) => {
          if (err) {
            return cb(err)
          }
@@ -83,9 +80,9 @@
     )
   },
 
-  getDoc(project_id, doc_id, callback) {
+  getDoc(projectId, docId, callback) {
     request.get(
-      `http://localhost:3003/project/${project_id}/doc/${doc_id}`,
+      `http://localhost:3003/project/${projectId}/doc/${docId}`,
       (error, res, body) => {
         if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
           body = JSON.parse(body)
         }
@@ -95,9 +92,9 @@
     )
   },
 
-  getDocAndRecentOps(project_id, doc_id, fromVersion, callback) {
+  getDocAndRecentOps(projectId, docId, fromVersion, callback) {
     request.get(
-      `http://localhost:3003/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`,
+      `http://localhost:3003/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`,
       (error, res, body) => {
         if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
           body = JSON.parse(body)
         }
@@ -107,25 +104,25 @@
     )
   },
 
-  preloadDoc(project_id, doc_id, callback) {
-    DocUpdaterClient.getDoc(project_id, doc_id, callback)
+  preloadDoc(projectId, docId, callback) {
+    DocUpdaterClient.getDoc(projectId, docId, callback)
   },
 
-  flushDoc(project_id, doc_id, callback) {
+  flushDoc(projectId, docId, callback) {
     request.post(
-      `http://localhost:3003/project/${project_id}/doc/${doc_id}/flush`,
+      `http://localhost:3003/project/${projectId}/doc/${docId}/flush`,
       (error, res, body) => callback(error, res, body)
     )
   },
 
-  setDocLines(project_id, doc_id, lines, source, user_id, undoing, callback) {
+  setDocLines(projectId, docId, lines, source, userId, undoing, callback) {
     request.post(
       {
-        url: `http://localhost:3003/project/${project_id}/doc/${doc_id}`,
+        url: `http://localhost:3003/project/${projectId}/doc/${docId}`,
         json: {
           lines,
           source,
-          user_id,
+          user_id: userId,
           undoing
         }
       },
@@ -133,24 +130,24 @@
     )
   },
 
-  deleteDoc(project_id, doc_id, callback) {
+  deleteDoc(projectId, docId, callback) {
     request.del(
-      `http://localhost:3003/project/${project_id}/doc/${doc_id}`,
+      `http://localhost:3003/project/${projectId}/doc/${docId}`,
       (error, res, body) => callback(error, res, body)
     )
   },
 
-  flushProject(project_id, callback) {
-    request.post(`http://localhost:3003/project/${project_id}/flush`, callback)
+  flushProject(projectId, callback) {
+    request.post(`http://localhost:3003/project/${projectId}/flush`, callback)
   },
 
-  deleteProject(project_id, callback) {
-    request.del(`http://localhost:3003/project/${project_id}`, callback)
+  deleteProject(projectId, callback) {
+    request.del(`http://localhost:3003/project/${projectId}`, callback)
   },
 
-  deleteProjectOnShutdown(project_id, callback) {
+  deleteProjectOnShutdown(projectId, callback) {
     request.del(
-      `http://localhost:3003/project/${project_id}?background=true&shutdown=true`,
+      `http://localhost:3003/project/${projectId}?background=true&shutdown=true`,
       callback
     )
   },
@@ -162,23 +159,23 @@
-  acceptChange(project_id, doc_id, change_id, callback) {
+  acceptChange(projectId, docId, changeId, callback) {
     request.post(
-      `http://localhost:3003/project/${project_id}/doc/${doc_id}/change/${change_id}/accept`,
+      `http://localhost:3003/project/${projectId}/doc/${docId}/change/${changeId}/accept`,
       callback
     )
   },
 
-  removeComment(project_id, doc_id, comment, callback) {
+  removeComment(projectId, docId, comment, callback) {
     request.del(
-      `http://localhost:3003/project/${project_id}/doc/${doc_id}/comment/${comment}`,
+      `http://localhost:3003/project/${projectId}/doc/${docId}/comment/${comment}`,
       callback
     )
   },
 
-  getProjectDocs(project_id, projectStateHash, callback) {
+  getProjectDocs(projectId, projectStateHash, callback) {
     request.get(
-      `http://localhost:3003/project/${project_id}/doc?state=${projectStateHash}`,
+      `http://localhost:3003/project/${projectId}/doc?state=${projectStateHash}`,
       (error, res, body) => {
         if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
           body = JSON.parse(body)
         }
@@ -189,7 +186,7 @@
   },
 
   sendProjectUpdate(
-    project_id,
+    projectId,
     userId,
     docUpdates,
     fileUpdates,
@@ -198,7 +195,7 @@
   ) {
     request.post(
       {
-        url: `http://localhost:3003/project/${project_id}`,
+        url: `http://localhost:3003/project/${projectId}`,
         json: { userId, docUpdates, fileUpdates, version }
       },
       (error, res, body) => callback(error, res, body)

From 1d1f2040214f119f5199655aa9c41724958cfc10 Mon Sep 17 00:00:00 2001
From: Eric Mc Sween
Date: Wed, 20 May 2020 16:26:22 -0400
Subject: [PATCH 686/769] Remove backwards-compat project update API

The project update endpoint accepted updates in two forms: split across two
array params (docUpdates and fileUpdates), and as a single updates array. This
commit removes the docUpdates/fileUpdates params now that web uses the updates
param.
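As a sketch of the calling-convention change (the ids and pathnames below are
illustrative only; the type values are the ones _mergeUpdates previously
inferred from the docLines and url fields):

    // old request body: the server merged and typed the two arrays itself
    { docUpdates: [{ id: 'doc-1', pathname: '/a.tex', docLines: 'hello' }],
      fileUpdates: [{ id: 'file-1', pathname: '/a.png', newPathname: '/b.png' }],
      version: 1 }
    // new request body: a single, explicitly typed updates array
    { updates: [
        { type: 'add-doc', id: 'doc-1', pathname: '/a.tex', docLines: 'hello' },
        { type: 'rename-file', id: 'file-1', pathname: '/a.png', newPathname: '/b.png' }
      ],
      version: 1 }
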
--- .../document-updater/app/js/HttpController.js | 41 +-------- .../ApplyingUpdatesToProjectStructureTests.js | 62 ++++++------- .../acceptance/js/helpers/DocUpdaterClient.js | 11 +-- .../js/HttpController/HttpControllerTests.js | 87 +------------------ 4 files changed, 39 insertions(+), 162 deletions(-) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index d2904eb898..5e47cf5bf1 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -325,28 +325,13 @@ function deleteComment(req, res, next) { function updateProject(req, res, next) { const timer = new Metrics.Timer('http.updateProject') const projectId = req.params.project_id - const { - projectHistoryId, - userId, - docUpdates, - fileUpdates, - updates, - version - } = req.body - logger.log( - { projectId, updates, docUpdates, fileUpdates, version }, - 'updating project via http' - ) - const allUpdates = _mergeUpdates( - docUpdates || [], - fileUpdates || [], - updates || [] - ) + const { projectHistoryId, userId, updates = [], version } = req.body + logger.log({ projectId, updates, version }, 'updating project via http') ProjectManager.updateProjectWithLocks( projectId, projectHistoryId, userId, - allUpdates, + updates, version, (error) => { timer.done() @@ -416,23 +401,3 @@ function flushQueuedProjects(req, res, next) { } }) } - -/** - * Merge updates from the previous project update interface (docUpdates + - * fileUpdates) and the new update interface (updates). - */ -function _mergeUpdates(docUpdates, fileUpdates, updates) { - const mergedUpdates = [] - for (const update of docUpdates) { - const type = update.docLines != null ? 'add-doc' : 'rename-doc' - mergedUpdates.push({ type, ...update }) - } - for (const update of fileUpdates) { - const type = update.url != null ? 
'add-file' : 'rename-file' - mergedUpdates.push({ type, ...update }) - } - for (const update of updates) { - mergedUpdates.push(update) - } - return mergedUpdates -} diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index 8be9291fdf..58fe5d13eb 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -22,11 +22,12 @@ describe("Applying updates to a project's structure", function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() this.fileUpdate = { + type: 'rename-file', id: DocUpdaterClient.randomId(), pathname: '/file-path', newPathname: '/new-file-path' } - this.fileUpdates = [this.fileUpdate] + this.updates = [this.fileUpdate] DocUpdaterApp.ensureRunning((error) => { if (error) { return done(error) @@ -34,8 +35,7 @@ describe("Applying updates to a project's structure", function () { DocUpdaterClient.sendProjectUpdate( this.project_id, this.user_id, - [], - this.fileUpdates, + this.updates, this.version, (error) => { if (error) { @@ -73,12 +73,13 @@ describe("Applying updates to a project's structure", function () { describe('renaming a document', function () { before(function () { - this.docUpdate = { + this.update = { + type: 'rename-doc', id: DocUpdaterClient.randomId(), pathname: '/doc-path', newPathname: '/new-doc-path' } - this.docUpdates = [this.docUpdate] + this.updates = [this.update] }) describe('when the document is not loaded', function () { @@ -87,8 +88,7 @@ describe("Applying updates to a project's structure", function () { DocUpdaterClient.sendProjectUpdate( this.project_id, this.user_id, - this.docUpdates, - [], + this.updates, this.version, (error) => { if (error) { @@ -110,7 +110,7 @@ describe("Applying updates to a project's structure", function () { } const update = JSON.parse(updates[0]) - update.doc.should.equal(this.docUpdate.id) + update.doc.should.equal(this.update.id) update.pathname.should.equal('/doc-path') update.new_pathname.should.equal('/new-doc-path') update.meta.user_id.should.equal(this.user_id) @@ -126,10 +126,10 @@ describe("Applying updates to a project's structure", function () { describe('when the document is loaded', function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() - MockWebApi.insertDoc(this.project_id, this.docUpdate.id, {}) + MockWebApi.insertDoc(this.project_id, this.update.id, {}) DocUpdaterClient.preloadDoc( this.project_id, - this.docUpdate.id, + this.update.id, (error) => { if (error) { return done(error) @@ -138,8 +138,7 @@ describe("Applying updates to a project's structure", function () { DocUpdaterClient.sendProjectUpdate( this.project_id, this.user_id, - this.docUpdates, - [], + this.updates, this.version, (error) => { if (error) { @@ -159,12 +158,12 @@ describe("Applying updates to a project's structure", function () { it('should update the doc', function (done) { DocUpdaterClient.getDoc( this.project_id, - this.docUpdate.id, + this.update.id, (error, res, doc) => { if (error) { return done(error) } - doc.pathname.should.equal(this.docUpdate.newPathname) + doc.pathname.should.equal(this.update.newPathname) done() } ) @@ -181,7 +180,7 @@ describe("Applying updates to a project's structure", function () { } const update = JSON.parse(updates[0]) - update.doc.should.equal(this.docUpdate.id) + 
update.doc.should.equal(this.update.id) update.pathname.should.equal('/doc-path') update.new_pathname.should.equal('/new-doc-path') update.meta.user_id.should.equal(this.user_id) @@ -198,27 +197,35 @@ describe("Applying updates to a project's structure", function () { describe('renaming multiple documents and files', function () { before(function () { this.docUpdate0 = { + type: 'rename-doc', id: DocUpdaterClient.randomId(), pathname: '/doc-path0', newPathname: '/new-doc-path0' } this.docUpdate1 = { + type: 'rename-doc', id: DocUpdaterClient.randomId(), pathname: '/doc-path1', newPathname: '/new-doc-path1' } - this.docUpdates = [this.docUpdate0, this.docUpdate1] this.fileUpdate0 = { + type: 'rename-file', id: DocUpdaterClient.randomId(), pathname: '/file-path0', newPathname: '/new-file-path0' } this.fileUpdate1 = { + type: 'rename-file', id: DocUpdaterClient.randomId(), pathname: '/file-path1', newPathname: '/new-file-path1' } - this.fileUpdates = [this.fileUpdate0, this.fileUpdate1] + this.updates = [ + this.docUpdate0, + this.docUpdate1, + this.fileUpdate0, + this.fileUpdate1 + ] }) describe('when the documents are not loaded', function () { @@ -227,8 +234,7 @@ describe("Applying updates to a project's structure", function () { DocUpdaterClient.sendProjectUpdate( this.project_id, this.user_id, - this.docUpdates, - this.fileUpdates, + this.updates, this.version, (error) => { if (error) { @@ -292,16 +298,16 @@ describe("Applying updates to a project's structure", function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() this.fileUpdate = { + type: 'add-file', id: DocUpdaterClient.randomId(), pathname: '/file-path', url: 'filestore.example.com' } - this.fileUpdates = [this.fileUpdate] + this.updates = [this.fileUpdate] DocUpdaterClient.sendProjectUpdate( this.project_id, this.user_id, - [], - this.fileUpdates, + this.updates, this.version, (error) => { if (error) { @@ -340,16 +346,16 @@ describe("Applying updates to a project's structure", function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() this.docUpdate = { + type: 'add-doc', id: DocUpdaterClient.randomId(), pathname: '/file-path', docLines: 'a\nb' } - this.docUpdates = [this.docUpdate] + this.updates = [this.docUpdate] DocUpdaterClient.sendProjectUpdate( this.project_id, this.user_id, - this.docUpdates, - [], + this.updates, this.version, (error) => { if (error) { @@ -394,6 +400,7 @@ describe("Applying updates to a project's structure", function () { for (let v = 0; v <= 599; v++) { // Should flush after 500 ops updates.push({ + type: 'add-doc', id: DocUpdaterClient.randomId(), pathname: '/file-' + v, docLines: 'a\nb' @@ -409,7 +416,6 @@ describe("Applying updates to a project's structure", function () { projectId, userId, updates.slice(0, 250), - [], this.version0, function (error) { if (error) { @@ -419,7 +425,6 @@ describe("Applying updates to a project's structure", function () { projectId, userId, updates.slice(250), - [], this.version1, (error) => { if (error) { @@ -454,6 +459,7 @@ describe("Applying updates to a project's structure", function () { for (let v = 0; v <= 42; v++) { // Should flush after 500 ops updates.push({ + type: 'add-doc', id: DocUpdaterClient.randomId(), pathname: '/file-' + v, docLines: 'a\nb' @@ -469,7 +475,6 @@ describe("Applying updates to a project's structure", function () { projectId, userId, updates.slice(0, 10), - [], this.version0, function (error) { if (error) { @@ -479,7 +484,6 @@ describe("Applying updates to a project's structure", 
function () { projectId, userId, updates.slice(10), - [], this.version1, (error) => { if (error) { diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 10813e72fa..9e0ee6462f 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -185,18 +185,11 @@ module.exports = DocUpdaterClient = { ) }, - sendProjectUpdate( - projectId, - userId, - docUpdates, - fileUpdates, - version, - callback - ) { + sendProjectUpdate(projectId, userId, updates, version, callback) { request.post( { url: `http://localhost:3003/project/${projectId}`, - json: { userId, docUpdates, fileUpdates, version } + json: { userId, updates, version } }, (error, res, body) => callback(error, res, body) ) diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 4d9790926a..07e9d93c9a 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -809,92 +809,7 @@ describe('HttpController', function () { }) }) - describe('updateProject (split doc and file updates)', function () { - beforeEach(function () { - this.projectHistoryId = 'history-id-123' - this.userId = 'user-id-123' - this.docUpdates = [ - { id: 1, pathname: 'thesis.tex', newPathname: 'book.tex' }, - { id: 2, pathname: 'article.tex', docLines: 'hello' } - ] - this.fileUpdates = [ - { id: 3, pathname: 'apple.png', newPathname: 'banana.png' }, - { id: 4, url: 'filestore.example.com/4' } - ] - this.expectedUpdates = [ - { - type: 'rename-doc', - id: 1, - pathname: 'thesis.tex', - newPathname: 'book.tex' - }, - { type: 'add-doc', id: 2, pathname: 'article.tex', docLines: 'hello' }, - { - type: 'rename-file', - id: 3, - pathname: 'apple.png', - newPathname: 'banana.png' - }, - { type: 'add-file', id: 4, url: 'filestore.example.com/4' } - ] - this.version = 1234567 - this.req = { - query: {}, - body: { - projectHistoryId: this.projectHistoryId, - userId: this.userId, - docUpdates: this.docUpdates, - fileUpdates: this.fileUpdates, - version: this.version - }, - params: { - project_id: this.project_id - } - } - }) - - describe('successfully', function () { - beforeEach(function () { - this.ProjectManager.updateProjectWithLocks = sinon.stub().yields() - this.HttpController.updateProject(this.req, this.res, this.next) - }) - - it('should accept the change', function () { - this.ProjectManager.updateProjectWithLocks - .calledWith( - this.project_id, - this.projectHistoryId, - this.userId, - this.expectedUpdates, - this.version - ) - .should.equal(true) - }) - - it('should return a successful No Content response', function () { - this.res.sendStatus.calledWith(204).should.equal(true) - }) - - it('should time the request', function () { - this.Metrics.Timer.prototype.done.called.should.equal(true) - }) - }) - - describe('when an errors occurs', function () { - beforeEach(function () { - this.ProjectManager.updateProjectWithLocks = sinon - .stub() - .yields(new Error('oops')) - this.HttpController.updateProject(this.req, this.res, this.next) - }) - - it('should call next with the error', function () { - this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) - }) - }) - }) - - describe('updateProject (single updates 
parameter)', function () { + describe('updateProject', function () { beforeEach(function () { this.projectHistoryId = 'history-id-123' this.userId = 'user-id-123' From ca98a604ff6a3c2e0bef7b77c435e565e28b06ed Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 3 Jun 2020 10:22:56 +0100 Subject: [PATCH 687/769] update to node 10.21.0 --- services/document-updater/.nvmrc | 2 +- services/document-updater/Dockerfile | 2 +- services/document-updater/docker-compose.yml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index 5b7269c0a9..b61c07ffdd 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -10.19.0 +10.21.0 diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 4242e7d3be..b07f7117bc 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -2,7 +2,7 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -FROM node:10.19.0 as base +FROM node:10.21.0 as base WORKDIR /app diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 161476b8d1..47c4ffc1b1 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -6,7 +6,7 @@ version: "2.3" services: test_unit: - image: node:10.19.0 + image: node:10.21.0 volumes: - .:/app working_dir: /app @@ -17,7 +17,7 @@ services: user: node test_acceptance: - image: node:10.19.0 + image: node:10.21.0 volumes: - .:/app working_dir: /app From cf9f3f48dddc0bb5b92d1a28b6e7f82a7d18bac9 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 3 Jun 2020 11:10:45 +0100 Subject: [PATCH 688/769] update buildscript.txt to node 10.21.0 --- services/document-updater/buildscript.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index b234a9b3ac..7c1e06607e 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -5,6 +5,6 @@ document-updater --env-add= --env-pass-through= --language=es ---node-version=10.19.0 +--node-version=10.21.0 --public-repo=True --script-version=2.2.0 From 2ef5f471abe4611c9b56a5e66fadec299bc5d21a Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 24 Jul 2020 10:14:48 +0100 Subject: [PATCH 689/769] Upgrade to redis-sharelatex 1.0.13 --- services/document-updater/package-lock.json | 34 ++++++++++----------- services/document-updater/package.json | 2 +- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 6d838e6217..1ca191a283 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1581,6 +1581,14 @@ "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==" }, + "coffee-script": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz", + "integrity": "sha1-nJ8dK0pSoADe0Vtll5FwNkgmPB0=", + "requires": { + "mkdirp": "~0.3.5" + } + }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -3397,9 +3405,9 @@ } }, 
"ioredis": { - "version": "4.16.1", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.16.1.tgz", - "integrity": "sha512-g76Mm9dE7BLuewncu1MimGZw5gDDjDwjoRony/VoSxSJEKAhuYncDEwYKYjtHi2NWsTNIB6XXRjE64uVa/wpKQ==", + "version": "4.17.3", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.17.3.tgz", + "integrity": "sha512-iRvq4BOYzNFkDnSyhx7cmJNOi1x/HWYe+A4VXHBu4qpwJaGT1Mp+D2bVGJntH9K/Z/GeOM/Nprb8gB3bmitz1Q==", "requires": { "cluster-key-slot": "^1.1.0", "debug": "^4.1.1", @@ -5586,7 +5594,7 @@ "redis-commands": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha1-gNLiBpj+aI8icSf/nlFkp90X54U=" + "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==" }, "redis-errors": { "version": "1.2.0", @@ -5611,25 +5619,17 @@ } }, "redis-sharelatex": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.12.tgz", - "integrity": "sha512-Z+LDGaRNgZ+NiDaCC/R0N3Uy6SCtbKXqiXlvCwAbIQRSZUc69OVx/cQ3i5qDF7zeERhh+pnTd+zGs8nVfa5p+Q==", + "version": "1.0.13", + "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.13.tgz", + "integrity": "sha512-sAQNofqfcMlIxzxNJF1qUspJKDM1VuuIOrGZQX9nb5JtcJ5cusa5sc+Oyb51eymPV5mZGWT3u07tKtv4jdXVIg==", "requires": { "async": "^2.5.0", "coffee-script": "1.8.0", - "ioredis": "~4.16.1", + "ioredis": "~4.17.3", "redis-sentinel": "0.1.1", "underscore": "1.7.0" }, "dependencies": { - "coffee-script": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz", - "integrity": "sha512-EvLTMcu9vR6G1yfnz75yrISvhq1eBPC+pZbQhHzTiC5vXgpYIrArxQc5tB+SYfBi3souVdSZ4AZzYxI72oLXUw==", - "requires": { - "mkdirp": "~0.3.5" - } - }, "underscore": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", @@ -6199,7 +6199,7 @@ "standard-as-callback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha1-7YuyVkjhWDF1m2Ajvbh+a2CzgSY=" + "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" }, "statsd-parser": { "version": "0.0.4", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 3f0b1e2de0..04484720ea 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,7 +26,7 @@ "logger-sharelatex": "^1.9.1", "metrics-sharelatex": "^2.6.2", "mongojs": "^3.1.0", - "redis-sharelatex": "^1.0.12", + "redis-sharelatex": "^1.0.13", "request": "^2.88.2", "requestretry": "^4.1.0", "settings-sharelatex": "^1.1.0" From f331c89da5fe8988aa61d805593ecc33ee07cad9 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 24 Jul 2020 11:14:20 +0100 Subject: [PATCH 690/769] Fix integrity hash on coffee-script dependency --- services/document-updater/package-lock.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 1ca191a283..8bc91495cb 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1584,7 +1584,7 @@ "coffee-script": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz", - "integrity": "sha1-nJ8dK0pSoADe0Vtll5FwNkgmPB0=", + "integrity": 
"sha512-EvLTMcu9vR6G1yfnz75yrISvhq1eBPC+pZbQhHzTiC5vXgpYIrArxQc5tB+SYfBi3souVdSZ4AZzYxI72oLXUw==", "requires": { "mkdirp": "~0.3.5" } From bd5cb98517b87d58887b290126decded7da9d55a Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 6 Aug 2020 12:39:58 +0100 Subject: [PATCH 691/769] [misc] bump the dev-env to 3.3.1 --- .../document-updater/.github/dependabot.yml | 17 +++ services/document-updater/.gitignore | 3 + services/document-updater/Dockerfile | 2 - services/document-updater/Jenkinsfile | 131 ------------------ services/document-updater/Makefile | 6 +- services/document-updater/buildscript.txt | 4 +- .../document-updater/docker-compose.ci.yml | 2 + services/document-updater/docker-compose.yml | 6 +- services/document-updater/nodemon.json | 1 - services/document-updater/package-lock.json | 102 +++++++++----- services/document-updater/package.json | 18 +-- 11 files changed, 108 insertions(+), 184 deletions(-) create mode 100644 services/document-updater/.github/dependabot.yml delete mode 100644 services/document-updater/Jenkinsfile diff --git a/services/document-updater/.github/dependabot.yml b/services/document-updater/.github/dependabot.yml new file mode 100644 index 0000000000..c6f98d843d --- /dev/null +++ b/services/document-updater/.github/dependabot.yml @@ -0,0 +1,17 @@ +version: 2 +updates: + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "daily" + + pull-request-branch-name: + # Separate sections of the branch name with a hyphen + # Docker images use the branch name and do not support slashes in tags + # https://github.com/overleaf/google-ops/issues/822 + # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator + separator: "-" + + # Block informal upgrades -- security upgrades use a separate queue. + # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit + open-pull-requests-limit: 0 diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore index 86e9e7a2fc..624e78f096 100644 --- a/services/document-updater/.gitignore +++ b/services/document-updater/.gitignore @@ -47,3 +47,6 @@ forever/ **/appendonly.aof **/dump.rdb **/nodes.conf + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index b07f7117bc..78a715757d 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -15,8 +15,6 @@ RUN npm ci --quiet COPY . /app - - FROM base COPY --from=app /app /app diff --git a/services/document-updater/Jenkinsfile b/services/document-updater/Jenkinsfile deleted file mode 100644 index 803963773e..0000000000 --- a/services/document-updater/Jenkinsfile +++ /dev/null @@ -1,131 +0,0 @@ -String cron_string = BRANCH_NAME == "master" ? 
"@daily" : "" - -pipeline { - agent any - - environment { - GIT_PROJECT = "document-updater" - JENKINS_WORKFLOW = "document-updater-sharelatex" - TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" - GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT" - } - - triggers { - pollSCM('* * * * *') - cron(cron_string) - } - - stages { - - stage('Install') { - steps { - withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { - sh "curl $GIT_API_URL \ - --data '{ \ - \"state\" : \"pending\", \ - \"target_url\": \"$TARGET_URL\", \ - \"description\": \"Your build is underway\", \ - \"context\": \"ci/jenkins\" }' \ - -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" - } - } - } - - stage('Build') { - steps { - sh 'make build' - } - } - - stage('Linting') { - steps { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format' - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint' - } - } - - stage('Unit Tests') { - steps { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit' - } - } - - stage('Acceptance Tests') { - steps { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance' - } - } - - stage('Package and docker push') { - steps { - sh 'echo ${BUILD_NUMBER} > build_number.txt' - sh 'touch build.tar.gz' // Avoid tar warning about files changing during read - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar' - - withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) { - sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}' - } - sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish' - sh 'docker logout https://gcr.io/overleaf-ops' - - } - } - - stage('Publish to s3') { - steps { - sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt' - withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { - s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") - } - withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { - // The deployment process uses this file to figure out the latest build - s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") - } - } - } - } - - post { - always { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean' - sh 'make clean' - } - - success { - withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { - sh "curl $GIT_API_URL \ - --data '{ \ - \"state\" : \"success\", \ - \"target_url\": \"$TARGET_URL\", \ - \"description\": \"Your build succeeded!\", \ - \"context\": \"ci/jenkins\" }' \ - -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" - } - } - - failure { - mail(from: "${EMAIL_ALERT_FROM}", - to: "${EMAIL_ALERT_TO}", - subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", - body: "Build: ${BUILD_URL}") - withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { - sh "curl $GIT_API_URL \ - --data '{ \ - \"state\" : \"failure\", \ - \"target_url\": \"$TARGET_URL\", \ - \"description\": \"Your build failed\", \ - \"context\": \"ci/jenkins\" }' \ - -u 
$GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" - } - } - } - - // The options directive is for configuration that applies to the whole job. - options { - // we'd like to make sure remove old builds, so we don't fill up our storage! - buildDiscarder(logRotator(numToKeepStr:'50')) - - // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after: - timeout(time: 30, unit: 'MINUTES') - } -} diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index df879265c9..596aa47fdb 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -25,13 +25,13 @@ clean: docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) format: - $(DOCKER_COMPOSE) run --rm test_unit npm run format + $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format format_fix: - $(DOCKER_COMPOSE) run --rm test_unit npm run format:fix + $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix lint: - $(DOCKER_COMPOSE) run --rm test_unit npm run lint + $(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint test: format lint test_unit test_acceptance diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index 7c1e06607e..1593c6c188 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -1,10 +1,8 @@ document-updater ---acceptance-creds=None --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= ---language=es --node-version=10.21.0 --public-repo=True ---script-version=2.2.0 +--script-version=3.3.1 diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index d609e7b5ec..848ca57a38 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -11,6 +11,7 @@ services: command: npm run test:unit:_run environment: NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" test_acceptance: @@ -23,6 +24,7 @@ services: POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" depends_on: mongo: condition: service_healthy diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 47c4ffc1b1..f8e0219b19 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -13,7 +13,8 @@ services: environment: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test - command: npm run test:unit + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit user: node test_acceptance: @@ -29,13 +30,14 @@ services: MOCHA_GREP: ${MOCHA_GREP} LOG_LEVEL: ERROR NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" user: node depends_on: mongo: condition: service_healthy redis: condition: service_healthy - command: npm run test:acceptance + command: npm run --silent test:acceptance redis: image: redis diff --git a/services/document-updater/nodemon.json b/services/document-updater/nodemon.json index 5826281b84..e3e8817d90 100644 --- a/services/document-updater/nodemon.json +++ b/services/document-updater/nodemon.json @@ -8,7 +8,6 @@ "execMap": { "js": "npm run start" }, - "watch": [ "app/js/", "app.js", diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 8bc91495cb..c154414b01 100644 --- a/services/document-updater/package-lock.json +++ 
b/services/document-updater/package-lock.json @@ -2032,9 +2032,9 @@ "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "eslint": { - "version": "6.6.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.6.0.tgz", - "integrity": "sha512-PpEBq7b6qY/qrOmpYQ/jTMDYfuQMELR4g4WI1M/NaSDDD/bdcMb+dj4Hgks7p41kW2caXsPsEZAEAyAgjVVC0g==", + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", @@ -2052,7 +2052,7 @@ "file-entry-cache": "^5.0.1", "functional-red-black-tree": "^1.0.1", "glob-parent": "^5.0.0", - "globals": "^11.7.0", + "globals": "^12.1.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", @@ -2065,7 +2065,7 @@ "minimatch": "^3.0.4", "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", - "optionator": "^0.8.2", + "optionator": "^0.8.3", "progress": "^2.0.0", "regexpp": "^2.0.1", "semver": "^6.1.2", @@ -2106,6 +2106,15 @@ "ms": "^2.1.1" } }, + "globals": { + "version": "12.4.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", + "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "dev": true, + "requires": { + "type-fest": "^0.8.1" + } + }, "minimist": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", @@ -2126,6 +2135,12 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true + }, + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true } } }, @@ -2240,9 +2255,9 @@ "dev": true }, "eslint-plugin-chai-friendly": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.6.0.tgz", - "integrity": "sha512-Uvvv1gkbRGp/qfN15B0kQyQWg+oFA8buDSqrwmW3egNSk/FpqH2MjQqKOuKwmEL6w4QIQrIjDp+gg6kGGmD3oQ==", + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.5.0.tgz", + "integrity": "sha512-Pxe6z8C9fP0pn2X2nGFU/b3GBOCM/5FVus1hsMwJsXP3R7RiXFl7g0ksJbsc0GxiLyidTW4mEFk77qsNn7Tk7g==", "dev": true }, "eslint-plugin-es": { @@ -2526,9 +2541,9 @@ "dev": true }, "eslint-scope": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", - "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.0.tgz", + "integrity": "sha512-iiGRvtxWqgtx5m8EyQUJihBloE4EnYeGE/bz1wSPwJE6tZuJUtHlhqDM4Xj2ukE8Dyy1+HCZ4hE0fzIVMzb58w==", "dev": true, "requires": { "esrecurse": "^4.1.0", @@ -2562,9 +2577,9 @@ }, "dependencies": { "acorn": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", - "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.0.tgz", + "integrity": "sha512-+G7P8jJmCHr+S+cLfQxygbWhXy+8YTVGzAkpEbcLo2mLoL7tij/VG41QSHACSf5QgYRhMZYHuNc6drJaO0Da+w==", 
"dev": true } } @@ -3283,21 +3298,21 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "inquirer": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.1.0.tgz", - "integrity": "sha512-5fJMWEmikSYu0nv/flMc475MhGbB7TSPd/2IpFV4I4rMklboCH2rQjYY5kKiYGHqUF9gvaambupcJFFG9dvReg==", + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", + "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", "dev": true, "requires": { "ansi-escapes": "^4.2.1", - "chalk": "^3.0.0", + "chalk": "^4.1.0", "cli-cursor": "^3.1.0", - "cli-width": "^2.0.0", + "cli-width": "^3.0.0", "external-editor": "^3.0.3", "figures": "^3.0.0", - "lodash": "^4.17.15", + "lodash": "^4.17.19", "mute-stream": "0.0.8", "run-async": "^2.4.0", - "rxjs": "^6.5.3", + "rxjs": "^6.6.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0", "through": "^2.3.6" @@ -3320,15 +3335,21 @@ } }, "chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", "dev": true, "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, + "cli-width": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "dev": true + }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -3362,6 +3383,21 @@ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true }, + "lodash": { + "version": "4.17.19", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", + "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==", + "dev": true + }, + "rxjs": { + "version": "6.6.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.2.tgz", + "integrity": "sha512-BHdBMVoWC2sL26w//BCu3YzKT4s2jip/WhwsGEDmeKYBhKDZeYezVUnHatYB7L85v5xs0BAQmg6BEYJEKxBabg==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } + }, "string-width": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", @@ -4554,9 +4590,9 @@ } }, "onetime": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", - "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.1.tgz", + "integrity": "sha512-ZpZpjcJeugQfWsfyQlshVoowIIQ1qBGSVll4rfDq6JJVO//fesjoX808hXWfBjY+ROZgpKDI5TRSRBSoJiZ8eg==", "dev": true, "requires": { "mimic-fn": "^2.1.0" @@ -6312,9 +6348,9 @@ "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" }, "strip-json-comments": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.0.tgz", - "integrity": "sha512-e6/d0eBu7gHtdCqFt0xJr642LdToM5/cN4Qb9DbHjVx1CP5RyeM+zH7pbecEmDv/lBqb0QH+6Uqq75rxFPkM0w==", + "version": "3.1.1", 
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true }, "stubs": { @@ -6599,9 +6635,9 @@ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, "v8-compile-cache": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", - "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz", + "integrity": "sha512-8OQ9CL+VWyt3JStj7HX7/ciTL2V3Rl1Wf5OL+SNTm0yK1KvtReVulksyeRnCANHHuUxHlQig+JJDlUhBt1NQDQ==", "dev": true }, "validate-npm-package-license": { diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 04484720ea..5858eebd9a 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -13,7 +13,7 @@ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "nodemon": "nodemon --config nodemon.json", - "lint": "node_modules/.bin/eslint .", + "lint": "node_modules/.bin/eslint --max-warnings 0 .", "format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different", "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write" }, @@ -36,23 +36,23 @@ "chai": "^3.5.0", "chai-spies": "^0.7.1", "cluster-key-slot": "^1.0.5", - "eslint": "^6.6.0", - "eslint-config-prettier": "^6.11.0", - "eslint-config-standard": "^14.1.1", + "eslint": "^6.8.0", + "eslint-config-prettier": "^6.10.0", + "eslint-config-standard": "^14.1.0", "eslint-config-standard-jsx": "^8.1.0", "eslint-config-standard-react": "^9.2.0", "eslint-plugin-chai-expect": "^2.1.0", - "eslint-plugin-chai-friendly": "^0.6.0", - "eslint-plugin-import": "^2.20.2", + "eslint-plugin-chai-friendly": "^0.5.0", + "eslint-plugin-import": "^2.20.1", "eslint-plugin-jsx-a11y": "^6.2.3", "eslint-plugin-mocha": "^6.3.0", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-prettier": "^3.1.3", + "eslint-plugin-node": "^11.0.0", + "eslint-plugin-prettier": "^3.1.2", "eslint-plugin-promise": "^4.2.1", "eslint-plugin-react": "^7.19.0", "eslint-plugin-standard": "^4.0.1", "mocha": "^5.0.1", - "prettier": "^2.0.5", + "prettier": "^2.0.0", "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "~0.2.0", "sinon": "^9.0.2", From 7f19fa6081992ff15b2585d9bba76173b1e06ba1 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 10 Aug 2020 17:01:12 +0100 Subject: [PATCH 692/769] [misc] bump the dev-env to 3.3.2 --- services/document-updater/buildscript.txt | 2 +- services/document-updater/docker-compose.ci.yml | 2 +- services/document-updater/docker-compose.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index 1593c6c188..a379cedc95 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -5,4 +5,4 @@ document-updater --env-pass-through= --node-version=10.21.0 --public-repo=True ---script-version=3.3.1 +--script-version=3.3.2 diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index 848ca57a38..d56b0e1590 100644 --- 
a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -45,4 +45,4 @@ services: image: redis mongo: - image: mongo:3.6 + image: mongo:4.0 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index f8e0219b19..397a97aa40 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -43,5 +43,5 @@ services: image: redis mongo: - image: mongo:3.6 + image: mongo:4.0 From a3430cfdae397167fb13c4978562f8983e05ccde Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Wed, 12 Aug 2020 15:35:03 +0100 Subject: [PATCH 693/769] [misc] bump logger-sharelatex to version 2.2.0 --- services/document-updater/package-lock.json | 388 +++++++++++++------- services/document-updater/package.json | 2 +- 2 files changed, 252 insertions(+), 138 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index c154414b01..3b32518ac4 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -155,6 +155,24 @@ "google-auth-library": "^5.5.0", "retry-request": "^4.0.0", "teeny-request": "^6.0.0" + }, + "dependencies": { + "google-auth-library": { + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": "^4.1.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + } } }, "@google-cloud/debug-agent": { @@ -370,15 +388,33 @@ "stream-events": "^1.0.4", "through2": "^3.0.0", "type-fest": "^0.12.0" + }, + "dependencies": { + "google-auth-library": { + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": "^4.1.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + } } }, "@google-cloud/logging-bunyan": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz", - "integrity": "sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.0.tgz", + "integrity": "sha512-ZLVXEejNQ27ktGcA3S/sd7GPefp7kywbn+/KoBajdb1Syqcmtc98jhXpYQBXVtNP2065iyu77s4SBaiYFbTC5A==", "requires": { "@google-cloud/logging": "^7.0.0", - "google-auth-library": "^5.0.0" + "google-auth-library": "^6.0.0" } }, "@google-cloud/paginator": { @@ -783,17 +819,17 @@ } }, "@grpc/grpc-js": { - "version": "0.6.18", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-0.6.18.tgz", - "integrity": "sha512-uAzv/tM8qpbf1vpx1xPMfcUMzbfdqJtdCYAqY/LsLeQQlnTb4vApylojr+wlCyr7bZeg3AFfHvtihnNOQQt/nA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", + "integrity": 
"sha512-Hm+xOiqAhcpT9RYM8lc15dbQD7aQurM7ZU8ulmulepiPlN7iwBXXwP3vSBUimoFoApRqz7pSIisXU8pZaCB4og==", "requires": { "semver": "^6.2.0" } }, "@grpc/proto-loader": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.3.tgz", - "integrity": "sha512-8qvUtGg77G2ZT2HqdqYoM/OY97gQd/0crSG34xNmZ4ZOsv3aQT/FQV9QfZPazTGna6MIoyUd+u6AxsoZjJ/VMQ==", + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.5.tgz", + "integrity": "sha512-WwN9jVNdHRQoOBo9FDH7qU+mgfjPc8GygPYms3M+y3fbQLfnCe/Kv/E01t7JRgnrsOHH8euvSbed3mIalXhwqQ==", "requires": { "lodash.camelcase": "^4.3.0", "protobufjs": "^6.8.6" @@ -822,9 +858,9 @@ } }, "@overleaf/o-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", - "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.0.0.tgz", + "integrity": "sha512-LsM2s6Iy9G97ktPo0ys4VxtI/m3ahc1ZHwjo5XnhXtjeIkkkVAehsrcRRoV/yWepPjymB0oZonhcfojpjYR/tg==" }, "@protobufjs/aspromise": { "version": "1.1.2", @@ -947,9 +983,9 @@ "dev": true }, "@tootallnate/once": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.0.0.tgz", - "integrity": "sha512-KYyTT/T6ALPkIRd2Ge080X/BsXvy9O0hcWTtMWkPvwAwF99+vn6Dv4GzrFT/Nn1LePr+FFDbRXXlqmsy9lw2zA==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==" }, "@types/caseless": { "version": "0.12.2", @@ -982,9 +1018,9 @@ "dev": true }, "@types/fs-extra": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.0.tgz", - "integrity": "sha512-UoOfVEzAUpeSPmjm7h1uk5MH6KZma2z2O7a75onTGjnNvAvMVrPzPL/vBbT65iIGHWj6rokwfmYcmxmlSf2uwg==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.1.tgz", + "integrity": "sha512-TcUlBem321DFQzBNuz8p0CLLKp0VvF/XH9E4KHNmgwyp4E3AfgI5cjiIVZWlbfThBop2qxFIh4+LeY6hVWWZ2w==", "requires": { "@types/node": "*" } @@ -1120,9 +1156,9 @@ "dev": true }, "agent-base": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.0.tgz", - "integrity": "sha512-j1Q7cSCqN+AwrmDd+pzgqc0/NpC655x2bUf5ZjRIO77DcNBFmh+OgRNzF6OKdCC9RSCb19fGd99+bhXFdkRNqw==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.1.tgz", + "integrity": "sha512-01q25QQDwLSsyfhrKbn8yuur+JNw0H+0Y4JiGIKd3z9aYk/w/2kxD/Upc+t2ZBBSUNff50VjPsSW2YxM8QYKVg==", "requires": { "debug": "4" }, @@ -1551,6 +1587,11 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=" + }, "cli-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", @@ -1737,10 +1778,15 @@ } } }, + "crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=" + }, "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": 
"sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" + "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, "damerau-levenshtein": { "version": "1.0.6", @@ -2979,27 +3025,100 @@ "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" }, "google-auth-library": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", - "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz", + "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", "fast-text-encoding": "^1.0.0", - "gaxios": "^2.1.0", - "gcp-metadata": "^3.4.0", - "gtoken": "^4.1.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", "jws": "^4.0.0", - "lru-cache": "^5.0.0" + "lru-cache": "^6.0.0" + }, + "dependencies": { + "bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==" + }, + "gaxios": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz", + "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", + "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz", + "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==", + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" + }, + "yallist": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } } }, "google-gax": { - "version": "1.15.1", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.15.1.tgz", - "integrity": "sha512-1T1PwSZWnbdRusA+NCZMSe56iU6swGvuZuy54eYl9vEHiRXTLYbQmUkWY2CqgYD9Fd/T4WBkUl22+rZG80unyw==", + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.15.3.tgz", + "integrity": "sha512-3JKJCRumNm3x2EksUTw4P1Rad43FTpqrtW9jzpf3xSMYXx+ogaqTM1vGo7VixHB4xkAyATXVIa3OcNSh8H9zsQ==", "requires": { - "@grpc/grpc-js": "^0.6.18", + "@grpc/grpc-js": "~1.0.3", "@grpc/proto-loader": "^0.5.1", "@types/fs-extra": "^8.0.1", "@types/long": "^4.0.0", @@ -3016,10 +3135,31 @@ "walkdir": "^0.4.0" }, "dependencies": { + "@types/node": { + "version": "13.13.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.15.tgz", + "integrity": "sha512-kwbcs0jySLxzLsa2nWUAGOd/s21WU1jebrEdtzhsj1D4Yps1EOuyI1Qcu+FD56dL7NRNIJtDDjcqIG22NwkgLw==" + }, + "google-auth-library": { + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": "^4.1.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + }, "protobufjs": { - "version": "6.8.9", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.9.tgz", - "integrity": "sha512-j2JlRdUeL/f4Z6x4aU4gj9I2LECglC+5qR2TrWb193Tla1qfdaNQTZ8I27Pt7K0Ajmvjjpft7O3KWTGciz4gpw==", + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz", + "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -3031,8 +3171,8 @@ "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.0", - "@types/node": "^10.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", "long": "^4.0.0" } } @@ -3069,9 +3209,9 @@ }, "dependencies": { "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" } } }, @@ -3834,12 +3974,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" + "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, "lodash.defaults": { "version": "4.2.0", @@ -3865,7 +4005,7 @@ "lodash.has": { 
"version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" + "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" }, "lodash.memoize": { "version": "4.1.2", @@ -3896,60 +4036,33 @@ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, "logger-sharelatex": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", - "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz", + "integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==", "requires": { - "@google-cloud/logging-bunyan": "^2.0.0", - "@overleaf/o-error": "^2.0.0", - "bunyan": "1.8.12", - "raven": "1.1.3", - "request": "2.88.0", - "yn": "^3.1.1" + "@google-cloud/logging-bunyan": "^3.0.0", + "@overleaf/o-error": "^3.0.0", + "bunyan": "^1.8.14", + "node-fetch": "^2.6.0", + "raven": "^2.6.4", + "yn": "^4.0.0" }, "dependencies": { "bunyan": { - "version": "1.8.12", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha512-dmDUbGHeGcvCDLRFOscZkwx1ZO/aFz3bJOCi5nCgzdhFGPxwK+y5AcDBnqagNGlJZ7lje/l6JUEz9mQcutttdg==", + "version": "1.8.14", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.14.tgz", + "integrity": "sha512-LlahJUxXzZLuw/hetUQJmRgZ1LF6+cr5TPpRj6jf327AsiIq2jhYEH4oqUUkVKTor+9w2BT3oxVwhzE5lw9tcg==", "requires": { "dtrace-provider": "~0.8", - "moment": "^2.10.6", + "moment": "^2.19.3", "mv": "~2", "safe-json-stringify": "~1" } }, - "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" - }, - "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.0", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - } + "yn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", + "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" } } }, @@ -4033,11 +4146,6 @@ "yallist": "^3.0.2" } }, - "lsmod": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" - }, "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", @@ -4086,6 +4194,23 @@ "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, + "md5": 
{ + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", + "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", + "requires": { + "charenc": "0.0.2", + "crypt": "0.0.2", + "is-buffer": "~1.1.6" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + } + } + }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -4278,9 +4403,9 @@ "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { - "version": "2.24.0", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", - "integrity": "sha1-DQVdU/UFKqZTyfbraLtdEr9cK1s=", + "version": "2.27.0", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.27.0.tgz", + "integrity": "sha512-al0MUK7cpIcglMv3YF13qSgdAIqxHTO7brRtaz3DlSULbqfazqkc5kEjNrLDOM7fsjshoFIihnU8snrP7zUvhQ==", "optional": true }, "mongodb": { @@ -5553,15 +5678,15 @@ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raven": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz", + "integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==", "requires": { "cookie": "0.3.1", - "json-stringify-safe": "5.0.1", - "lsmod": "1.0.0", - "stack-trace": "0.0.9", - "uuid": "3.0.0" + "md5": "^2.2.1", + "stack-trace": "0.0.10", + "timed-out": "4.0.1", + "uuid": "3.3.2" }, "dependencies": { "cookie": { @@ -5570,9 +5695,9 @@ "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, "uuid": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha512-rqE1LoOVLv3QrZMjb4NkF5UWlkurCfPyItVnFPNKDDGkHw4dQUdE4zMcLqx28+0Kcf3+bnUk4PisaiRJT4aiaQ==" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -5911,7 +6036,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", + "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, "safer-buffer": { @@ -6099,9 +6224,9 @@ } }, "snakecase-keys": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.1.2.tgz", - "integrity": "sha512-NrzHj8ctStnd1LYx3+L4buS7yildFum7WAbQQxkhPCNi3Qeqv7hoBne2c9n++HWxDG9Nv23pNEyyLCITZTv24Q==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.0.tgz", + "integrity": "sha512-WTJ0NhCH/37J+PU3fuz0x5b6TvtWQChTcKPOndWoUy0pteKOe0hrHMzSRsJOWSIP48EQkzUEsgQPmrG3W8pFNQ==", "requires": { "map-obj": "^4.0.0", "to-snake-case": "^1.0.0" @@ -6228,9 +6353,9 @@ } }, "stack-trace": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" + "version": "0.0.10", + "resolved": 
"https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" }, "standard-as-callback": { "version": "2.0.1", @@ -6356,7 +6481,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" }, "supports-color": { "version": "5.4.0", @@ -6399,9 +6524,9 @@ }, "dependencies": { "uuid": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.2.tgz", - "integrity": "sha512-vy9V/+pKG+5ZTYKf+VcphF5Oc6EFiu3W8Nv3P3zIh0EqVI80ZxOzuPfe9EHjkFNvf8+xuTHVeei4Drydlx4zjw==" + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", + "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==" } } }, @@ -6480,6 +6605,11 @@ "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, + "timed-out": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", + "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" + }, "timekeeper": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", @@ -6508,12 +6638,12 @@ "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", + "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", "requires": { "to-space-case": "^1.0.0" } @@ -6521,7 +6651,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", + "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", "requires": { "to-no-case": "^1.0.0" } @@ -6531,22 +6661,6 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, - "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", - "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" - } - } - }, "tslib": { "version": "1.11.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.2.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 5858eebd9a..aebc1ef940 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -23,7 +23,7 @@ "bunyan": 
"~0.22.1", "express": "4.17.1", "lodash": "^4.17.13", - "logger-sharelatex": "^1.9.1", + "logger-sharelatex": "^2.2.0", "metrics-sharelatex": "^2.6.2", "mongojs": "^3.1.0", "redis-sharelatex": "^1.0.13", From f80a92ce4600e2e5d38f3784811b6a06924f3d82 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 25 Aug 2020 12:32:16 +0100 Subject: [PATCH 694/769] [misc] migrate the app to the native mongo driver acceptance tests to follow in a separate commit --- services/document-updater/app.js | 34 +++++++++++++------ .../app/js/SnapshotManager.js | 4 +-- services/document-updater/app/js/mongodb.js | 30 ++++++++++++++++ services/document-updater/package-lock.json | 16 ++++----- services/document-updater/package.json | 1 + 5 files changed, 65 insertions(+), 20 deletions(-) create mode 100644 services/document-updater/app/js/mongodb.js diff --git a/services/document-updater/app.js b/services/document-updater/app.js index e23fa3ca7b..7ac17d13bb 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -17,14 +17,14 @@ const DispatchManager = require('./app/js/DispatchManager') const DeleteQueueManager = require('./app/js/DeleteQueueManager') const Errors = require('./app/js/Errors') const HttpController = require('./app/js/HttpController') -const mongojs = require('./app/js/mongojs') +const mongodb = require('./app/js/mongodb') const async = require('async') const Path = require('path') const bodyParser = require('body-parser') Metrics.mongodb.monitor( - Path.resolve(__dirname, '/node_modules/mongojs/node_modules/mongodb'), + Path.resolve(__dirname, '/node_modules/mongodb'), logger ) Metrics.event_loop.monitor(logger, 100) @@ -158,7 +158,7 @@ app.get('/health_check', (req, res, next) => { }) }, (cb) => { - mongojs.healthCheck((error) => { + mongodb.healthCheck((error) => { if (error) { logger.err({ err: error }, 'failed mongo health check') } @@ -219,13 +219,27 @@ const host = Settings.internal.documentupdater.host || 'localhost' if (!module.parent) { // Called directly - app.listen(port, host, () => { - logger.info(`Document-updater starting up, listening on ${host}:${port}`) - if (Settings.continuousBackgroundFlush) { - logger.info('Starting continuous background flush') - DeleteQueueManager.startBackgroundFlush() - } - }) + mongodb + .waitForDb() + .then(() => { + app.listen(port, host, function (err) { + if (err) { + logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`) + process.exit(1) + } + logger.info( + `Document-updater starting up, listening on ${host}:${port}` + ) + if (Settings.continuousBackgroundFlush) { + logger.info('Starting continuous background flush') + DeleteQueueManager.startBackgroundFlush() + } + }) + }) + .catch((err) => { + logger.fatal({ err }, 'Cannot connect to mongo. 
Exiting.') + process.exit(1) + }) } module.exports = app diff --git a/services/document-updater/app/js/SnapshotManager.js b/services/document-updater/app/js/SnapshotManager.js index 728ee73626..ec7b31315c 100644 --- a/services/document-updater/app/js/SnapshotManager.js +++ b/services/document-updater/app/js/SnapshotManager.js @@ -12,7 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let SnapshotManager -const { db, ObjectId } = require('./mongojs') +const { db, ObjectId } = require('./mongodb') module.exports = SnapshotManager = { recordSnapshot( @@ -30,7 +30,7 @@ module.exports = SnapshotManager = { } catch (error) { return callback(error) } - return db.docSnapshots.insert( + db.docSnapshots.insertOne( { project_id, doc_id, diff --git a/services/document-updater/app/js/mongodb.js b/services/document-updater/app/js/mongodb.js new file mode 100644 index 0000000000..c06865fff5 --- /dev/null +++ b/services/document-updater/app/js/mongodb.js @@ -0,0 +1,30 @@ +const Settings = require('settings-sharelatex') +const { MongoClient, ObjectId } = require('mongodb') + +const clientPromise = MongoClient.connect(Settings.mongo.url) + +async function healthCheck() { + const internalDb = (await clientPromise).db() + const res = await internalDb.command({ ping: 1 }) + if (!res.ok) { + throw new Error('failed mongo ping') + } +} + +async function waitForDb() { + await clientPromise +} + +const db = {} +waitForDb().then(async function () { + const internalDb = (await clientPromise).db() + + db.docSnapshots = internalDb.collection('docSnapshots') +}) + +module.exports = { + db, + ObjectId, + healthCheck: require('util').callbackify(healthCheck), + waitForDb +} diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 3b32518ac4..233cf4ec0f 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1480,9 +1480,9 @@ "dev": true }, "bson": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.3.tgz", - "integrity": "sha512-TdiJxMVnodVS7r0BdL42y/pqC9cL2iKynVwA0Ho3qbsQYr428veL3l7BQyuqiw+Q5SqqoT0m4srSY/BlZ9AxXg==" + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.5.tgz", + "integrity": "sha512-kDuEzldR21lHciPQAIulLs1LZlCXdLziXI6Mb/TDkwXhb//UORJNPXgcRs2CuO4H0DcMkpfT3/ySsP3unoZjBg==" }, "buffer-equal-constant-time": { "version": "1.0.1", @@ -4409,12 +4409,12 @@ "optional": true }, "mongodb": { - "version": "3.5.5", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.5.5.tgz", - "integrity": "sha512-GCjDxR3UOltDq00Zcpzql6dQo1sVry60OXJY3TDmFc2SWFY6c8Gn1Ardidc5jDirvJrx2GC3knGOImKphbSL3A==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.6.0.tgz", + "integrity": "sha512-/XWWub1mHZVoqEsUppE0GV7u9kanLvHxho6EvBxQbShXTKYF9trhZC2NzbulRGeG7xMJHD8IOWRcdKx5LPjAjQ==", "requires": { "bl": "^2.2.0", - "bson": "^1.1.1", + "bson": "^1.1.4", "denque": "^1.4.1", "require_optional": "^1.0.1", "safe-buffer": "^5.1.2", @@ -5939,7 +5939,7 @@ "require_optional": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", - "integrity": "sha1-TPNaQkf2TKPfjC7yCMxJSxyo/C4=", + "integrity": "sha512-qhM/y57enGWHAe3v/NcwML6a3/vfESLe/sGM2dII+gEO0BpKRUkWZow/tyloNqJyN6kXSl3RyyM8Ll5D/sJP8g==", "requires": { "resolve-from": "^2.0.0", "semver": "^5.1.0" diff --git a/services/document-updater/package.json b/services/document-updater/package.json 
index aebc1ef940..d174d46fcc 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -25,6 +25,7 @@ "lodash": "^4.17.13", "logger-sharelatex": "^2.2.0", "metrics-sharelatex": "^2.6.2", + "mongodb": "^3.6.0", "mongojs": "^3.1.0", "redis-sharelatex": "^1.0.13", "request": "^2.88.2", From 1d57706d44d928378ecc76b2b8e531431a000cdd Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 25 Aug 2020 13:29:58 +0100 Subject: [PATCH 695/769] [misc] migrate acceptance tests to the native mongo driver, drop mongojs --- services/document-updater/app/js/mongojs.js | 27 - services/document-updater/package-lock.json | 685 +++--------------- services/document-updater/package.json | 1 - .../test/acceptance/js/RangesTests.js | 13 +- .../acceptance/js/helpers/DocUpdaterApp.js | 10 +- 5 files changed, 115 insertions(+), 621 deletions(-) delete mode 100644 services/document-updater/app/js/mongojs.js diff --git a/services/document-updater/app/js/mongojs.js b/services/document-updater/app/js/mongojs.js deleted file mode 100644 index f88f08d15e..0000000000 --- a/services/document-updater/app/js/mongojs.js +++ /dev/null @@ -1,27 +0,0 @@ -// TODO: This file was created by bulk-decaffeinate. -// Sanity-check the conversion and remove this comment. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const Settings = require('settings-sharelatex') -const mongojs = require('mongojs') -const db = mongojs(Settings.mongo.url, ['docSnapshots']) - -module.exports = { - db, - ObjectId: mongojs.ObjectId, - healthCheck(callback) { - return db.runCommand({ ping: 1 }, function (err, res) { - if (err != null) { - return callback(err) - } - if (!res.ok) { - return callback(new Error('failed mongo ping')) - } - return callback() - }) - } -} diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 233cf4ec0f..1492fdd308 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -8,6 +8,7 @@ "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "dev": true, "requires": { "@babel/highlight": "^7.8.3" } @@ -16,6 +17,7 @@ "version": "7.8.8", "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.8.8.tgz", "integrity": "sha512-HKyUVu69cZoclptr8t8U5b6sx6zoWjh8jiUhnuj3MpZuKT2dJ8zPTuiy31luq32swhI0SpwItCIlU8XW7BZeJg==", + "dev": true, "requires": { "@babel/types": "^7.8.7", "jsesc": "^2.5.1", @@ -26,7 +28,8 @@ "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true } } }, @@ -34,6 +37,7 @@ "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz", "integrity": "sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==", + "dev": true, "requires": { "@babel/helper-get-function-arity": "^7.8.3", "@babel/template": "^7.8.3", @@ -44,6 +48,7 @@ "version": "7.8.3", "resolved": 
"https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz", "integrity": "sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==", + "dev": true, "requires": { "@babel/types": "^7.8.3" } @@ -52,6 +57,7 @@ "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", "integrity": "sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", + "dev": true, "requires": { "@babel/types": "^7.8.3" } @@ -60,6 +66,7 @@ "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz", "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==", + "dev": true, "requires": { "chalk": "^2.0.0", "esutils": "^2.0.2", @@ -69,7 +76,8 @@ "@babel/parser": { "version": "7.8.8", "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.8.8.tgz", - "integrity": "sha512-mO5GWzBPsPf6865iIbzNE0AvkKF3NE+2S3eRUpE+FE07BOAkXh6G+GW/Pj01hhXjve1WScbaIO4UlY1JKeqCcA==" + "integrity": "sha512-mO5GWzBPsPf6865iIbzNE0AvkKF3NE+2S3eRUpE+FE07BOAkXh6G+GW/Pj01hhXjve1WScbaIO4UlY1JKeqCcA==", + "dev": true }, "@babel/runtime": { "version": "7.9.6", @@ -94,6 +102,7 @@ "version": "7.8.6", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", "integrity": "sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==", + "dev": true, "requires": { "@babel/code-frame": "^7.8.3", "@babel/parser": "^7.8.6", @@ -104,6 +113,7 @@ "version": "7.8.6", "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.8.6.tgz", "integrity": "sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A==", + "dev": true, "requires": { "@babel/code-frame": "^7.8.3", "@babel/generator": "^7.8.6", @@ -120,6 +130,7 @@ "version": "4.1.1", "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, "requires": { "ms": "^2.1.1" } @@ -127,7 +138,8 @@ "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true } } }, @@ -135,6 +147,7 @@ "version": "7.8.7", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.8.7.tgz", "integrity": "sha512-k2TreEHxFA4CjGkL+GYjRyx35W0Mr7DP5+9q6WMkyKXB+904bYmG40syjMFV0oLlhhFCwWl0vA0DyzTDkwAiJw==", + "dev": true, "requires": { "esutils": "^2.0.2", "lodash": "^4.17.13", @@ -1209,33 +1222,23 @@ "ansi-regex": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==" + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true }, "ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, "requires": { "color-convert": "^1.9.0" } }, - 
"append-transform": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-1.0.0.tgz", - "integrity": "sha512-P009oYkeHyU742iSZJzZZywj4QRJdnTWffaKuJQLablCZ1uz6/cW4yaRgcDaoQ+uwOxxnt0gRUcwfsNP2ri0gw==", - "requires": { - "default-require-extensions": "^2.0.0" - } - }, - "archy": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", - "integrity": "sha1-+cjBN1fMHde8N5rHeyxipcKGjEA=" - }, "argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, "requires": { "sprintf-js": "~1.0.2" } @@ -1516,17 +1519,6 @@ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" }, - "caching-transform": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-3.0.2.tgz", - "integrity": "sha512-Mtgcv3lh3U0zRii/6qVgQODdPA4G3zhG+jtbCWj39RXuUFTMzH0vcdMtaJS1jPowd+It2Pqr6y3NJMQqOqCE2w==", - "requires": { - "hasha": "^3.0.0", - "make-dir": "^2.0.0", - "package-hash": "^3.0.0", - "write-file-atomic": "^2.4.2" - } - }, "callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -1536,7 +1528,8 @@ "camelcase": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==" + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true }, "camelcase-keys": { "version": "6.2.2", @@ -1575,6 +1568,7 @@ "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, "requires": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -1611,6 +1605,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, "requires": { "string-width": "^3.1.0", "strip-ansi": "^5.2.0", @@ -1634,6 +1629,7 @@ "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, "requires": { "color-name": "1.1.3" } @@ -1641,7 +1637,8 @@ "color-name": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true }, "combined-stream": { "version": "1.0.8", @@ -1663,11 +1660,6 @@ "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", "dev": true }, - "commondir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" - }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -1706,14 +1698,6 @@ "emitter-listener": "^1.1.1" } }, - 
"convert-source-map": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", - "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", - "requires": { - "safe-buffer": "~5.1.1" - } - }, "cookie": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", @@ -1741,43 +1725,6 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, - "cp-file": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/cp-file/-/cp-file-6.2.0.tgz", - "integrity": "sha512-fmvV4caBnofhPe8kOcitBwSn2f39QLjnAnGq3gO9dfd75mUytzKNZB1hde6QHunW2Rt+OwuBOMc3i1tNElbszA==", - "requires": { - "graceful-fs": "^4.1.2", - "make-dir": "^2.0.0", - "nested-error-stacks": "^2.0.0", - "pify": "^4.0.1", - "safe-buffer": "^5.0.1" - } - }, - "cross-spawn": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", - "integrity": "sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE=", - "requires": { - "lru-cache": "^4.0.1", - "which": "^1.2.9" - }, - "dependencies": { - "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" - } - } - }, "crypt": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", @@ -1813,7 +1760,8 @@ "decamelize": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true }, "deep-eql": { "version": "0.1.3", @@ -1838,14 +1786,6 @@ "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", "dev": true }, - "default-require-extensions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-2.0.0.tgz", - "integrity": "sha1-9fj7sYp9bVCyH2QfZJ67Uiz+JPc=", - "requires": { - "strip-bom": "^3.0.0" - } - }, "define-properties": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", @@ -1958,11 +1898,6 @@ } } }, - "each-series": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/each-series/-/each-series-1.0.0.tgz", - "integrity": "sha1-+Ibmxm39sl7x/nNWQUbuXLR4r8s=" - }, "ecc-jsbn": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", @@ -1991,7 +1926,8 @@ "emoji-regex": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true }, "encodeurl": { "version": "1.0.2", @@ -2015,6 +1951,7 @@ "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, "requires": { "is-arrayish": "^0.2.1" 
} @@ -2049,11 +1986,6 @@ "is-symbol": "^1.0.2" } }, - "es6-error": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", - "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==" - }, "es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", @@ -2075,7 +2007,8 @@ "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true }, "eslint": { "version": "6.8.0", @@ -2633,7 +2566,8 @@ "esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true }, "esquery": { "version": "1.3.1", @@ -2670,7 +2604,8 @@ "esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true }, "etag": { "version": "1.8.1", @@ -2813,20 +2748,11 @@ "unpipe": "~1.0.0" } }, - "find-cache-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", - "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", - "requires": { - "commondir": "^1.0.1", - "make-dir": "^2.0.0", - "pkg-dir": "^3.0.0" - } - }, "find-up": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, "requires": { "locate-path": "^3.0.0" } @@ -2896,15 +2822,6 @@ } } }, - "foreground-child": { - "version": "1.5.6", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", - "integrity": "sha1-T9ca0t/elnibmApcCilZN8svXOk=", - "requires": { - "cross-spawn": "^4", - "signal-exit": "^3.0.0" - } - }, "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", @@ -2943,7 +2860,8 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true }, "function-bind": { "version": "1.1.1", @@ -2981,7 +2899,8 @@ "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true }, "get-stdin": { "version": "6.0.0", @@ -3022,7 +2941,8 @@ "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" 
+ "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true }, "google-auth-library": { "version": "6.0.6", @@ -3189,7 +3109,8 @@ "graceful-fs": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true }, "growl": { "version": "1.10.5", @@ -3258,7 +3179,8 @@ "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true }, "has-symbols": { "version": "1.0.1", @@ -3266,21 +3188,6 @@ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", "dev": true }, - "hasha": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hasha/-/hasha-3.0.0.tgz", - "integrity": "sha1-UqMvq4Vp1BymmmH/GiFPjrfIvTk=", - "requires": { - "is-stream": "^1.0.1" - }, - "dependencies": { - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" - } - } - }, "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", @@ -3295,12 +3202,8 @@ "hosted-git-info": { "version": "2.8.8", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==" - }, - "html-escaper": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.0.tgz", - "integrity": "sha512-a4u9BeERWGu/S8JiWEAQcdrg9v4QArtP9keViQjGMdff20fBdd8waotXaNmODqBe6uZ3Nafi7K/ho4gCQHV3Ig==" + "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "dev": true }, "http-errors": { "version": "1.7.2", @@ -3415,7 +3318,8 @@ "imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=" + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true }, "indent-string": { "version": "4.0.0", @@ -3624,7 +3528,8 @@ "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true }, "is-buffer": { "version": "2.0.4", @@ -3652,7 +3557,8 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true }, "is-glob": { "version": "4.0.1", @@ -3715,125 +3621,25 @@ "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true }, "isstream": { "version": "0.1.2", "resolved": 
"https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, - "istanbul-lib-coverage": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", - "integrity": "sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==" - }, - "istanbul-lib-hook": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-2.0.7.tgz", - "integrity": "sha512-vrRztU9VRRFDyC+aklfLoeXyNdTfga2EI3udDGn4cZ6fpSXpHLV9X6CHvfoMCPtggg8zvDDmC4b9xfu0z6/llA==", - "requires": { - "append-transform": "^1.0.0" - } - }, - "istanbul-lib-instrument": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-3.3.0.tgz", - "integrity": "sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==", - "requires": { - "@babel/generator": "^7.4.0", - "@babel/parser": "^7.4.3", - "@babel/template": "^7.4.0", - "@babel/traverse": "^7.4.3", - "@babel/types": "^7.4.0", - "istanbul-lib-coverage": "^2.0.5", - "semver": "^6.0.0" - } - }, - "istanbul-lib-report": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", - "integrity": "sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==", - "requires": { - "istanbul-lib-coverage": "^2.0.5", - "make-dir": "^2.1.0", - "supports-color": "^6.1.0" - }, - "dependencies": { - "supports-color": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", - "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, - "istanbul-lib-source-maps": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz", - "integrity": "sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==", - "requires": { - "debug": "^4.1.1", - "istanbul-lib-coverage": "^2.0.5", - "make-dir": "^2.1.0", - "rimraf": "^2.6.3", - "source-map": "^0.6.1" - }, - "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "requires": { - "ms": "^2.1.1" - } - }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "requires": { - "glob": "^7.1.3" - } - } - } - }, - "istanbul-reports": { - "version": "2.2.7", - "resolved": 
"https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.7.tgz", - "integrity": "sha512-uu1F/L1o5Y6LzPVSVZXNOoD/KXpJue9aeLRd0sM9uMXfZvzomB0WxVamWb5ue8kA2vVWEmW7EG+A5n3f1kqHKg==", - "requires": { - "html-escaper": "^2.0.0" - } - }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true }, "js-yaml": { "version": "3.13.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, "requires": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -3847,7 +3653,8 @@ "jsesc": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==" + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true }, "json-bigint": { "version": "0.3.0", @@ -3857,11 +3664,6 @@ "bignumber.js": "^7.0.0" } }, - "json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==" - }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", @@ -3939,28 +3741,11 @@ "type-check": "~0.3.2" } }, - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "dependencies": { - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" - } - } - }, "locate-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, "requires": { "p-locate": "^3.0.0", "path-exists": "^3.0.0" @@ -3991,11 +3776,6 @@ "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=" }, - "lodash.flattendeep": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", - "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=" - }, "lodash.get": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", @@ -4155,22 +3935,6 @@ "statsd-parser": "~0.0.4" } }, - "make-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", - "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", - "requires": { - "pify": "^4.0.1", - "semver": "^5.6.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } - } - }, "make-plural": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", @@ -4227,14 +3991,6 @@ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" }, - "merge-source-map": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/merge-source-map/-/merge-source-map-1.1.0.tgz", - "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", - "requires": { - "source-map": "^0.6.1" - } - }, "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", @@ -4421,21 +4177,6 @@ "saslprep": "^1.0.0" } }, - "mongojs": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mongojs/-/mongojs-3.1.0.tgz", - "integrity": "sha512-aXJ4xfXwx9s1cqtKTZ24PypXiWhIgvgENObQzCGbV4QBxEVedy3yuErhx6znk959cF2dOzL2ClgXJvIhfgkpIQ==", - "requires": { - "each-series": "^1.0.0", - "mongodb": "^3.3.2", - "nyc": "^14.1.1", - "once": "^1.4.0", - "parse-mongo-url": "^1.1.1", - "readable-stream": "^3.4.0", - "thunky": "^1.1.0", - "to-mongodb-core": "^2.0.0" - } - }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -4491,11 +4232,6 @@ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" }, - "nested-error-stacks": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", - "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==" - }, "nice-try": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", @@ -4546,6 +4282,7 @@ "version": "2.5.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, "requires": { "hosted-git-info": "^2.1.4", "resolve": "^1.10.0", @@ -4556,67 +4293,8 @@ "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } - } - }, - "nyc": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/nyc/-/nyc-14.1.1.tgz", - "integrity": "sha512-OI0vm6ZGUnoGZv/tLdZ2esSVzDwUC88SNs+6JoSOMVxA+gKMB8Tk7jBwgemLx4O40lhhvZCVw1C+OYLOBOPXWw==", - "requires": { - "archy": "^1.0.0", - "caching-transform": "^3.0.2", - "convert-source-map": "^1.6.0", - "cp-file": "^6.2.0", - "find-cache-dir": "^2.1.0", - "find-up": "^3.0.0", - "foreground-child": "^1.5.6", - "glob": "^7.1.3", - "istanbul-lib-coverage": "^2.0.5", - "istanbul-lib-hook": "^2.0.7", - "istanbul-lib-instrument": "^3.3.0", - "istanbul-lib-report": "^2.0.8", - "istanbul-lib-source-maps": "^3.0.6", - "istanbul-reports": "^2.2.4", - "js-yaml": "^3.13.1", - "make-dir": "^2.1.0", - "merge-source-map": "^1.1.0", - "resolve-from": "^4.0.0", - "rimraf": "^2.6.3", - "signal-exit": "^3.0.2", - "spawn-wrap": "^1.4.2", - "test-exclude": "^5.2.3", - "uuid": "^3.3.2", - "yargs": "^13.2.2", - "yargs-parser": "^13.0.0" - }, - "dependencies": { - "glob": { - "version": "7.1.6", - 
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" - }, - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "requires": { - "glob": "^7.1.3" - } + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true } } }, @@ -4737,11 +4415,6 @@ "word-wrap": "~1.2.3" } }, - "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" - }, "os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", @@ -4760,6 +4433,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, "requires": { "p-limit": "^2.0.0" } @@ -4769,17 +4443,6 @@ "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=" }, - "package-hash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-3.0.0.tgz", - "integrity": "sha512-lOtmukMDVvtkL84rJHI7dpTYq+0rli8N2wlnqUcBuDWCfVhRUfOmnR9SsoHFMLpACvEV60dX7rd0rFaYDZI+FA==", - "requires": { - "graceful-fs": "^4.1.15", - "hasha": "^3.0.0", - "lodash.flattendeep": "^4.4.0", - "release-zalgo": "^1.0.0" - } - }, "parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -4794,20 +4457,6 @@ "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.2.tgz", "integrity": "sha512-0qfMZyjOUFBeEIvJ5EayfXJqaEXxQ+Oj2b7tWJM3hvEXvXsYCk05EDVI23oYnEw2NaFYUWdABEVPBvBMh8L/pA==" }, - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - }, - "parse-mongo-url": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/parse-mongo-url/-/parse-mongo-url-1.1.1.tgz", - "integrity": "sha1-ZiON9fjnwMjKTNlw1KtqE3PrdbU=" - }, "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", @@ -4821,7 +4470,8 @@ "path-exists": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true }, "path-is-absolute": { "version": "1.0.1", @@ -4850,21 +4500,6 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, - "path-type": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "requires": { - "pify": "^3.0.0" - }, - "dependencies": { - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" - } - } - }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -4875,14 +4510,6 @@ "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", "integrity": "sha1-SyzSXFDVmHNcUCkiJP2MbfQeMjE=" }, - "pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", - "requires": { - "find-up": "^3.0.0" - } - }, "prelude-ls": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", @@ -5603,11 +5230,6 @@ "ipaddr.js": "1.9.1" } }, - "pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" - }, "psl": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz", @@ -5718,25 +5340,6 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", "dev": true }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - } - }, - "read-pkg-up": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", - "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", - "requires": { - "find-up": "^3.0.0", - "read-pkg": "^3.0.0" - } - }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -5820,14 +5423,6 @@ "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", "dev": true }, - "release-zalgo": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", - "integrity": "sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA=", - "requires": { - "es6-error": "^4.0.1" - } - }, "request": { "version": "2.88.2", "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", @@ -5892,7 +5487,8 @@ "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true }, "require-in-the-middle": { "version": "4.0.1", @@ -5928,7 +5524,8 @@ "require-main-filename": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true }, "require-relative": { "version": "0.8.7", @@ -6117,7 +5714,8 @@ "set-blocking": { "version": "2.0.0", "resolved": 
"https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true }, "setprototypeof": { "version": "1.1.1", @@ -6172,7 +5770,8 @@ "signal-exit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "dev": true }, "sinon": { "version": "9.0.2", @@ -6246,59 +5845,11 @@ "memory-pager": "^1.0.2" } }, - "spawn-wrap": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.3.tgz", - "integrity": "sha512-IgB8md0QW/+tWqcavuFgKYR/qIRvJkRLPJDFaoXtLLUaVcCDK0+HeFTkmQHj3eprcYhc+gOl0aEA1w7qZlYezw==", - "requires": { - "foreground-child": "^1.5.6", - "mkdirp": "^0.5.0", - "os-homedir": "^1.0.1", - "rimraf": "^2.6.2", - "signal-exit": "^3.0.2", - "which": "^1.3.0" - }, - "dependencies": { - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" - }, - "mkdirp": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.3.tgz", - "integrity": "sha512-P+2gwrFqx8lhew375MQHHeTlY8AuOJSrGf0R5ddkEndUkmwpgUob/vQuBD1V22/Cw1/lJr4x+EjllSezBThzBg==", - "requires": { - "minimist": "^1.2.5" - } - }, - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "requires": { - "glob": "^7.1.3" - } - } - } - }, "spdx-correct": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "dev": true, "requires": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" @@ -6307,12 +5858,14 @@ "spdx-exceptions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==" + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "dev": true }, "spdx-expression-parse": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, "requires": { "spdx-exceptions": "^2.1.0", "spdx-license-ids": "^3.0.0" @@ -6321,7 +5874,8 @@ "spdx-license-ids": { "version": "3.0.5", "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==" + "integrity": 
"sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "dev": true }, "split": { "version": "1.0.1", @@ -6334,7 +5888,8 @@ "sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true }, "sshpk": { "version": "1.16.1", @@ -6389,6 +5944,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, "requires": { "emoji-regex": "^7.0.1", "is-fullwidth-code-point": "^2.0.0", @@ -6463,6 +6019,7 @@ "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, "requires": { "ansi-regex": "^4.1.0" } @@ -6470,7 +6027,8 @@ "strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true }, "strip-json-comments": { "version": "3.1.1", @@ -6487,6 +6045,7 @@ "version": "5.4.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", + "dev": true, "requires": { "has-flag": "^3.0.0" } @@ -6530,32 +6089,6 @@ } } }, - "test-exclude": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", - "integrity": "sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==", - "requires": { - "glob": "^7.1.3", - "minimatch": "^3.0.4", - "read-pkg-up": "^4.0.0", - "require-main-filename": "^2.0.0" - }, - "dependencies": { - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - } - } - }, "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -6600,11 +6133,6 @@ } } }, - "thunky": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", - "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" - }, "timed-out": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", @@ -6628,12 +6156,8 @@ "to-fast-properties": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=" - }, - "to-mongodb-core": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-mongodb-core/-/to-mongodb-core-2.0.0.tgz", - "integrity": "sha1-NZbsdhOsmtO5ioncua77pWnNJ+s=" + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true }, "to-no-case": { "version": "1.0.2", @@ -6758,6 +6282,7 @@ "version": "3.0.4", "resolved": 
"https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, "requires": { "spdx-correct": "^3.0.0", "spdx-expression-parse": "^3.0.0" @@ -6866,6 +6391,7 @@ "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, "requires": { "isexe": "^2.0.0" } @@ -6873,7 +6399,8 @@ "which-module": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true }, "word-wrap": { "version": "1.2.3", @@ -6885,6 +6412,7 @@ "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, "requires": { "ansi-styles": "^3.2.0", "string-width": "^3.0.0", @@ -6922,16 +6450,6 @@ } } }, - "write-file-atomic": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", - "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", - "requires": { - "graceful-fs": "^4.1.11", - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.2" - } - }, "xregexp": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/xregexp/-/xregexp-4.3.0.tgz", @@ -6944,7 +6462,8 @@ "y18n": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true }, "yallist": { "version": "3.1.1", @@ -6955,6 +6474,7 @@ "version": "13.3.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, "requires": { "cliui": "^5.0.0", "find-up": "^3.0.0", @@ -6972,6 +6492,7 @@ "version": "13.1.2", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, "requires": { "camelcase": "^5.0.0", "decamelize": "^1.2.0" diff --git a/services/document-updater/package.json b/services/document-updater/package.json index d174d46fcc..65656cc788 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -26,7 +26,6 @@ "logger-sharelatex": "^2.2.0", "metrics-sharelatex": "^2.6.2", "mongodb": "^3.6.0", - "mongojs": "^3.1.0", "redis-sharelatex": "^1.0.13", "request": "^2.88.2", "requestretry": "^4.1.0", diff --git a/services/document-updater/test/acceptance/js/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js index df0afe01e5..b765e58b7c 100644 --- a/services/document-updater/test/acceptance/js/RangesTests.js +++ b/services/document-updater/test/acceptance/js/RangesTests.js @@ -17,7 +17,7 @@ chai.should() const { expect } = chai const async = require('async') -const { db, ObjectId } = require('../../../app/js/mongojs') +const { db, 
ObjectId } = require('../../../app/js/mongodb') const MockWebApi = require('./helpers/MockWebApi') const DocUpdaterClient = require('./helpers/DocUpdaterClient') const DocUpdaterApp = require('./helpers/DocUpdaterApp') @@ -668,12 +668,12 @@ describe('Ranges', function () { if (error != null) { return done(error) } - return db.docSnapshots.find( - { + db.docSnapshots + .find({ project_id: ObjectId(this.project_id), doc_id: ObjectId(this.doc_id) - }, - (error, docSnapshots) => { + }) + .toArray((error, docSnapshots) => { if (error != null) { return done(error) } @@ -686,8 +686,7 @@ describe('Ranges', function () { tid: this.tid }) return done() - } - ) + }) } ) }) diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js index b922032cd8..8d53e69118 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js @@ -12,6 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const app = require('../../../../app') +const { waitForDb } = require('../../../../app/js/mongodb') require('logger-sharelatex').logger.level('fatal') module.exports = { @@ -26,9 +27,10 @@ module.exports = { return callback() } else if (this.initing) { return this.callbacks.push(callback) - } else { - this.initing = true - this.callbacks.push(callback) + } + this.initing = true + this.callbacks.push(callback) + waitForDb().then(() => { return app.listen(3003, 'localhost', (error) => { if (error != null) { throw error @@ -42,6 +44,6 @@ module.exports = { return result })() }) - } + }) } } From c337cf1c4fcaa8191340cf3e8a953caf9355f0a8 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 7 Sep 2020 09:49:06 +0100 Subject: [PATCH 696/769] [misc] mongodb: refactor the process of setting up the db construct Co-Authored-By: John Lees-Miller --- services/document-updater/app/js/mongodb.js | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/services/document-updater/app/js/mongodb.js b/services/document-updater/app/js/mongodb.js index c06865fff5..8994f891da 100644 --- a/services/document-updater/app/js/mongodb.js +++ b/services/document-updater/app/js/mongodb.js @@ -11,16 +11,20 @@ async function healthCheck() { } } +let setupDbPromise async function waitForDb() { - await clientPromise + if (!setupDbPromise) { + setupDbPromise = setupDb() + } + await setupDbPromise } const db = {} -waitForDb().then(async function () { +async function setupDb() { const internalDb = (await clientPromise).db() db.docSnapshots = internalDb.collection('docSnapshots') -}) +} module.exports = { db, From 16ef0d9610e846385bced850de0af1dc2a8d8dc3 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 10 Sep 2020 10:40:05 +0100 Subject: [PATCH 697/769] [misc] mongodb: use the new db connector by default mongojs was enabling it by default as well. 
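Not part of the patch, but for reference: a minimal sketch of the connector module as it stands after this commit and the previous one, assuming the mongodb 3.x driver. The hard-coded URL and options below stand in for Settings.mongo.url and Settings.mongo.options; the real module reads both from settings-sharelatex.

const { MongoClient } = require('mongodb')

// Connect once at require time; the options object (e.g. useUnifiedTopology)
// is now passed through explicitly instead of being implied by mongojs.
const clientPromise = MongoClient.connect(
  'mongodb://127.0.0.1/sharelatex', // stands in for Settings.mongo.url
  { useUnifiedTopology: true } // stands in for Settings.mongo.options
)

const db = {} // collections are attached once setupDb() resolves

async function setupDb() {
  // connect() resolves to a MongoClient; .db() returns the database
  // named in the connection string
  const internalDb = (await clientPromise).db()
  db.docSnapshots = internalDb.collection('docSnapshots')
}

let setupDbPromise
async function waitForDb() {
  // Memoize the promise so concurrent callers share a single setupDb() run
  // and none of them can observe a half-initialised db object.
  if (!setupDbPromise) {
    setupDbPromise = setupDb()
  }
  await setupDbPromise
}

module.exports = { db, waitForDb }

Callers are expected to await waitForDb() before touching db.docSnapshots, which is why DocUpdaterApp wraps app.listen() in waitForDb().then(...) in the acceptance-test helper above.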
--- services/document-updater/app/js/mongodb.js | 5 ++++- services/document-updater/config/settings.defaults.js | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/services/document-updater/app/js/mongodb.js b/services/document-updater/app/js/mongodb.js index 8994f891da..86b015a308 100644 --- a/services/document-updater/app/js/mongodb.js +++ b/services/document-updater/app/js/mongodb.js @@ -1,7 +1,10 @@ const Settings = require('settings-sharelatex') const { MongoClient, ObjectId } = require('mongodb') -const clientPromise = MongoClient.connect(Settings.mongo.url) +const clientPromise = MongoClient.connect( + Settings.mongo.url, + Settings.mongo.options +) async function healthCheck() { const internalDb = (await clientPromise).db() diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index 21c3219a33..0228941382 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -174,6 +174,10 @@ module.exports = { dispatcherCount: process.env.DISPATCHER_COUNT, mongo: { + options: { + useUnifiedTopology: + (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true' + }, url: process.env.MONGO_CONNECTION_STRING || `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex` From e47c3b747bcaa583840a1b547f3e00453ebbfda9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 12 Sep 2020 15:08:02 +0000 Subject: [PATCH 698/769] Bump node-fetch from 2.6.0 to 2.6.1 Bumps [node-fetch](https://github.com/bitinn/node-fetch) from 2.6.0 to 2.6.1. - [Release notes](https://github.com/bitinn/node-fetch/releases) - [Changelog](https://github.com/node-fetch/node-fetch/blob/master/docs/CHANGELOG.md) - [Commits](https://github.com/bitinn/node-fetch/compare/v2.6.0...v2.6.1) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 3b32518ac4..4df8e3439f 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -4533,9 +4533,9 @@ } }, "node-fetch": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha1-5jNFY4bUqlWGP2dqerDaqP3ssP0=" + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" }, "node-forge": { "version": "0.9.1", From ccdf555ecbb97b1c7c32756b829fd98c6ee1d596 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 17 Sep 2020 13:36:27 +0100 Subject: [PATCH 699/769] [misc] bump the dev-env to 3.3.4 and bump the node version to 10.22.1 --- services/document-updater/.github/dependabot.yml | 6 ++++++ services/document-updater/.nvmrc | 2 +- services/document-updater/Dockerfile | 2 +- services/document-updater/buildscript.txt | 4 ++-- services/document-updater/docker-compose.yml | 4 ++-- 5 files changed, 12 insertions(+), 6 deletions(-) diff --git a/services/document-updater/.github/dependabot.yml b/services/document-updater/.github/dependabot.yml index c6f98d843d..e2c64a3351 100644 --- a/services/document-updater/.github/dependabot.yml +++ b/services/document-updater/.github/dependabot.yml @@ -15,3 +15,9 @@ updates: # Block informal 
upgrades -- security upgrades use a separate queue. # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit open-pull-requests-limit: 0 + + # currently assign team-magma to all dependabot PRs - this may change in + # future if we reorganise teams + labels: + - "dependencies" + - "Team-Magma" diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index b61c07ffdd..c2f6421352 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -10.21.0 +10.22.1 diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 78a715757d..f0e362fca0 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -2,7 +2,7 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -FROM node:10.21.0 as base +FROM node:10.22.1 as base WORKDIR /app diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index a379cedc95..afb9f89937 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -3,6 +3,6 @@ document-updater --docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= ---node-version=10.21.0 +--node-version=10.22.1 --public-repo=True ---script-version=3.3.2 +--script-version=3.3.4 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 397a97aa40..6d513ec68f 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -6,7 +6,7 @@ version: "2.3" services: test_unit: - image: node:10.21.0 + image: node:10.22.1 volumes: - .:/app working_dir: /app @@ -18,7 +18,7 @@ services: user: node test_acceptance: - image: node:10.21.0 + image: node:10.22.1 volumes: - .:/app working_dir: /app From ffc55a0502ff8be3028890896c704515a8b4803c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Sep 2020 08:10:39 +0000 Subject: [PATCH 700/769] Bump lodash from 4.17.15 to 4.17.20 Bumps [lodash](https://github.com/lodash/lodash) from 4.17.15 to 4.17.20. 
- [Release notes](https://github.com/lodash/lodash/releases) - [Commits](https://github.com/lodash/lodash/compare/4.17.15...4.17.20) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 6 +++--- services/document-updater/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 1492fdd308..9c933ea707 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -3752,9 +3752,9 @@ } }, "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + "version": "4.17.19", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", + "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==" }, "lodash.at": { "version": "4.6.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 65656cc788..e62d644150 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -22,7 +22,7 @@ "body-parser": "^1.19.0", "bunyan": "~0.22.1", "express": "4.17.1", - "lodash": "^4.17.13", + "lodash": "^4.17.19", "logger-sharelatex": "^2.2.0", "metrics-sharelatex": "^2.6.2", "mongodb": "^3.6.0", From 033cc28e7fd28610a442d426630a0c445e8e974d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Sep 2020 08:11:14 +0000 Subject: [PATCH 701/769] Bump bl from 2.2.0 to 2.2.1 Bumps [bl](https://github.com/rvagg/bl) from 2.2.0 to 2.2.1. 
- [Release notes](https://github.com/rvagg/bl/releases) - [Commits](https://github.com/rvagg/bl/compare/v2.2.0...v2.2.1) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 1492fdd308..1ec56d429d 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1420,9 +1420,9 @@ "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, "bl": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.0.tgz", - "integrity": "sha512-wbgvOpqopSr7uq6fJrLH8EsvYMJf9gzfo2jCsL2eTy75qXPukA4pCgHamOQkZtY5vmfVtjB+P3LNlMHW5CEZXA==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz", + "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", "requires": { "readable-stream": "^2.3.5", "safe-buffer": "^5.1.1" From 0c7d8ae5ae1074bc1772ad074a11d6973f2129b6 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 16 Oct 2020 14:27:36 +0100 Subject: [PATCH 702/769] downgrade to logger 1.9.1 investigating possible regression in 2.2.0 --- services/document-updater/package-lock.json | 605 ++++++++------------ services/document-updater/package.json | 2 +- 2 files changed, 249 insertions(+), 358 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 22c82738ee..6328d17d93 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -168,30 +168,12 @@ "google-auth-library": "^5.5.0", "retry-request": "^4.0.0", "teeny-request": "^6.0.0" - }, - "dependencies": { - "google-auth-library": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", - "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^2.1.0", - "gcp-metadata": "^3.4.0", - "gtoken": "^4.1.0", - "jws": "^4.0.0", - "lru-cache": "^5.0.0" - } - } } }, "@google-cloud/debug-agent": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", - "integrity": "sha1-2qdjWhaYpWY31dxXzhED536uKdM=", + "integrity": "sha512-fP87kYbS6aeDna08BivwQ1J260mwJGchRi99XdWCgqbRwuFac8ul0OT5i2wEeDSc5QaDX8ZuWQQ0igZvh1rTyQ==", "requires": { "@google-cloud/common": "^0.32.0", "@sindresorhus/is": "^0.15.0", @@ -401,33 +383,15 @@ "stream-events": "^1.0.4", "through2": "^3.0.0", "type-fest": "^0.12.0" - }, - "dependencies": { - "google-auth-library": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", - "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^2.1.0", - "gcp-metadata": "^3.4.0", - "gtoken": "^4.1.0", - "jws": "^4.0.0", - "lru-cache": "^5.0.0" - } - } } }, "@google-cloud/logging-bunyan": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.0.tgz", - "integrity": 
"sha512-ZLVXEejNQ27ktGcA3S/sd7GPefp7kywbn+/KoBajdb1Syqcmtc98jhXpYQBXVtNP2065iyu77s4SBaiYFbTC5A==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz", + "integrity": "sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==", "requires": { "@google-cloud/logging": "^7.0.0", - "google-auth-library": "^6.0.0" + "google-auth-library": "^5.0.0" } }, "@google-cloud/paginator": { @@ -442,7 +406,7 @@ "@google-cloud/profiler": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", - "integrity": "sha1-Fj3738Mwuug1X+RuHlvgZTV7H1w=", + "integrity": "sha512-rNvtrFtIebIxZEJ/O0t8n7HciZGIXBo8DvHxWqAmsCaeLvkTtsaL6HmPkwxrNQ1IhbYWAxF+E/DwCiHyhKmgTg==", "requires": { "@google-cloud/common": "^0.26.0", "@types/console-log-level": "^1.4.0", @@ -464,7 +428,7 @@ "@google-cloud/common": { "version": "0.26.2", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz", - "integrity": "sha1-nFTiRxqEqgMelaJIJJduCA8lVkU=", + "integrity": "sha512-xJ2M/q3MrUbnYZuFlpF01caAlEhAUoRn0NXp93Hn3pkFpfSOG8YfbKbpBAHvcKVbBOAKVIwPsleNtuyuabUwLQ==", "requires": { "@google-cloud/projectify": "^0.3.2", "@google-cloud/promisify": "^0.3.0", @@ -488,7 +452,7 @@ "@google-cloud/promisify": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", - "integrity": "sha1-9kHm2USo4KBe4MsQkd+mAIm+zbo=" + "integrity": "sha512-QzB0/IMvB0eFxFK7Eqh+bfC8NLv3E9ScjWQrPOk6GgfNroxcVITdTlT8NRsRrcp5+QQJVPLkRqKG0PUdaWXmHw==" }, "agent-base": { "version": "4.3.0", @@ -501,7 +465,7 @@ "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" }, "debug": { "version": "3.2.6", @@ -525,7 +489,7 @@ "gcp-metadata": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", - "integrity": "sha1-H510lfdGChRSZIHynhFZbdVj3SY=", + "integrity": "sha512-caV4S84xAjENtpezLCT/GILEAF5h/bC4cNqZFmt/tjTn8t+JBtTkQrgBrJu3857YdsnlM8rxX/PMcKGtE8hUlw==", "requires": { "gaxios": "^1.0.2", "json-bigint": "^0.3.0" @@ -534,7 +498,7 @@ "google-auth-library": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", - "integrity": "sha1-ejFdIDZ0Svavyth7IQ7mY4tA9Xs=", + "integrity": "sha512-FURxmo1hBVmcfLauuMRKOPYAPKht3dGuI2wjeJFalDUThO0HoYVjr4yxt5cgYSFm1dgUpmN9G/poa7ceTFAIiA==", "requires": { "axios": "^0.18.0", "gcp-metadata": "^0.7.0", @@ -548,7 +512,7 @@ "gcp-metadata": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz", - "integrity": "sha1-bDXbtSvaMqQnu5yY9UI33dG1QG8=", + "integrity": "sha512-ffjC09amcDWjh3VZdkDngIo7WoluyC5Ag9PAYxZbmQLOLNI8lvPtoKTSCyU54j2gwy5roZh6sSMTfkY2ct7K3g==", "requires": { "axios": "^0.18.0", "extend": "^3.0.1", @@ -651,7 +615,7 @@ "@google-cloud/trace-agent": { "version": "3.6.1", "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", - "integrity": "sha1-W+dEE5TQ6ldY8o25IqUAT/PwO+w=", + "integrity": "sha512-KDo85aPN4gSxJ7oEIOlKd7aGENZFXAM1kbIn1Ds+61gh/K1CQWSyepgJo3nUpAwH6D1ezDWV7Iaf8ueoITc8Uw==", "requires": { "@google-cloud/common": "^0.32.1", "builtin-modules": "^3.0.0", @@ -871,34 +835,34 @@ } }, "@overleaf/o-error": { - "version": 
"3.0.0", - "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.0.0.tgz", - "integrity": "sha512-LsM2s6Iy9G97ktPo0ys4VxtI/m3ahc1ZHwjo5XnhXtjeIkkkVAehsrcRRoV/yWepPjymB0oZonhcfojpjYR/tg==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", + "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" }, "@protobufjs/base64": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", - "integrity": "sha1-TIVzDlm5ofHzSQR9vyQpYDS7JzU=" + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" }, "@protobufjs/codegen": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", - "integrity": "sha1-fvN/DQEPsCitGtWXIuUG2SYoFcs=" + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" }, "@protobufjs/eventemitter": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" }, "@protobufjs/fetch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", "requires": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" @@ -907,32 +871,32 @@ "@protobufjs/float": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" }, "@protobufjs/inquire": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" }, "@protobufjs/path": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" }, "@protobufjs/pool": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" }, "@protobufjs/utf8": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" }, "@sindresorhus/is": { "version": "0.15.0", "resolved": 
"https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", - "integrity": "sha1-lpFbqgXmpqHRN7rfSYTT/AWCC7Y=" + "integrity": "sha512-lu8BpxjAtRCAo5ifytTpCPCj99LF7o/2Myn+NXyNCBqvPYn7Pjd76AMmUB5l7XF1U6t0hcWrlEM5ESufW7wAeA==" }, "@sinonjs/commons": { "version": "1.7.2", @@ -1003,7 +967,7 @@ "@types/caseless": { "version": "0.12.2", "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", - "integrity": "sha1-9l09Y4ngHutFi9VNyPUrlalGO8g=" + "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==" }, "@types/color-name": { "version": "1.1.1", @@ -1014,12 +978,12 @@ "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" + "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" }, "@types/duplexify": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz", - "integrity": "sha1-38grZL06IWj1vSZESvFlvwI33Ng=", + "integrity": "sha512-5zOA53RUlzN74bvrSGwjudssD9F3a797sDZQkiYpUOxW+WHaXTCPz4/d5Dgi6FKnOqZ2CpaTo0DhgIfsXAOE/A==", "requires": { "@types/node": "*" } @@ -1080,7 +1044,7 @@ "@types/semver": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", - "integrity": "sha1-FGwqKe59O65L8vyydGNuJkyBPEU=" + "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, "@types/tough-cookie": { "version": "2.3.6", @@ -1143,7 +1107,7 @@ "abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha1-6vVNU7YrrkE46AnKIlyEOabvs5I=", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", "requires": { "event-target-shim": "^5.0.0" } @@ -1177,11 +1141,11 @@ }, "dependencies": { "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", "requires": { - "ms": "^2.1.1" + "ms": "2.1.2" } }, "ms": { @@ -1282,7 +1246,7 @@ "arrify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", - "integrity": "sha1-yWVekzHgq81YjSp8rX6ZVvZnAfo=" + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" }, "asn1": { "version": "0.2.4", @@ -1300,7 +1264,7 @@ "assertion-error": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": "sha1-5gtrDo8wG9l+U3UhW9pAbIURjAs=", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", "dev": true }, "ast-types-flow": { @@ -1326,7 +1290,7 @@ "async-listener": { "version": "0.6.10", "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", - "integrity": "sha1-p8l6vlcLpgLXgic8DeYKUePhfLw=", + "integrity": "sha512-gpuo6xOyF4D5DE5WvyqZdPA3NGhiT6Qf07l7DCB0wwDEsLvDIbCr6j9S5aj5Ch96dLace5tXVzWBZkxU/c5ohw==", "requires": { "semver": "^5.3.0", "shimmer": 
"^1.1.0" @@ -1342,12 +1306,12 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==" }, "aws4": { "version": "1.9.1", @@ -1357,7 +1321,7 @@ "axios": { "version": "0.18.1", "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", - "integrity": "sha1-/z8N4ue10YDnV62YAA8Qgbh7zqM=", + "integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==", "requires": { "follow-redirects": "1.5.10", "is-buffer": "^2.0.2" @@ -1386,7 +1350,7 @@ "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + "integrity": "sha512-9Y0g0Q8rmSt+H33DfKv7FOc3v+iRI+o1lbzt8jGcIosYW37IIW/2XVYq5NPdmaD5NQ59Nk26Kl/vZbwW9Fr8vg==" }, "base64-js": { "version": "1.3.1", @@ -1404,12 +1368,12 @@ "bignumber.js": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", - "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" + "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" }, "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", "requires": { "file-uri-to-path": "1.0.0" } @@ -1417,7 +1381,7 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" + "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, "bl": { "version": "2.2.1", @@ -1479,7 +1443,7 @@ "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, "bson": { @@ -1490,17 +1454,17 @@ "buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, "builtin-modules": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", - "integrity": "sha1-qtl8FRMet2tltQ7yCOdYTNdqdIQ=" + "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==" }, "bunyan": { "version": "0.22.3", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-0.22.3.tgz", - "integrity": "sha1-ehncG0yMZF90AkGnQPIkUUfGfsI=", + "integrity": "sha512-v9dd5qmd6nJHEi7fiNo1fR2pMpE8AiB47Ap984p4iJKj+dEA69jSccmq6grFQn6pxIh0evvKpC5XO1SKfiaRoQ==", "requires": { "dtrace-provider": "0.2.8", 
"mv": "~2" @@ -1545,12 +1509,12 @@ "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" }, "chai": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", - "integrity": "sha1-TQJjewZ/6Vi9v906QOxW/vc3Mkc=", + "integrity": "sha512-eRYY0vPS2a9zt5w5Z0aCeWbrXTEyvk7u/Xf71EzNObrjSCPgMm1Nku/D/u2tiqHBX5j40wWhj54YJLtgn8g55A==", "dev": true, "requires": { "assertion-error": "^1.0.1", @@ -1561,7 +1525,7 @@ "chai-spies": { "version": "0.7.1", "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", - "integrity": "sha1-ND2Z9RJEIS6LF+ZLk5lv97LCqbE=", + "integrity": "sha512-ezo+u5DUDjPhOYkgsjbbVhtdzsnVr6n2CL/juJA89YnBsWO4ocL14Ake0txlGrGZo/HwcfhFGaV0czdunr3tHA==", "dev": true }, "chalk": { @@ -1581,11 +1545,6 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, - "charenc": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", - "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=" - }, "cli-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", @@ -1663,12 +1622,12 @@ "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" + "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, "contains-path": { "version": "0.1.0", @@ -1692,7 +1651,7 @@ "continuation-local-storage": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz", - "integrity": "sha1-EfYT906RT+mzTJKtLSj+auHbf/s=", + "integrity": "sha512-jx44cconVqkCEEyLSKWwkvUXwO561jXMa3LPjTPsm5QR22PA0/mhe33FT4Xb5y74JDvt/Cq+5lm8S8rskLv9ZA==", "requires": { "async-listener": "^0.6.0", "emitter-listener": "^1.1.1" @@ -1723,12 +1682,7 @@ "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" - }, - "crypt": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", - "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=" + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" }, "d64": { "version": "1.0.0", @@ -1744,7 +1698,7 @@ "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", "requires": { "assert-plus": "^1.0.0" } @@ -1766,7 +1720,7 @@ "deep-eql": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", - "integrity": "sha1-71WKyrjeJSBs1xOQbXTlaTDrafI=", + "integrity": 
"sha512-6sEotTRGBFiNcqVoeHwnfopbSpi5NbH1VWJmYCVkmxMmaVTT0bUTrNaGyBwhgP4MZL012W/mkzIn3Da+iDYweg==", "dev": true, "requires": { "type-detect": "0.1.1" @@ -1775,7 +1729,7 @@ "type-detect": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", - "integrity": "sha1-C6XsKohWQORw6k6FBZcZANrFiCI=", + "integrity": "sha512-5rqszGVwYgBoDkIm2oUtvkfZMQ0vk29iDMU0W2qCa3rG0vPDNczCMT4hV/bLBgLg8k8ri6+u3Zbt+S/14eMzlA==", "dev": true } } @@ -1798,7 +1752,7 @@ "delay": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", - "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" + "integrity": "sha512-Lwaf3zVFDMBop1yDuFZ19F9WyGcZcGacsbdlZtWjQmM50tOcMntm1njF/Nb/Vjij3KaSvCF+sEYGKrrjObu2NA==" }, "delayed-stream": { "version": "1.0.0", @@ -1808,7 +1762,7 @@ "denque": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", - "integrity": "sha1-Z0T/dkHBSMP4ppwwflEjXB9KN88=" + "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==" }, "depd": { "version": "1.1.2", @@ -1823,7 +1777,7 @@ "diff": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", "dev": true }, "dlv": { @@ -1842,9 +1796,9 @@ } }, "dot-prop": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz", - "integrity": "sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", "requires": { "is-obj": "^2.0.0" } @@ -1861,7 +1815,7 @@ "duplexify": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", - "integrity": "sha1-Kk31MX9sz9kfhtb9JdjYoQO4gwk=", + "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", "requires": { "end-of-stream": "^1.0.0", "inherits": "^2.0.1", @@ -1910,7 +1864,7 @@ "ecdsa-sig-formatter": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha1-rg8PothQRe8UqBfao86azQSJ5b8=", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", "requires": { "safe-buffer": "^5.0.1" } @@ -1918,7 +1872,7 @@ "emitter-listener": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", - "integrity": "sha1-VrFA6PaZI3Wz18ssqxzHQy2WMug=", + "integrity": "sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ==", "requires": { "shimmer": "^1.2.0" } @@ -1945,7 +1899,7 @@ "ent": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", - "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" + "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" }, "error-ex": { "version": "1.3.2", @@ -1989,12 +1943,12 @@ "es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha1-TrIVlMlyvEBVPSduUQU5FD21Pgo=" + "integrity": 
"sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" }, "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", + "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", "requires": { "es6-promise": "^4.0.3" } @@ -2007,7 +1961,7 @@ "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true }, "eslint": { @@ -2615,7 +2569,7 @@ "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha1-XU0+vflYPWOlMzzi3rdICrKwV4k=" + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" }, "eventid": { "version": "1.0.0", @@ -2682,7 +2636,7 @@ "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" }, "fast-deep-equal": { "version": "3.1.1", @@ -2709,7 +2663,7 @@ "fast-text-encoding": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", - "integrity": "sha1-PlzoKTQJz6pxd6cbnKhOGx5vJe8=" + "integrity": "sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==" }, "figures": { "version": "3.2.0", @@ -2732,7 +2686,7 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" }, "finalhandler": { "version": "1.1.2", @@ -2760,7 +2714,7 @@ "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" + "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" }, "flat-cache": { "version": "2.0.1", @@ -2807,7 +2761,7 @@ "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", - "integrity": "sha1-e3qfmuov3/NnhqlP9kPtB/T/Xio=", + "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", "requires": { "debug": "=3.1.0" }, @@ -2860,7 +2814,7 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, "function-bind": { @@ -2911,7 +2865,7 @@ "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", "requires": { "assert-plus": "^1.0.0" } @@ 
-2919,7 +2873,7 @@ "glob": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", "optional": true, "requires": { "inflight": "^1.0.4", @@ -2945,92 +2899,19 @@ "dev": true }, "google-auth-library": { - "version": "6.0.6", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz", - "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==", + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", "fast-text-encoding": "^1.0.0", - "gaxios": "^3.0.0", - "gcp-metadata": "^4.1.0", - "gtoken": "^5.0.0", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": "^4.1.0", "jws": "^4.0.0", - "lru-cache": "^6.0.0" - }, - "dependencies": { - "bignumber.js": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", - "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==" - }, - "gaxios": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz", - "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", - "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", - "requires": { - "gaxios": "^3.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", - "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", - "requires": { - "node-forge": "^0.9.0" - } - }, - "gtoken": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz", - "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==", - "requires": { - "gaxios": "^3.0.0", - "google-p12-pem": "^3.0.0", - "jws": "^4.0.0", - "mime": "^2.2.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - }, - "yallist": 
{ - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } + "lru-cache": "^5.0.0" } }, "google-gax": { @@ -3056,25 +2937,9 @@ }, "dependencies": { "@types/node": { - "version": "13.13.15", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.15.tgz", - "integrity": "sha512-kwbcs0jySLxzLsa2nWUAGOd/s21WU1jebrEdtzhsj1D4Yps1EOuyI1Qcu+FD56dL7NRNIJtDDjcqIG22NwkgLw==" - }, - "google-auth-library": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", - "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^2.1.0", - "gcp-metadata": "^3.4.0", - "gtoken": "^4.1.0", - "jws": "^4.0.0", - "lru-cache": "^5.0.0" - } + "version": "13.13.25", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.25.tgz", + "integrity": "sha512-6ZMK4xRcF2XrPdKmPYQxZkdHKV18xKgUFVvhIgw2iwaaO6weleLPHLBGPZmLhjo+m1N+MZXRAoBEBCCVqgO2zQ==" }, "protobufjs": { "version": "6.10.1", @@ -3139,7 +3004,7 @@ "har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==" }, "har-validator": { "version": "5.1.3", @@ -3191,13 +3056,13 @@ "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", + "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==", "dev": true }, "hex2dec": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", - "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" + "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, "hosted-git-info": { "version": "2.8.8", @@ -3235,11 +3100,11 @@ }, "dependencies": { "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", "requires": { - "ms": "^2.1.1" + "ms": "2.1.2" } }, "ms": { @@ -3269,11 +3134,11 @@ }, "dependencies": { "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", "requires": { - "ms": "^2.1.1" + "ms": "2.1.2" } }, "ms": { @@ -3330,7 +3195,7 @@ "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "integrity": 
"sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "requires": { "once": "^1.3.0", "wrappy": "1" @@ -3523,7 +3388,7 @@ "is": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz", - "integrity": "sha1-Yc/23TxBk9uUo9YlggcrROVkXXk=" + "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==" }, "is-arrayish": { "version": "0.2.1", @@ -3611,7 +3476,7 @@ "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" }, "isarray": { "version": "1.0.0", @@ -3627,7 +3492,7 @@ "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" }, "js-tokens": { "version": "4.0.0", @@ -3648,7 +3513,7 @@ "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" }, "jsesc": { "version": "2.5.2", @@ -3659,7 +3524,7 @@ "json-bigint": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", - "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "integrity": "sha512-u+c/u/F+JNPUekHCFyGVycRPyh9UHD5iUhSyIAn10kxbDTJxijwAbT6XHaONEOXuGGfmWUSroheXgHcml4gLgg==", "requires": { "bignumber.js": "^7.0.0" } @@ -3667,7 +3532,7 @@ "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "integrity": "sha512-a3xHnILGMtk+hDOqNwHzF6e2fNbiMrXZvxKQiEv2MlgQP+pjIOzqAmKYD2mDpXYE/44M7g+n9p2bKkYWDUcXCQ==" }, "json-schema-traverse": { "version": "0.4.1", @@ -3683,12 +3548,12 @@ "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "integrity": "sha512-4Dj8Rf+fQ+/Pn7C5qeEX02op1WfOss3PKTE9Nsop3Dx+6UPxlm1dr/og7o2cRa5hNN07CACr4NFzRLtj/rjWog==", "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", @@ -3769,12 +3634,12 @@ "lodash.defaults": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", - "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" + "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==" }, "lodash.flatten": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", - "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=" + "integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==" }, "lodash.get": { "version": "4.4.2", @@ -3802,7 +3667,7 @@ "lodash.pickby": { "version": "4.6.0", "resolved": 
"https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" + "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" }, "lodash.unescape": { "version": "4.0.1", @@ -3816,33 +3681,60 @@ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, "logger-sharelatex": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz", - "integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", + "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", "requires": { - "@google-cloud/logging-bunyan": "^3.0.0", - "@overleaf/o-error": "^3.0.0", - "bunyan": "^1.8.14", - "node-fetch": "^2.6.0", - "raven": "^2.6.4", - "yn": "^4.0.0" + "@google-cloud/logging-bunyan": "^2.0.0", + "@overleaf/o-error": "^2.0.0", + "bunyan": "1.8.12", + "raven": "1.1.3", + "request": "2.88.0", + "yn": "^3.1.1" }, "dependencies": { "bunyan": { - "version": "1.8.14", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.14.tgz", - "integrity": "sha512-LlahJUxXzZLuw/hetUQJmRgZ1LF6+cr5TPpRj6jf327AsiIq2jhYEH4oqUUkVKTor+9w2BT3oxVwhzE5lw9tcg==", + "version": "1.8.12", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", + "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", "requires": { "dtrace-provider": "~0.8", - "moment": "^2.19.3", + "moment": "^2.10.6", "mv": "~2", "safe-json-stringify": "~1" } }, - "yn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", - "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + }, + "request": { + "version": "2.88.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.0", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.4.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + } } } }, @@ -3907,7 +3799,7 @@ "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", - "integrity": "sha1-mntxz7fTYaGU6lVSQckvdGjVvyg=" + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" }, "loose-envify": { "version": "1.4.0", @@ -3921,11 +3813,16 @@ "lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha1-HaJ+ZxAnGUdpXa9oSOhH8B2EuSA=", + "integrity": 
"sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "requires": { "yallist": "^3.0.2" } }, + "lsmod": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", + "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" + }, "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", @@ -3958,23 +3855,6 @@ "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, - "md5": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", - "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", - "requires": { - "charenc": "0.0.2", - "crypt": "0.0.2", - "is-buffer": "~1.1.6" - }, - "dependencies": { - "is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - } - } - }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -3994,7 +3874,7 @@ "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" + "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" }, "messageformat": { "version": "2.3.0", @@ -4073,7 +3953,7 @@ "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "requires": { "brace-expansion": "^1.1.7" } @@ -4081,17 +3961,17 @@ "minimist": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + "integrity": "sha512-miQKw5Hv4NS1Psg2517mV4e4dYNaO3++hjAvLOAzKqZ61rH8NS1SK+vbfBWZ5PY/Me/bEWhUwqMghEW5Fb9T7Q==" }, "mkdirp": { "version": "0.3.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", - "integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=" + "integrity": "sha512-8OCq0De/h9ZxseqzCH8Kw/Filf5pF/vMI6+BH7Lu0jXz2pqYCjTAQRolSxRIi+Ax+oCCjlxoJMP0YQ4XlrQNHg==" }, "mocha": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", + "integrity": "sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", "dev": true, "requires": { "browser-stdout": "1.3.1", @@ -4110,7 +3990,7 @@ "commander": { "version": "2.15.1", "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=", + "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", "dev": true }, "debug": { @@ -4125,7 +4005,7 @@ "glob": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", + "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -4139,7 +4019,7 @@ "mkdirp": { "version": "0.5.1", "resolved": 
"https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", "dev": true, "requires": { "minimist": "0.0.8" @@ -4156,12 +4036,12 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" + "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" }, "moment": { - "version": "2.27.0", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.27.0.tgz", - "integrity": "sha512-al0MUK7cpIcglMv3YF13qSgdAIqxHTO7brRtaz3DlSULbqfazqkc5kEjNrLDOM7fsjshoFIihnU8snrP7zUvhQ==", + "version": "2.29.1", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", + "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==", "optional": true }, "mongodb": { @@ -4191,7 +4071,7 @@ "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", + "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==", "optional": true, "requires": { "mkdirp": "~0.5.1", @@ -4202,7 +4082,7 @@ "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", "optional": true, "requires": { "minimist": "0.0.8" @@ -4224,7 +4104,7 @@ "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", + "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==", "optional": true }, "negotiator": { @@ -4274,9 +4154,9 @@ "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" }, "node-forge": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.1.tgz", - "integrity": "sha512-G6RlQt5Sb4GMBzXvhfkeFmbqR6MzhtnT7VTHuLadjkii3rdYHNdw0m8zA4BTxVIh68FicCQ2NSUANpsqkr9jvQ==" + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.2.tgz", + "integrity": "sha512-naKSScof4Wn+aoHU6HBsifh92Zeicm1GDQKd1vp3Y/kOi8ub0DozCa9KpvYNCXslFHYRmLNiqRopGdTGwNLpNw==" }, "normalize-package-data": { "version": "2.5.0", @@ -4387,7 +4267,7 @@ "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "requires": { "wrappy": "1" } @@ -4441,7 +4321,7 @@ "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=" + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, "parent-module": { "version": "1.0.1", @@ -4460,7 +4340,7 @@ "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" + "integrity": 
"sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" }, "parseurl": { "version": "1.3.3", @@ -4476,7 +4356,7 @@ "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" }, "path-is-inside": { "version": "1.0.2", @@ -4493,7 +4373,7 @@ "path-parse": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha1-1i27VnlAXXLEc37FhgDp3c8G0kw=" + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, "path-to-regexp": { "version": "0.1.7", @@ -4503,12 +4383,12 @@ "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, "pify": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha1-SyzSXFDVmHNcUCkiJP2MbfQeMjE=" + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" }, "prelude-ls": { "version": "1.1.2", @@ -5166,7 +5046,7 @@ "pretty-ms": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", - "integrity": "sha1-Mbr0G5T9AiJwmKqgO9YmCOsNbpI=", + "integrity": "sha512-qG66ahoLCwpLXD09ZPHSCbUWYTqdosB7SMP4OffgTgL2PBKXMuUsrk5Bwg8q4qPkjTXsKBMr+YK3Ltd/6F9s/Q==", "requires": { "parse-ms": "^2.0.0" } @@ -5204,7 +5084,7 @@ "protobufjs": { "version": "6.8.8", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", - "integrity": "sha1-yLTxKC/XqQ5vWxCe0RyEr4KQjnw=", + "integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -5275,7 +5155,7 @@ "q": { "version": "0.9.2", "resolved": "https://registry.npmjs.org/q/-/q-0.9.2.tgz", - "integrity": "sha1-I8BsRsgTKGFqrhaNPuI6Vr1D2vY=" + "integrity": "sha512-ZOxMuWPMJnsUdYhuQ9glpZwKhB4cm8ubYFy1nNCY8TkSAuZun5fd8jCDTlf2ykWnK8x9HGn1stNtLeG179DebQ==" }, "qs": { "version": "6.7.0", @@ -5300,15 +5180,15 @@ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raven": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz", - "integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", + "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", "requires": { "cookie": "0.3.1", - "md5": "^2.2.1", - "stack-trace": "0.0.10", - "timed-out": "4.0.1", - "uuid": "3.3.2" + "json-stringify-safe": "5.0.1", + "lsmod": "1.0.0", + "stack-trace": "0.0.9", + "uuid": "3.0.0" }, "dependencies": { "cookie": { @@ -5317,9 +5197,9 @@ "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", + "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" } } }, @@ -5353,7 +5233,7 @@ "redis": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz", - "integrity": "sha1-/cAdSrTL5LO7LLKByP5WnDhX9XE=" + "integrity": "sha512-wkgzIZ9HuxJ6Sul1IW/6FG13Ecv6q8kmdHb5xo09Hu6bgWzz5qsnM06SVMpDxFNbyApaRjy8CwnmVaRMMhAMWg==" }, "redis-commands": { "version": "1.5.0", @@ -5363,12 +5243,12 @@ "redis-errors": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", - "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60=" + "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==" }, "redis-parser": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", - "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=", + "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", "requires": { "redis-errors": "^1.0.0" } @@ -5376,7 +5256,7 @@ "redis-sentinel": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz", - "integrity": "sha1-Vj3TQduZMgMfSX+v3Td+hkj/s+U=", + "integrity": "sha512-cKtLSUzDsKmsB50J1eIV/SH11DSMiHgsm/gDPRCU5lXz5OyTSuLKWg9oc8d5n74kZwtAyRkfJP0x8vYXvlPjFQ==", "requires": { "q": "0.9.2", "redis": "0.11.x" @@ -5518,7 +5398,7 @@ "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=", + "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", "dev": true }, "require-main-filename": { @@ -5560,7 +5440,7 @@ "resolve-from": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", - "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" + "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==" }, "restore-cursor": { "version": "3.1.0", @@ -5575,7 +5455,7 @@ "retry-axios": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", - "integrity": "sha1-V1fID1hbTMTEmGqi/9R6YMbTXhM=" + "integrity": "sha512-jp4YlI0qyDFfXiXGhkCOliBN1G7fRH03Nqy8YdShzGqbY5/9S2x/IR6C88ls2DFkbWuL3ASkP7QD3pVrNpPgwQ==" }, "retry-request": { "version": "4.1.1", @@ -5604,7 +5484,7 @@ "rimraf": { "version": "2.4.5", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", + "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==", "optional": true, "requires": { "glob": "^6.0.1" @@ -5644,7 +5524,7 @@ "sandboxed-module": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", - "integrity": "sha1-bL3sghOAx31FdcjIeDi5ET5kulA=", + "integrity": "sha512-1QAd90eCdAnqVn2sLkRCCeFphH/TKLfoTcdyI6h9h2E+YEY+aKovggwzWWWi5IMObafl0W1wr+dQ5F6LFmjpzA==", "dev": true, "requires": { "require-like": "0.1.2", @@ -5654,7 +5534,7 @@ "stack-trace": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", - "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=", + "integrity": "sha512-5/6uZt7RYjjAl8z2j1mXWAewz+I4Hk2/L/3n6NRLIQ31+uQ7nMd9O6G69QCdrrufHv0QGRRHl/jwUEGTqhelTA==", "dev": true } } @@ -5725,7 +5605,7 @@ "settings-sharelatex": { "version": 
"1.1.0", "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", - "integrity": "sha1-Tv4vUpPbjxwVlnEEx5BfqHD/mS0=", + "integrity": "sha512-f7D+0lnlohoteSn6IKTH72NE+JnAdMWTKwQglAuimZWTID2FRRItZSGeYMTRpvEnaQApkoVwRp//WRMsiddnqw==", "requires": { "coffee-script": "1.6.0" }, @@ -5733,7 +5613,7 @@ "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" + "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" } } }, @@ -5755,7 +5635,7 @@ "shimmer": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", - "integrity": "sha1-YQhZ994ye1h+/r9QH7QxF/mv8zc=" + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" }, "side-channel": { "version": "1.0.2", @@ -5834,7 +5714,7 @@ "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "sparse-bitfield": { "version": "3.0.3", @@ -5880,7 +5760,7 @@ "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", "requires": { "through": "2" } @@ -5908,9 +5788,9 @@ } }, "stack-trace": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" }, "standard-as-callback": { "version": "2.0.1", @@ -5920,7 +5800,7 @@ "statsd-parser": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" + "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" }, "statuses": { "version": "1.5.0", @@ -6065,7 +5945,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", + "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", "requires": { "bintrees": "1.0.1" } @@ -6098,7 +5978,7 @@ "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" }, "through2": { "version": "3.0.1", @@ -6133,11 +6013,6 @@ } } }, - "timed-out": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", - "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" - }, "timekeeper": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", @@ -6185,6 +6060,22 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, + 
"tough-cookie": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "requires": { + "psl": "^1.1.24", + "punycode": "^1.4.1" + }, + "dependencies": { + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + } + } + }, "tslib": { "version": "1.11.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.2.tgz", @@ -6202,7 +6093,7 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, "type-check": { "version": "0.3.2", @@ -6242,7 +6133,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" + "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" }, "unpipe": { "version": "1.0.0", @@ -6252,7 +6143,7 @@ "uri-js": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha1-lMVA4f93KVbiKZUHwBCupsiDjrA=", + "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", "requires": { "punycode": "^2.1.0" } @@ -6260,7 +6151,7 @@ "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "utils-merge": { "version": "1.0.1", @@ -6296,7 +6187,7 @@ "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", @@ -6385,7 +6276,7 @@ "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", - "integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I=" + "integrity": "sha512-5cZ7mecD3eYcMiCH4wtRPA5iFJZ50BJYDfckI5RRpQiktMiYTcn0ccLTZOvcbBume+1304fQztxeNzNS9Gvrnw==" }, "which": { "version": "1.3.1", @@ -6422,7 +6313,7 @@ "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "write": { "version": "1.0.3", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index e62d644150..59104241b2 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -23,7 +23,7 @@ "bunyan": "~0.22.1", "express": "4.17.1", "lodash": "^4.17.19", - "logger-sharelatex": "^2.2.0", + "logger-sharelatex": "^1.9.1", "metrics-sharelatex": "^2.6.2", "mongodb": "^3.6.0", "redis-sharelatex": "^1.0.13", From 08ed5f6c9b3719702501a52d717411c4fca0b307 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 10 Nov 2020 11:32:04 +0000 Subject: [PATCH 703/769] [misc] bump 
@overleaf/redis-wrapper to version 2.0.0 --- services/document-updater/app.js | 4 +- .../app/js/DispatchManager.js | 2 +- .../app/js/HistoryRedisManager.js | 4 +- .../document-updater/app/js/LockManager.js | 2 +- .../app/js/ProjectHistoryRedisManager.js | 2 +- .../app/js/RealTimeRedisManager.js | 4 +- .../document-updater/app/js/RedisManager.js | 2 +- .../document-updater/expire_docops.coffee | 4 +- services/document-updater/package-lock.json | 336 ++++++++---------- services/document-updater/package.json | 3 +- .../js/ApplyingUpdatesToADocTests.js | 6 +- .../ApplyingUpdatesToProjectStructureTests.js | 2 +- .../acceptance/js/SettingADocumentTests.js | 2 +- .../acceptance/js/helpers/DocUpdaterClient.js | 4 +- .../coffee/test_blpop_failover.coffee | 6 +- .../coffee/test_pubsub_failover.coffee | 2 +- .../DispatchManager/DispatchManagerTests.js | 2 +- .../HistoryRedisManagerTests.js | 2 +- .../unit/js/LockManager/CheckingTheLock.js | 2 +- .../unit/js/LockManager/ReleasingTheLock.js | 2 +- .../test/unit/js/LockManager/getLockTests.js | 2 +- .../test/unit/js/LockManager/tryLockTests.js | 2 +- .../ProjectHistoryRedisManagerTests.js | 2 +- .../RealTimeRedisManagerTests.js | 2 +- .../unit/js/RedisManager/RedisManagerTests.js | 2 +- .../ShareJsUpdateManagerTests.js | 2 +- 26 files changed, 185 insertions(+), 220 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 7ac17d13bb..6307b59180 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -110,7 +110,7 @@ app.get('/status', (req, res) => { } }) -const pubsubClient = require('redis-sharelatex').createClient( +const pubsubClient = require('@overleaf/redis-wrapper').createClient( Settings.redis.pubsub ) app.get('/health_check/redis', (req, res, next) => { @@ -124,7 +124,7 @@ app.get('/health_check/redis', (req, res, next) => { }) }) -const docUpdaterRedisClient = require('redis-sharelatex').createClient( +const docUpdaterRedisClient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) app.get('/health_check/redis_cluster', (req, res, next) => { diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js index 2b9b2c4fb7..aa7c4f1f0e 100644 --- a/services/document-updater/app/js/DispatchManager.js +++ b/services/document-updater/app/js/DispatchManager.js @@ -18,7 +18,7 @@ let DispatchManager const Settings = require('settings-sharelatex') const logger = require('logger-sharelatex') const Keys = require('./UpdateKeys') -const redis = require('redis-sharelatex') +const redis = require('@overleaf/redis-wrapper') const Errors = require('./Errors') const UpdateManager = require('./UpdateManager') diff --git a/services/document-updater/app/js/HistoryRedisManager.js b/services/document-updater/app/js/HistoryRedisManager.js index 5b9c76646c..20ce0651dd 100644 --- a/services/document-updater/app/js/HistoryRedisManager.js +++ b/services/document-updater/app/js/HistoryRedisManager.js @@ -13,7 +13,9 @@ */ let HistoryRedisManager const Settings = require('settings-sharelatex') -const rclient = require('redis-sharelatex').createClient(Settings.redis.history) +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.history +) const Keys = Settings.redis.history.key_schema const logger = require('logger-sharelatex') diff --git a/services/document-updater/app/js/LockManager.js b/services/document-updater/app/js/LockManager.js index 2f08dfd3dd..1c4427ed44 100644 --- 
a/services/document-updater/app/js/LockManager.js +++ b/services/document-updater/app/js/LockManager.js @@ -14,7 +14,7 @@ let LockManager const metrics = require('./Metrics') const Settings = require('settings-sharelatex') -const redis = require('redis-sharelatex') +const redis = require('@overleaf/redis-wrapper') const rclient = redis.createClient(Settings.redis.lock) const keys = Settings.redis.lock.key_schema const logger = require('logger-sharelatex') diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js index eb714738ca..ab7a9d0faa 100644 --- a/services/document-updater/app/js/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js @@ -19,7 +19,7 @@ const projectHistoryKeys = __guard__( Settings.redis != null ? Settings.redis.project_history : undefined, (x) => x.key_schema ) -const rclient = require('redis-sharelatex').createClient( +const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.project_history ) const logger = require('logger-sharelatex') diff --git a/services/document-updater/app/js/RealTimeRedisManager.js b/services/document-updater/app/js/RealTimeRedisManager.js index 537be23265..1059dc6079 100644 --- a/services/document-updater/app/js/RealTimeRedisManager.js +++ b/services/document-updater/app/js/RealTimeRedisManager.js @@ -13,10 +13,10 @@ */ let RealTimeRedisManager const Settings = require('settings-sharelatex') -const rclient = require('redis-sharelatex').createClient( +const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) -const pubsubClient = require('redis-sharelatex').createClient( +const pubsubClient = require('@overleaf/redis-wrapper').createClient( Settings.redis.pubsub ) const Keys = Settings.redis.documentupdater.key_schema diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js index 64352b4e00..104a720c95 100644 --- a/services/document-updater/app/js/RedisManager.js +++ b/services/document-updater/app/js/RedisManager.js @@ -15,7 +15,7 @@ */ let RedisManager const Settings = require('settings-sharelatex') -const rclient = require('redis-sharelatex').createClient( +const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) const logger = require('logger-sharelatex') diff --git a/services/document-updater/expire_docops.coffee b/services/document-updater/expire_docops.coffee index 1eb7d93c8f..ff25b6f842 100644 --- a/services/document-updater/expire_docops.coffee +++ b/services/document-updater/expire_docops.coffee @@ -1,5 +1,5 @@ Settings = require "settings-sharelatex" -rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater) +rclient = require("@overleaf/redis-wrapper").createClient(Settings.redis.documentupdater) keys = Settings.redis.documentupdater.key_schema async = require "async" RedisManager = require "./app/js/RedisManager" @@ -41,4 +41,4 @@ setTimeout () -> # Give redis a chance to connect expireDocOps (error) -> throw error if error? 
process.exit() -, 1000 \ No newline at end of file +, 1000 diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 6328d17d93..882ae13c3d 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -173,7 +173,7 @@ "@google-cloud/debug-agent": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", - "integrity": "sha512-fP87kYbS6aeDna08BivwQ1J260mwJGchRi99XdWCgqbRwuFac8ul0OT5i2wEeDSc5QaDX8ZuWQQ0igZvh1rTyQ==", + "integrity": "sha1-2qdjWhaYpWY31dxXzhED536uKdM=", "requires": { "@google-cloud/common": "^0.32.0", "@sindresorhus/is": "^0.15.0", @@ -406,7 +406,7 @@ "@google-cloud/profiler": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", - "integrity": "sha512-rNvtrFtIebIxZEJ/O0t8n7HciZGIXBo8DvHxWqAmsCaeLvkTtsaL6HmPkwxrNQ1IhbYWAxF+E/DwCiHyhKmgTg==", + "integrity": "sha1-Fj3738Mwuug1X+RuHlvgZTV7H1w=", "requires": { "@google-cloud/common": "^0.26.0", "@types/console-log-level": "^1.4.0", @@ -428,7 +428,7 @@ "@google-cloud/common": { "version": "0.26.2", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz", - "integrity": "sha512-xJ2M/q3MrUbnYZuFlpF01caAlEhAUoRn0NXp93Hn3pkFpfSOG8YfbKbpBAHvcKVbBOAKVIwPsleNtuyuabUwLQ==", + "integrity": "sha1-nFTiRxqEqgMelaJIJJduCA8lVkU=", "requires": { "@google-cloud/projectify": "^0.3.2", "@google-cloud/promisify": "^0.3.0", @@ -452,7 +452,7 @@ "@google-cloud/promisify": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", - "integrity": "sha512-QzB0/IMvB0eFxFK7Eqh+bfC8NLv3E9ScjWQrPOk6GgfNroxcVITdTlT8NRsRrcp5+QQJVPLkRqKG0PUdaWXmHw==" + "integrity": "sha1-9kHm2USo4KBe4MsQkd+mAIm+zbo=" }, "agent-base": { "version": "4.3.0", @@ -465,7 +465,7 @@ "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" }, "debug": { "version": "3.2.6", @@ -489,7 +489,7 @@ "gcp-metadata": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", - "integrity": "sha512-caV4S84xAjENtpezLCT/GILEAF5h/bC4cNqZFmt/tjTn8t+JBtTkQrgBrJu3857YdsnlM8rxX/PMcKGtE8hUlw==", + "integrity": "sha1-H510lfdGChRSZIHynhFZbdVj3SY=", "requires": { "gaxios": "^1.0.2", "json-bigint": "^0.3.0" @@ -498,7 +498,7 @@ "google-auth-library": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", - "integrity": "sha512-FURxmo1hBVmcfLauuMRKOPYAPKht3dGuI2wjeJFalDUThO0HoYVjr4yxt5cgYSFm1dgUpmN9G/poa7ceTFAIiA==", + "integrity": "sha1-ejFdIDZ0Svavyth7IQ7mY4tA9Xs=", "requires": { "axios": "^0.18.0", "gcp-metadata": "^0.7.0", @@ -512,7 +512,7 @@ "gcp-metadata": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz", - "integrity": "sha512-ffjC09amcDWjh3VZdkDngIo7WoluyC5Ag9PAYxZbmQLOLNI8lvPtoKTSCyU54j2gwy5roZh6sSMTfkY2ct7K3g==", + "integrity": "sha1-bDXbtSvaMqQnu5yY9UI33dG1QG8=", "requires": { "axios": "^0.18.0", "extend": "^3.0.1", @@ -615,7 +615,7 @@ "@google-cloud/trace-agent": { "version": "3.6.1", "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", - "integrity": 
"sha512-KDo85aPN4gSxJ7oEIOlKd7aGENZFXAM1kbIn1Ds+61gh/K1CQWSyepgJo3nUpAwH6D1ezDWV7Iaf8ueoITc8Uw==", + "integrity": "sha1-W+dEE5TQ6ldY8o25IqUAT/PwO+w=", "requires": { "@google-cloud/common": "^0.32.1", "builtin-modules": "^3.0.0", @@ -835,34 +835,42 @@ } }, "@overleaf/o-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", - "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.1.0.tgz", + "integrity": "sha512-TWJ80ozJ1LeugGTJyGQSPEuTkZ9LqZD7/ndLE6azKa03SU/mKV/FINcfk8atpVil8iv1hHQwzYZc35klplpMpQ==" + }, + "@overleaf/redis-wrapper": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@overleaf/redis-wrapper/-/redis-wrapper-2.0.0.tgz", + "integrity": "sha512-lREuhDPNgmKyOmL1g6onfRzDLWOG/POsE4Vd7ZzLnKDYt9SbOIujtx3CxI2qtQAKBYHf/hfyrbtyX3Ib2yTvYA==", + "requires": { + "ioredis": "~4.17.3" + } }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" }, "@protobufjs/base64": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", - "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + "integrity": "sha1-TIVzDlm5ofHzSQR9vyQpYDS7JzU=" }, "@protobufjs/codegen": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", - "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + "integrity": "sha1-fvN/DQEPsCitGtWXIuUG2SYoFcs=" }, "@protobufjs/eventemitter": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" }, "@protobufjs/fetch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", "requires": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" @@ -871,32 +879,32 @@ "@protobufjs/float": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" }, "@protobufjs/inquire": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" }, "@protobufjs/path": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" }, "@protobufjs/pool": { "version": "1.1.0", "resolved": 
"https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" }, "@protobufjs/utf8": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" }, "@sindresorhus/is": { "version": "0.15.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", - "integrity": "sha512-lu8BpxjAtRCAo5ifytTpCPCj99LF7o/2Myn+NXyNCBqvPYn7Pjd76AMmUB5l7XF1U6t0hcWrlEM5ESufW7wAeA==" + "integrity": "sha1-lpFbqgXmpqHRN7rfSYTT/AWCC7Y=" }, "@sinonjs/commons": { "version": "1.7.2", @@ -967,7 +975,7 @@ "@types/caseless": { "version": "0.12.2", "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", - "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==" + "integrity": "sha1-9l09Y4ngHutFi9VNyPUrlalGO8g=" }, "@types/color-name": { "version": "1.1.1", @@ -978,12 +986,12 @@ "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" + "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" }, "@types/duplexify": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz", - "integrity": "sha512-5zOA53RUlzN74bvrSGwjudssD9F3a797sDZQkiYpUOxW+WHaXTCPz4/d5Dgi6FKnOqZ2CpaTo0DhgIfsXAOE/A==", + "integrity": "sha1-38grZL06IWj1vSZESvFlvwI33Ng=", "requires": { "@types/node": "*" } @@ -1044,7 +1052,7 @@ "@types/semver": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", - "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" + "integrity": "sha1-FGwqKe59O65L8vyydGNuJkyBPEU=" }, "@types/tough-cookie": { "version": "2.3.6", @@ -1107,7 +1115,7 @@ "abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "integrity": "sha1-6vVNU7YrrkE46AnKIlyEOabvs5I=", "requires": { "event-target-shim": "^5.0.0" } @@ -1246,7 +1254,7 @@ "arrify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", - "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" + "integrity": "sha1-yWVekzHgq81YjSp8rX6ZVvZnAfo=" }, "asn1": { "version": "0.2.4", @@ -1264,7 +1272,7 @@ "assertion-error": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "integrity": "sha1-5gtrDo8wG9l+U3UhW9pAbIURjAs=", "dev": true }, "ast-types-flow": { @@ -1290,7 +1298,7 @@ "async-listener": { "version": "0.6.10", "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", - "integrity": "sha512-gpuo6xOyF4D5DE5WvyqZdPA3NGhiT6Qf07l7DCB0wwDEsLvDIbCr6j9S5aj5Ch96dLace5tXVzWBZkxU/c5ohw==", + "integrity": "sha1-p8l6vlcLpgLXgic8DeYKUePhfLw=", 
"requires": { "semver": "^5.3.0", "shimmer": "^1.1.0" @@ -1306,12 +1314,12 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==" + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" }, "aws4": { "version": "1.9.1", @@ -1321,7 +1329,7 @@ "axios": { "version": "0.18.1", "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", - "integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==", + "integrity": "sha1-/z8N4ue10YDnV62YAA8Qgbh7zqM=", "requires": { "follow-redirects": "1.5.10", "is-buffer": "^2.0.2" @@ -1350,7 +1358,7 @@ "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha512-9Y0g0Q8rmSt+H33DfKv7FOc3v+iRI+o1lbzt8jGcIosYW37IIW/2XVYq5NPdmaD5NQ59Nk26Kl/vZbwW9Fr8vg==" + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, "base64-js": { "version": "1.3.1", @@ -1368,12 +1376,12 @@ "bignumber.js": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", - "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" + "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" }, "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", "requires": { "file-uri-to-path": "1.0.0" } @@ -1381,7 +1389,7 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" + "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, "bl": { "version": "2.2.1", @@ -1443,7 +1451,7 @@ "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", "dev": true }, "bson": { @@ -1454,17 +1462,17 @@ "buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" }, "builtin-modules": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", - "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==" + "integrity": "sha1-qtl8FRMet2tltQ7yCOdYTNdqdIQ=" }, "bunyan": { "version": "0.22.3", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-0.22.3.tgz", - "integrity": "sha512-v9dd5qmd6nJHEi7fiNo1fR2pMpE8AiB47Ap984p4iJKj+dEA69jSccmq6grFQn6pxIh0evvKpC5XO1SKfiaRoQ==", + "integrity": 
"sha1-ehncG0yMZF90AkGnQPIkUUfGfsI=", "requires": { "dtrace-provider": "0.2.8", "mv": "~2" @@ -1509,12 +1517,12 @@ "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" }, "chai": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", - "integrity": "sha512-eRYY0vPS2a9zt5w5Z0aCeWbrXTEyvk7u/Xf71EzNObrjSCPgMm1Nku/D/u2tiqHBX5j40wWhj54YJLtgn8g55A==", + "integrity": "sha1-TQJjewZ/6Vi9v906QOxW/vc3Mkc=", "dev": true, "requires": { "assertion-error": "^1.0.1", @@ -1525,7 +1533,7 @@ "chai-spies": { "version": "0.7.1", "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", - "integrity": "sha512-ezo+u5DUDjPhOYkgsjbbVhtdzsnVr6n2CL/juJA89YnBsWO4ocL14Ake0txlGrGZo/HwcfhFGaV0czdunr3tHA==", + "integrity": "sha1-ND2Z9RJEIS6LF+ZLk5lv97LCqbE=", "dev": true }, "chalk": { @@ -1576,14 +1584,6 @@ "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==" }, - "coffee-script": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.8.0.tgz", - "integrity": "sha512-EvLTMcu9vR6G1yfnz75yrISvhq1eBPC+pZbQhHzTiC5vXgpYIrArxQc5tB+SYfBi3souVdSZ4AZzYxI72oLXUw==", - "requires": { - "mkdirp": "~0.3.5" - } - }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -1622,12 +1622,12 @@ "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" + "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" }, "contains-path": { "version": "0.1.0", @@ -1651,7 +1651,7 @@ "continuation-local-storage": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz", - "integrity": "sha512-jx44cconVqkCEEyLSKWwkvUXwO561jXMa3LPjTPsm5QR22PA0/mhe33FT4Xb5y74JDvt/Cq+5lm8S8rskLv9ZA==", + "integrity": "sha1-EfYT906RT+mzTJKtLSj+auHbf/s=", "requires": { "async-listener": "^0.6.0", "emitter-listener": "^1.1.1" @@ -1682,7 +1682,7 @@ "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "d64": { "version": "1.0.0", @@ -1698,7 +1698,7 @@ "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", "requires": { "assert-plus": "^1.0.0" } @@ -1720,7 +1720,7 @@ "deep-eql": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", - "integrity": 
"sha512-6sEotTRGBFiNcqVoeHwnfopbSpi5NbH1VWJmYCVkmxMmaVTT0bUTrNaGyBwhgP4MZL012W/mkzIn3Da+iDYweg==", + "integrity": "sha1-71WKyrjeJSBs1xOQbXTlaTDrafI=", "dev": true, "requires": { "type-detect": "0.1.1" @@ -1729,7 +1729,7 @@ "type-detect": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", - "integrity": "sha512-5rqszGVwYgBoDkIm2oUtvkfZMQ0vk29iDMU0W2qCa3rG0vPDNczCMT4hV/bLBgLg8k8ri6+u3Zbt+S/14eMzlA==", + "integrity": "sha1-C6XsKohWQORw6k6FBZcZANrFiCI=", "dev": true } } @@ -1752,7 +1752,7 @@ "delay": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", - "integrity": "sha512-Lwaf3zVFDMBop1yDuFZ19F9WyGcZcGacsbdlZtWjQmM50tOcMntm1njF/Nb/Vjij3KaSvCF+sEYGKrrjObu2NA==" + "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" }, "delayed-stream": { "version": "1.0.0", @@ -1762,7 +1762,7 @@ "denque": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", - "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==" + "integrity": "sha1-Z0T/dkHBSMP4ppwwflEjXB9KN88=" }, "depd": { "version": "1.1.2", @@ -1777,7 +1777,7 @@ "diff": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", "dev": true }, "dlv": { @@ -1815,7 +1815,7 @@ "duplexify": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", - "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", + "integrity": "sha1-Kk31MX9sz9kfhtb9JdjYoQO4gwk=", "requires": { "end-of-stream": "^1.0.0", "inherits": "^2.0.1", @@ -1864,7 +1864,7 @@ "ecdsa-sig-formatter": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "integrity": "sha1-rg8PothQRe8UqBfao86azQSJ5b8=", "requires": { "safe-buffer": "^5.0.1" } @@ -1872,7 +1872,7 @@ "emitter-listener": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", - "integrity": "sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ==", + "integrity": "sha1-VrFA6PaZI3Wz18ssqxzHQy2WMug=", "requires": { "shimmer": "^1.2.0" } @@ -1899,7 +1899,7 @@ "ent": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", - "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" + "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" }, "error-ex": { "version": "1.3.2", @@ -1943,12 +1943,12 @@ "es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" + "integrity": "sha1-TrIVlMlyvEBVPSduUQU5FD21Pgo=" }, "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", + "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", "requires": { "es6-promise": "^4.0.3" } @@ -1961,7 +1961,7 @@ "escape-string-regexp": { 
"version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", "dev": true }, "eslint": { @@ -2569,7 +2569,7 @@ "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" + "integrity": "sha1-XU0+vflYPWOlMzzi3rdICrKwV4k=" }, "eventid": { "version": "1.0.0", @@ -2636,7 +2636,7 @@ "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" }, "fast-deep-equal": { "version": "3.1.1", @@ -2663,7 +2663,7 @@ "fast-text-encoding": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", - "integrity": "sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==" + "integrity": "sha1-PlzoKTQJz6pxd6cbnKhOGx5vJe8=" }, "figures": { "version": "3.2.0", @@ -2686,7 +2686,7 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" }, "finalhandler": { "version": "1.1.2", @@ -2714,7 +2714,7 @@ "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" + "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, "flat-cache": { "version": "2.0.1", @@ -2761,7 +2761,7 @@ "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", - "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", + "integrity": "sha1-e3qfmuov3/NnhqlP9kPtB/T/Xio=", "requires": { "debug": "=3.1.0" }, @@ -2814,7 +2814,7 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", "dev": true }, "function-bind": { @@ -2865,7 +2865,7 @@ "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", "requires": { "assert-plus": "^1.0.0" } @@ -2873,7 +2873,7 @@ "glob": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", "optional": true, "requires": { "inflight": "^1.0.4", @@ -3004,7 +3004,7 @@ "har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - 
"integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==" + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" }, "har-validator": { "version": "5.1.3", @@ -3056,13 +3056,13 @@ "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==", + "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", "dev": true }, "hex2dec": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", - "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" + "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" }, "hosted-git-info": { "version": "2.8.8", @@ -3195,7 +3195,7 @@ "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", "requires": { "once": "^1.3.0", "wrappy": "1" @@ -3366,11 +3366,11 @@ }, "dependencies": { "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", "requires": { - "ms": "^2.1.1" + "ms": "2.1.2" } }, "ms": { @@ -3388,7 +3388,7 @@ "is": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz", - "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==" + "integrity": "sha1-Yc/23TxBk9uUo9YlggcrROVkXXk=" }, "is-arrayish": { "version": "0.2.1", @@ -3476,7 +3476,7 @@ "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" }, "isarray": { "version": "1.0.0", @@ -3492,7 +3492,7 @@ "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, "js-tokens": { "version": "4.0.0", @@ -3513,7 +3513,7 @@ "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" }, "jsesc": { "version": "2.5.2", @@ -3524,7 +3524,7 @@ "json-bigint": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", - "integrity": "sha512-u+c/u/F+JNPUekHCFyGVycRPyh9UHD5iUhSyIAn10kxbDTJxijwAbT6XHaONEOXuGGfmWUSroheXgHcml4gLgg==", + "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", "requires": { "bignumber.js": "^7.0.0" } @@ -3532,7 +3532,7 @@ "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": 
"sha512-a3xHnILGMtk+hDOqNwHzF6e2fNbiMrXZvxKQiEv2MlgQP+pjIOzqAmKYD2mDpXYE/44M7g+n9p2bKkYWDUcXCQ==" + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" }, "json-schema-traverse": { "version": "0.4.1", @@ -3548,12 +3548,12 @@ "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha512-4Dj8Rf+fQ+/Pn7C5qeEX02op1WfOss3PKTE9Nsop3Dx+6UPxlm1dr/og7o2cRa5hNN07CACr4NFzRLtj/rjWog==", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", @@ -3634,12 +3634,12 @@ "lodash.defaults": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", - "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==" + "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" }, "lodash.flatten": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", - "integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==" + "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=" }, "lodash.get": { "version": "4.4.2", @@ -3667,7 +3667,7 @@ "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" + "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" }, "lodash.unescape": { "version": "4.0.1", @@ -3693,6 +3693,11 @@ "yn": "^3.1.1" }, "dependencies": { + "@overleaf/o-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", + "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" + }, "bunyan": { "version": "1.8.12", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", @@ -3799,7 +3804,7 @@ "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", - "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" + "integrity": "sha1-mntxz7fTYaGU6lVSQckvdGjVvyg=" }, "loose-envify": { "version": "1.4.0", @@ -3813,7 +3818,7 @@ "lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "integrity": "sha1-HaJ+ZxAnGUdpXa9oSOhH8B2EuSA=", "requires": { "yallist": "^3.0.2" } @@ -3874,7 +3879,7 @@ "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" + "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" }, "messageformat": { "version": "2.3.0", @@ -3953,7 +3958,7 @@ "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", 
"requires": { "brace-expansion": "^1.1.7" } @@ -3961,17 +3966,12 @@ "minimist": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha512-miQKw5Hv4NS1Psg2517mV4e4dYNaO3++hjAvLOAzKqZ61rH8NS1SK+vbfBWZ5PY/Me/bEWhUwqMghEW5Fb9T7Q==" - }, - "mkdirp": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", - "integrity": "sha512-8OCq0De/h9ZxseqzCH8Kw/Filf5pF/vMI6+BH7Lu0jXz2pqYCjTAQRolSxRIi+Ax+oCCjlxoJMP0YQ4XlrQNHg==" + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, "mocha": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": "sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", + "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", "dev": true, "requires": { "browser-stdout": "1.3.1", @@ -3990,7 +3990,7 @@ "commander": { "version": "2.15.1", "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", + "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=", "dev": true }, "debug": { @@ -4005,7 +4005,7 @@ "glob": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -4019,7 +4019,7 @@ "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "dev": true, "requires": { "minimist": "0.0.8" @@ -4036,7 +4036,7 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" + "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { "version": "2.29.1", @@ -4071,7 +4071,7 @@ "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==", + "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", "optional": true, "requires": { "mkdirp": "~0.5.1", @@ -4082,7 +4082,7 @@ "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "optional": true, "requires": { "minimist": "0.0.8" @@ -4104,7 +4104,7 @@ "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==", + "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", "optional": true }, "negotiator": { @@ -4267,7 +4267,7 @@ "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "requires": { "wrappy": "1" } @@ 
-4321,7 +4321,7 @@ "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=" }, "parent-module": { "version": "1.0.1", @@ -4340,7 +4340,7 @@ "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" + "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" }, "parseurl": { "version": "1.3.3", @@ -4356,7 +4356,7 @@ "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-is-inside": { "version": "1.0.2", @@ -4373,7 +4373,7 @@ "path-parse": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + "integrity": "sha1-1i27VnlAXXLEc37FhgDp3c8G0kw=" }, "path-to-regexp": { "version": "0.1.7", @@ -4383,12 +4383,12 @@ "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "pify": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" + "integrity": "sha1-SyzSXFDVmHNcUCkiJP2MbfQeMjE=" }, "prelude-ls": { "version": "1.1.2", @@ -5046,7 +5046,7 @@ "pretty-ms": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", - "integrity": "sha512-qG66ahoLCwpLXD09ZPHSCbUWYTqdosB7SMP4OffgTgL2PBKXMuUsrk5Bwg8q4qPkjTXsKBMr+YK3Ltd/6F9s/Q==", + "integrity": "sha1-Mbr0G5T9AiJwmKqgO9YmCOsNbpI=", "requires": { "parse-ms": "^2.0.0" } @@ -5084,7 +5084,7 @@ "protobufjs": { "version": "6.8.8", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", - "integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==", + "integrity": "sha1-yLTxKC/XqQ5vWxCe0RyEr4KQjnw=", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -5152,11 +5152,6 @@ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" }, - "q": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/q/-/q-0.9.2.tgz", - "integrity": "sha512-ZOxMuWPMJnsUdYhuQ9glpZwKhB4cm8ubYFy1nNCY8TkSAuZun5fd8jCDTlf2ykWnK8x9HGn1stNtLeG179DebQ==" - }, "qs": { "version": "6.7.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", @@ -5230,11 +5225,6 @@ "util-deprecate": "^1.0.1" } }, - "redis": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/redis/-/redis-0.11.0.tgz", - "integrity": "sha512-wkgzIZ9HuxJ6Sul1IW/6FG13Ecv6q8kmdHb5xo09Hu6bgWzz5qsnM06SVMpDxFNbyApaRjy8CwnmVaRMMhAMWg==" - }, "redis-commands": { "version": "1.5.0", "resolved": 
"https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", @@ -5243,44 +5233,16 @@ "redis-errors": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", - "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==" + "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60=" }, "redis-parser": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", - "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", + "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=", "requires": { "redis-errors": "^1.0.0" } }, - "redis-sentinel": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/redis-sentinel/-/redis-sentinel-0.1.1.tgz", - "integrity": "sha512-cKtLSUzDsKmsB50J1eIV/SH11DSMiHgsm/gDPRCU5lXz5OyTSuLKWg9oc8d5n74kZwtAyRkfJP0x8vYXvlPjFQ==", - "requires": { - "q": "0.9.2", - "redis": "0.11.x" - } - }, - "redis-sharelatex": { - "version": "1.0.13", - "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.13.tgz", - "integrity": "sha512-sAQNofqfcMlIxzxNJF1qUspJKDM1VuuIOrGZQX9nb5JtcJ5cusa5sc+Oyb51eymPV5mZGWT3u07tKtv4jdXVIg==", - "requires": { - "async": "^2.5.0", - "coffee-script": "1.8.0", - "ioredis": "~4.17.3", - "redis-sentinel": "0.1.1", - "underscore": "1.7.0" - }, - "dependencies": { - "underscore": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", - "integrity": "sha512-cp0oQQyZhUM1kpJDLdGO1jPZHgS/MpzoWYfe9+CM2h/QGDZlqwT2T3YGukuBdaNJ/CAPoeyAZRRHz8JFo176vA==" - } - } - }, "regenerator-runtime": { "version": "0.13.5", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", @@ -5398,7 +5360,7 @@ "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", + "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=", "dev": true }, "require-main-filename": { @@ -5416,7 +5378,7 @@ "require_optional": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", - "integrity": "sha512-qhM/y57enGWHAe3v/NcwML6a3/vfESLe/sGM2dII+gEO0BpKRUkWZow/tyloNqJyN6kXSl3RyyM8Ll5D/sJP8g==", + "integrity": "sha1-TPNaQkf2TKPfjC7yCMxJSxyo/C4=", "requires": { "resolve-from": "^2.0.0", "semver": "^5.1.0" @@ -5440,7 +5402,7 @@ "resolve-from": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", - "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==" + "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" }, "restore-cursor": { "version": "3.1.0", @@ -5455,7 +5417,7 @@ "retry-axios": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", - "integrity": "sha512-jp4YlI0qyDFfXiXGhkCOliBN1G7fRH03Nqy8YdShzGqbY5/9S2x/IR6C88ls2DFkbWuL3ASkP7QD3pVrNpPgwQ==" + "integrity": "sha1-V1fID1hbTMTEmGqi/9R6YMbTXhM=" }, "retry-request": { "version": "4.1.1", @@ -5484,7 +5446,7 @@ "rimraf": { "version": "2.4.5", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==", + "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", "optional": true, "requires": { 
"glob": "^6.0.1" @@ -5524,7 +5486,7 @@ "sandboxed-module": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", - "integrity": "sha512-1QAd90eCdAnqVn2sLkRCCeFphH/TKLfoTcdyI6h9h2E+YEY+aKovggwzWWWi5IMObafl0W1wr+dQ5F6LFmjpzA==", + "integrity": "sha1-bL3sghOAx31FdcjIeDi5ET5kulA=", "dev": true, "requires": { "require-like": "0.1.2", @@ -5534,7 +5496,7 @@ "stack-trace": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", - "integrity": "sha512-5/6uZt7RYjjAl8z2j1mXWAewz+I4Hk2/L/3n6NRLIQ31+uQ7nMd9O6G69QCdrrufHv0QGRRHl/jwUEGTqhelTA==", + "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=", "dev": true } } @@ -5605,7 +5567,7 @@ "settings-sharelatex": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", - "integrity": "sha512-f7D+0lnlohoteSn6IKTH72NE+JnAdMWTKwQglAuimZWTID2FRRItZSGeYMTRpvEnaQApkoVwRp//WRMsiddnqw==", + "integrity": "sha1-Tv4vUpPbjxwVlnEEx5BfqHD/mS0=", "requires": { "coffee-script": "1.6.0" }, @@ -5613,7 +5575,7 @@ "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" } } }, @@ -5635,7 +5597,7 @@ "shimmer": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", - "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" + "integrity": "sha1-YQhZ994ye1h+/r9QH7QxF/mv8zc=" }, "side-channel": { "version": "1.0.2", @@ -5714,7 +5676,7 @@ "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" }, "sparse-bitfield": { "version": "3.0.3", @@ -5760,7 +5722,7 @@ "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", "requires": { "through": "2" } @@ -5800,7 +5762,7 @@ "statsd-parser": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" + "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" }, "statuses": { "version": "1.5.0", @@ -5945,7 +5907,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", + "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", "requires": { "bintrees": "1.0.1" } @@ -5978,7 +5940,7 @@ "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, "through2": { "version": "3.0.1", @@ -6093,7 +6055,7 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": 
"sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" }, "type-check": { "version": "0.3.2", @@ -6133,7 +6095,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" + "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" }, "unpipe": { "version": "1.0.0", @@ -6143,7 +6105,7 @@ "uri-js": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "integrity": "sha1-lMVA4f93KVbiKZUHwBCupsiDjrA=", "requires": { "punycode": "^2.1.0" } @@ -6151,7 +6113,7 @@ "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" }, "utils-merge": { "version": "1.0.1", @@ -6187,7 +6149,7 @@ "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", @@ -6276,7 +6238,7 @@ "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", - "integrity": "sha512-5cZ7mecD3eYcMiCH4wtRPA5iFJZ50BJYDfckI5RRpQiktMiYTcn0ccLTZOvcbBume+1304fQztxeNzNS9Gvrnw==" + "integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I=" }, "which": { "version": "1.3.1", @@ -6313,7 +6275,7 @@ "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "write": { "version": "1.0.3", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 59104241b2..d5484b1aef 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -18,6 +18,8 @@ "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write" }, "dependencies": { + "@overleaf/o-error": "^3.1.0", + "@overleaf/redis-wrapper": "^2.0.0", "async": "^2.5.0", "body-parser": "^1.19.0", "bunyan": "~0.22.1", @@ -26,7 +28,6 @@ "logger-sharelatex": "^1.9.1", "metrics-sharelatex": "^2.6.2", "mongodb": "^3.6.0", - "redis-sharelatex": "^1.0.13", "request": "^2.88.2", "requestretry": "^4.1.0", "settings-sharelatex": "^1.1.0" diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index aab254ff96..918baba141 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -17,13 +17,13 @@ chai.should() const { expect } = chai const async = require('async') const Settings = require('settings-sharelatex') -const rclient_history = require('redis-sharelatex').createClient( +const rclient_history = require('@overleaf/redis-wrapper').createClient( Settings.redis.history ) // note: this is 
track changes, not project-history -const rclient_project_history = require('redis-sharelatex').createClient( +const rclient_project_history = require('@overleaf/redis-wrapper').createClient( Settings.redis.project_history ) -const rclient_du = require('redis-sharelatex').createClient( +const rclient_du = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) const Keys = Settings.redis.documentupdater.key_schema diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index 58fe5d13eb..42c3c8af6a 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -2,7 +2,7 @@ const sinon = require('sinon') const chai = require('chai') chai.should() const Settings = require('settings-sharelatex') -const rclientProjectHistory = require('redis-sharelatex').createClient( +const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient( Settings.redis.project_history ) const ProjectHistoryKeys = Settings.redis.project_history.key_schema diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 54ca9d00d0..d47931868c 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -3,7 +3,7 @@ const chai = require('chai') chai.should() const { expect } = require('chai') const Settings = require('settings-sharelatex') -const docUpdaterRedis = require('redis-sharelatex').createClient( +const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) const Keys = Settings.redis.documentupdater.key_schema diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 9e0ee6462f..7156da0c26 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -1,13 +1,13 @@ let DocUpdaterClient const Settings = require('settings-sharelatex') -const rclient = require('redis-sharelatex').createClient( +const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) const keys = Settings.redis.documentupdater.key_schema const request = require('request').defaults({ jar: false }) const async = require('async') -const rclientSub = require('redis-sharelatex').createClient( +const rclientSub = require('@overleaf/redis-wrapper').createClient( Settings.redis.pubsub ) rclientSub.subscribe('applied-ops') diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee index 72a11164a4..e36f31f670 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee +++ b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee @@ -1,4 +1,4 @@ -redis = require "redis-sharelatex" +redis = require "@overleaf/redis-wrapper" rclient1 = redis.createClient(cluster: [{ port: "7000" host: "localhost" @@ -36,6 +36,6 @@ do sendPings = () -> setTimeout sendPings, PING_DELAY do listenInBackground = 
(cb = () ->) -> - listenForPing (error, value) -> + listenForPing (error, value) -> console.error "[RECEIVING ERROR]", error.message if error - setTimeout listenInBackground \ No newline at end of file + setTimeout listenInBackground diff --git a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee index 31bddb5bca..eccf952504 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee +++ b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee @@ -1,4 +1,4 @@ -redis = require "redis-sharelatex" +redis = require "@overleaf/redis-wrapper" rclient1 = redis.createClient(cluster: [{ port: "7000" host: "localhost" diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js index a177f162fb..0907b14e57 100644 --- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -35,7 +35,7 @@ describe('DispatchManager', function () { documentupdater: {} } }), - 'redis-sharelatex': (this.redis = {}), + '@overleaf/redis-wrapper': (this.redis = {}), './RateLimitManager': {}, './Errors': Errors, './Metrics': (this.Metrics = { diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js index c578a6be22..1b266685d1 100644 --- a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js @@ -27,7 +27,7 @@ describe('HistoryRedisManager', function () { this.rclient.multi = () => this.rclient this.HistoryRedisManager = SandboxedModule.require(modulePath, { requires: { - 'redis-sharelatex': { createClient: () => this.rclient }, + '@overleaf/redis-wrapper': { createClient: () => this.rclient }, 'settings-sharelatex': { redis: { history: (this.settings = { diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js index c97677041f..6b3c3b539e 100644 --- a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js @@ -26,7 +26,7 @@ describe('LockManager - checking the lock', function () { const mocks = { 'logger-sharelatex': { log() {} }, - 'redis-sharelatex': { + '@overleaf/redis-wrapper': { createClient() { return { auth() {}, diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index 82b0bc7da7..a04db7614f 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -31,7 +31,7 @@ describe('LockManager - releasing the lock', function () { log() {}, error() {} }, - 'redis-sharelatex': { + '@overleaf/redis-wrapper': { createClient: () => this.client }, 'settings-sharelatex': { diff --git a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js index cbe805040b..d56a244510 100644 --- 
a/services/document-updater/test/unit/js/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js @@ -26,7 +26,7 @@ describe('LockManager - getting the lock', function () { this.LockManager = SandboxedModule.require(modulePath, { requires: { 'logger-sharelatex': { log() {} }, - 'redis-sharelatex': { + '@overleaf/redis-wrapper': { createClient: () => { return { auth() {} } } diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js index 36be6321bd..02c279dd11 100644 --- a/services/document-updater/test/unit/js/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -23,7 +23,7 @@ describe('LockManager - trying the lock', function () { this.LockManager = SandboxedModule.require(modulePath, { requires: { 'logger-sharelatex': { log() {} }, - 'redis-sharelatex': { + '@overleaf/redis-wrapper': { createClient: () => { return { auth() {}, diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index c4ec5b1c12..8b62bd83f3 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -44,7 +44,7 @@ describe('ProjectHistoryRedisManager', function () { } } }), - 'redis-sharelatex': { + '@overleaf/redis-wrapper': { createClient: () => this.rclient }, 'logger-sharelatex': { diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js index cda00ca9d8..c5e4647df4 100644 --- a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -27,7 +27,7 @@ describe('RealTimeRedisManager', function () { this.pubsubClient = { publish: sinon.stub() } this.RealTimeRedisManager = SandboxedModule.require(modulePath, { requires: { - 'redis-sharelatex': { + '@overleaf/redis-wrapper': { createClient: (config) => config.name === 'pubsub' ? 
this.pubsubClient : this.rclient }, diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index 467c9a3c2f..739aa88ab8 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -104,7 +104,7 @@ describe('RedisManager', function () { } } }), - 'redis-sharelatex': { + '@overleaf/redis-wrapper': { createClient: () => this.rclient }, './Metrics': (this.metrics = { diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js index ff8b74b8e6..2ab5ba617e 100644 --- a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js +++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js @@ -30,7 +30,7 @@ describe('ShareJsUpdateManager', function () { } }), './ShareJsDB': (this.ShareJsDB = { mockDB: true }), - 'redis-sharelatex': { + '@overleaf/redis-wrapper': { createClient: () => { return (this.rclient = { auth() {} }) } From ed1e192d2e66abbc171676e843359129bd8bea67 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 23 Nov 2020 10:56:33 -0500 Subject: [PATCH 704/769] Upgrade build-scripts to 3.4.0 This version fixes docker-compose health checks for dependent services. See https://github.com/overleaf/dev-environment/pull/409 for details. --- services/document-updater/buildscript.txt | 2 +- services/document-updater/docker-compose.ci.yml | 9 +++++++++ services/document-updater/docker-compose.yml | 9 +++++++++ 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index afb9f89937..72b71fe987 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -5,4 +5,4 @@ document-updater --env-pass-through= --node-version=10.22.1 --public-repo=True ---script-version=3.3.4 +--script-version=3.4.0 diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml index d56b0e1590..1a25b3bf15 100644 --- a/services/document-updater/docker-compose.ci.yml +++ b/services/document-updater/docker-compose.ci.yml @@ -20,6 +20,7 @@ services: environment: ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis + QUEUES_REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} @@ -43,6 +44,14 @@ services: user: root redis: image: redis + healthcheck: + test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] + interval: 1s + retries: 20 mongo: image: mongo:4.0 + healthcheck: + test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 
0 : 1)'" + interval: 1s + retries: 20 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 6d513ec68f..4a16f5ecb1 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -25,6 +25,7 @@ services: environment: ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis + QUEUES_REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} @@ -41,7 +42,15 @@ services: redis: image: redis + healthcheck: + test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] + interval: 1s + retries: 20 mongo: image: mongo:4.0 + healthcheck: + test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'" + interval: 1s + retries: 20 From 5e00684dbb5773924117dec4a55ae83ed10d6cb1 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Wed, 25 Nov 2020 11:57:19 +0000 Subject: [PATCH 705/769] [misc] bump metrics module to 3.4.1 - renamed package from `metrics-sharelatex` to `@overleaf/metrics` - drop support for statsd backend - decaffeinate - compress `/metrics` response using gzip - bump debugging agents to latest versions - expose prometheus interfaces for custom metrics (custom tags) - cleanup of open sockets metrics - fix deprecation warnings for header access --- services/document-updater/app.js | 2 +- services/document-updater/app/js/Metrics.js | 2 +- services/document-updater/package-lock.json | 1610 ++++++++++--------- services/document-updater/package.json | 2 +- 4 files changed, 839 insertions(+), 777 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 6307b59180..6d1cc43b82 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -1,4 +1,4 @@ -const Metrics = require('metrics-sharelatex') +const Metrics = require('@overleaf/metrics') Metrics.initialize('doc-updater') const express = require('express') diff --git a/services/document-updater/app/js/Metrics.js b/services/document-updater/app/js/Metrics.js index e9676415ea..f0e57794fd 100644 --- a/services/document-updater/app/js/Metrics.js +++ b/services/document-updater/app/js/Metrics.js @@ -1,3 +1,3 @@ // TODO: This file was created by bulk-decaffeinate. // Sanity-check the conversion and remove this comment. 
-module.exports = require('metrics-sharelatex') +module.exports = require('@overleaf/metrics') diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 882ae13c3d..707013ae89 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -170,195 +170,6 @@ "teeny-request": "^6.0.0" } }, - "@google-cloud/debug-agent": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", - "integrity": "sha1-2qdjWhaYpWY31dxXzhED536uKdM=", - "requires": { - "@google-cloud/common": "^0.32.0", - "@sindresorhus/is": "^0.15.0", - "acorn": "^6.0.0", - "coffeescript": "^2.0.0", - "console-log-level": "^1.4.0", - "extend": "^3.0.1", - "findit2": "^2.2.3", - "gcp-metadata": "^1.0.0", - "lodash.pickby": "^4.6.0", - "p-limit": "^2.2.0", - "pify": "^4.0.1", - "semver": "^6.0.0", - "source-map": "^0.6.1", - "split": "^1.0.0" - }, - "dependencies": { - "@google-cloud/common": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", - "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", - "requires": { - "@google-cloud/projectify": "^0.3.3", - "@google-cloud/promisify": "^0.4.0", - "@types/request": "^2.48.1", - "arrify": "^2.0.0", - "duplexify": "^3.6.0", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^3.1.1", - "pify": "^4.0.1", - "retry-request": "^4.0.0", - "teeny-request": "^3.11.3" - } - }, - "@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" - }, - "@google-cloud/promisify": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", - "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" - }, - "agent-base": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", - "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", - "requires": { - "es6-promisify": "^5.0.0" - } - }, - "coffeescript": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", - "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "requires": { - "ms": "^2.1.1" - } - }, - "gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", - "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", - "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", - "requires": { - "gaxios": "^1.0.2", - "json-bigint": "^0.3.0" - } 
- }, - "google-auth-library": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", - "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", - "requires": { - "base64-js": "^1.3.0", - "fast-text-encoding": "^1.0.0", - "gaxios": "^1.2.1", - "gcp-metadata": "^1.0.0", - "gtoken": "^2.3.2", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } - } - }, - "google-p12-pem": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", - "requires": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" - } - }, - "gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", - "requires": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" - } - }, - "https-proxy-agent": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", - "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", - "requires": { - "agent-base": "^4.3.0", - "debug": "^3.1.0" - } - }, - "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "requires": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "requires": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, - "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", - "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" - }, - "teeny-request": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", - "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", - "requires": { - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.2.0", - "uuid": "^3.3.2" - } - } - } - }, "@google-cloud/logging": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", @@ -403,205 +214,6 @@ 
"extend": "^3.0.2" } }, - "@google-cloud/profiler": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", - "integrity": "sha1-Fj3738Mwuug1X+RuHlvgZTV7H1w=", - "requires": { - "@google-cloud/common": "^0.26.0", - "@types/console-log-level": "^1.4.0", - "@types/semver": "^5.5.0", - "bindings": "^1.2.1", - "console-log-level": "^1.4.0", - "delay": "^4.0.1", - "extend": "^3.0.1", - "gcp-metadata": "^0.9.0", - "nan": "^2.11.1", - "parse-duration": "^0.1.1", - "pify": "^4.0.0", - "pretty-ms": "^4.0.0", - "protobufjs": "~6.8.6", - "semver": "^5.5.0", - "teeny-request": "^3.3.0" - }, - "dependencies": { - "@google-cloud/common": { - "version": "0.26.2", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz", - "integrity": "sha1-nFTiRxqEqgMelaJIJJduCA8lVkU=", - "requires": { - "@google-cloud/projectify": "^0.3.2", - "@google-cloud/promisify": "^0.3.0", - "@types/duplexify": "^3.5.0", - "@types/request": "^2.47.0", - "arrify": "^1.0.1", - "duplexify": "^3.6.0", - "ent": "^2.2.0", - "extend": "^3.0.1", - "google-auth-library": "^2.0.0", - "pify": "^4.0.0", - "retry-request": "^4.0.0", - "through2": "^3.0.0" - } - }, - "@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" - }, - "@google-cloud/promisify": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", - "integrity": "sha1-9kHm2USo4KBe4MsQkd+mAIm+zbo=" - }, - "agent-base": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", - "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", - "requires": { - "es6-promisify": "^5.0.0" - } - }, - "arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "requires": { - "ms": "^2.1.1" - } - }, - "gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", - "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", - "integrity": "sha1-H510lfdGChRSZIHynhFZbdVj3SY=", - "requires": { - "gaxios": "^1.0.2", - "json-bigint": "^0.3.0" - } - }, - "google-auth-library": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", - "integrity": "sha1-ejFdIDZ0Svavyth7IQ7mY4tA9Xs=", - "requires": { - "axios": "^0.18.0", - "gcp-metadata": "^0.7.0", - "gtoken": "^2.3.0", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" - }, - "dependencies": { - "gcp-metadata": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz", - "integrity": 
"sha1-bDXbtSvaMqQnu5yY9UI33dG1QG8=", - "requires": { - "axios": "^0.18.0", - "extend": "^3.0.1", - "retry-axios": "0.3.2" - } - } - } - }, - "google-p12-pem": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", - "requires": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" - } - }, - "gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", - "requires": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" - } - }, - "https-proxy-agent": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", - "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", - "requires": { - "agent-base": "^4.3.0", - "debug": "^3.1.0" - } - }, - "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "requires": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "requires": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, - "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", - "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - }, - "teeny-request": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", - "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", - "requires": { - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.2.0", - "uuid": "^3.3.2" - } - } - } - }, "@google-cloud/projectify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", @@ -612,189 +224,6 @@ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, - "@google-cloud/trace-agent": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", - "integrity": 
"sha1-W+dEE5TQ6ldY8o25IqUAT/PwO+w=", - "requires": { - "@google-cloud/common": "^0.32.1", - "builtin-modules": "^3.0.0", - "console-log-level": "^1.4.0", - "continuation-local-storage": "^3.2.1", - "extend": "^3.0.0", - "gcp-metadata": "^1.0.0", - "hex2dec": "^1.0.1", - "is": "^3.2.0", - "methods": "^1.1.1", - "require-in-the-middle": "^4.0.0", - "semver": "^6.0.0", - "shimmer": "^1.2.0", - "uuid": "^3.0.1" - }, - "dependencies": { - "@google-cloud/common": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", - "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", - "requires": { - "@google-cloud/projectify": "^0.3.3", - "@google-cloud/promisify": "^0.4.0", - "@types/request": "^2.48.1", - "arrify": "^2.0.0", - "duplexify": "^3.6.0", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^3.1.1", - "pify": "^4.0.1", - "retry-request": "^4.0.0", - "teeny-request": "^3.11.3" - } - }, - "@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" - }, - "@google-cloud/promisify": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", - "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" - }, - "agent-base": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", - "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", - "requires": { - "es6-promisify": "^5.0.0" - } - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "requires": { - "ms": "^2.1.1" - } - }, - "gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", - "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", - "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", - "requires": { - "gaxios": "^1.0.2", - "json-bigint": "^0.3.0" - } - }, - "google-auth-library": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", - "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", - "requires": { - "base64-js": "^1.3.0", - "fast-text-encoding": "^1.0.0", - "gaxios": "^1.2.1", - "gcp-metadata": "^1.0.0", - "gtoken": "^2.3.2", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } - } - }, - "google-p12-pem": { - "version": 
"1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", - "requires": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" - } - }, - "gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", - "requires": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" - } - }, - "https-proxy-agent": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", - "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", - "requires": { - "agent-base": "^4.3.0", - "debug": "^3.1.0" - } - }, - "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "requires": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "requires": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, - "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", - "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" - }, - "teeny-request": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", - "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", - "requires": { - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.2.0", - "uuid": "^3.3.2" - } - } - } - }, "@grpc/grpc-js": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", @@ -834,6 +263,335 @@ "uuid": "^3.2.1" } }, + "@overleaf/metrics": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.4.1.tgz", + "integrity": "sha512-OgjlzuC+2gPdIEDHhmd9LDMu01tk1ln0cJhw1727BZ+Wgf2Z1hjuHRt4JeCkf+PFTHwJutVYT8v6IGPpNEPtbg==", + "requires": { + "@google-cloud/debug-agent": "^5.1.2", + "@google-cloud/profiler": "^4.0.3", + "@google-cloud/trace-agent": "^5.1.1", + "compression": "^1.7.4", + "prom-client": "^11.1.3", + "underscore": "~1.6.0", + "yn": "^3.1.1" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", + "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", + "requires": { + "@google-cloud/projectify": 
"^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^6.1.1", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/debug-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", + "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "acorn": "^8.0.0", + "coffeescript": "^2.0.0", + "console-log-level": "^1.4.0", + "extend": "^3.0.2", + "findit2": "^2.2.3", + "gcp-metadata": "^4.0.0", + "p-limit": "^3.0.1", + "semver": "^7.0.0", + "source-map": "^0.6.1", + "split": "^1.0.0" + } + }, + "@google-cloud/profiler": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.0.tgz", + "integrity": "sha512-9e1zXRctLSUHAoAsFGwE4rS28fr0siiG+jXl5OpwTK8ZAUlxb70aosHaZGdsv8YXrYKjuiufjRZ/OXCs0XLI9g==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@types/console-log-level": "^1.4.0", + "@types/semver": "^7.0.0", + "console-log-level": "^1.4.0", + "delay": "^4.0.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "parse-duration": "^0.4.4", + "pprof": "3.0.0", + "pretty-ms": "^7.0.0", + "protobufjs": "~6.10.0", + "semver": "^7.0.0", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "@google-cloud/trace-agent": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.1.tgz", + "integrity": "sha512-YTcK0RLN90pLCprg0XC8uV4oAVd79vsXhkcxmEVwiOOYjUDvSrAhb7y/0SY606zgfhJHmUTNb/fZSWEtZP/slQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@opencensus/propagation-stackdriver": "0.0.22", + "builtin-modules": "^3.0.0", + "console-log-level": "^1.4.0", + "continuation-local-storage": "^3.2.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^6.0.0", + "hex2dec": "^1.0.1", + "is": "^3.2.0", + "methods": "^1.1.1", + "require-in-the-middle": "^5.0.0", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "source-map-support": "^0.5.16", + "uuid": "^8.0.0" + } + }, + "@opencensus/core": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", + "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", + "requires": { + "continuation-local-storage": "^3.2.1", + "log-driver": "^1.2.7", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "uuid": "^8.0.0" + } + }, + "@opencensus/propagation-stackdriver": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", + "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", + "requires": { + "@opencensus/core": "^0.0.22", + "hex2dec": "^1.0.1", + "uuid": "^8.0.0" + } + }, + "@types/node": { + 
"version": "13.13.33", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.33.tgz", + "integrity": "sha512-1B3GM1yuYsFyEvBb+ljBqWBOylsWDYioZ5wpu8AhXdIhq20neXS7eaSC8GkwHE0yQYGiOIV43lMsgRYTgKZefQ==" + }, + "@types/semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" + }, + "acorn": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.0.4.tgz", + "integrity": "sha512-XNP0PqF1XD19ZlLKvB7cMmnZswW4C/03pRHgirB30uSJTaS3A3V1/P4sS3HPvFmjoriPCJQs+JDSbm4bL1TxGQ==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "requires": { + "ms": "2.1.2" + } + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.0.1.tgz", + "integrity": "sha512-jOin8xRZ/UytQeBpSXFqIzqU7Fi5TqgPNLlUsSB8kjJ76+FiGBfImF8KJu++c6J4jOldfJUtt0YmkRj2ZpSHTQ==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.3.tgz", + "integrity": "sha512-m9mwvY3GWbr7ZYEbl61isWmk+fvTmOt0YNUfPOUY2VH8K5pZlAIWJjxEi0PqR3OjMretyiQLI6GURMrPSwHQ2g==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.1.0.tgz", + "integrity": "sha512-4d8N6Lk8TEAHl9vVoRVMh9BNOKWVgl2DdNtr3428O75r3QFrF/a5MMu851VmK0AA8+iSvbwRv69k5XnMLURGhg==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": 
"sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "parse-duration": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.4.4.tgz", + "integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg==" + }, + "pretty-ms": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", + "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", + "requires": { + "parse-ms": "^2.1.0" + } + }, + "protobufjs": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", + "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + } + }, + "require-in-the-middle": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.0.3.tgz", + "integrity": "sha512-p/ICV8uMlqC4tjOYabLMxAWCIKa0YUQgZZ6KDM0xgXJNgdGQ1WmL2A07TwmrZw+wi6ITUFKzH5v3n+ENEyXVkA==", + "requires": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.12.0" + } + }, + "semver": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", + "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==" + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": 
"^8.0.0" + } + }, + "uuid": { + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.1.tgz", + "integrity": "sha512-FOmRr+FmWEIG8uhZv6C2bTgEVXsHk08kE7mPlrBbEe+c3r9pjceVPgupIfNIhc4yx55H69OXANrUaSuu9eInKg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@overleaf/o-error": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.1.0.tgz", @@ -901,11 +659,6 @@ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" }, - "@sindresorhus/is": { - "version": "0.15.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", - "integrity": "sha1-lpFbqgXmpqHRN7rfSYTT/AWCC7Y=" - }, "@sinonjs/commons": { "version": "1.7.2", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.7.2.tgz", @@ -972,11 +725,6 @@ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==" }, - "@types/caseless": { - "version": "0.12.2", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", - "integrity": "sha1-9l09Y4ngHutFi9VNyPUrlalGO8g=" - }, "@types/color-name": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", @@ -988,14 +736,6 @@ "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" }, - "@types/duplexify": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz", - "integrity": "sha1-38grZL06IWj1vSZESvFlvwI33Ng=", - "requires": { - "@types/node": "*" - } - }, "@types/eslint-visitor-keys": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", @@ -1026,39 +766,6 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.15.tgz", "integrity": "sha512-daFGV9GSs6USfPgxceDA8nlSe48XrVCJfDeYm7eokxq/ye7iuOH87hKXgMtEAVLFapkczbZsx868PMDT1Y0a6A==" }, - "@types/request": { - "version": "2.48.4", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.4.tgz", - "integrity": "sha512-W1t1MTKYR8PxICH+A4HgEIPuAC3sbljoEVfyZbeFJJDbr30guDspJri2XOaM2E+Un7ZjrihaDi7cf6fPa2tbgw==", - "requires": { - "@types/caseless": "*", - "@types/node": "*", - "@types/tough-cookie": "*", - "form-data": "^2.5.0" - }, - "dependencies": { - "form-data": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", - "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - } - } - }, - "@types/semver": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", - "integrity": "sha1-FGwqKe59O65L8vyydGNuJkyBPEU=" - }, - "@types/tough-cookie": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.6.tgz", - "integrity": "sha512-wHNBMnkoEBiRAd3s8KTKwIuO9biFtTf0LehITzBhSco+HQI0xkXZbLOD55SW3Aqw3oUkHstkm5SPv58yaAdFPQ==" - }, "@typescript-eslint/experimental-utils": { "version": "1.13.0", 
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", @@ -1112,6 +819,11 @@ } } }, + "abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + }, "abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", @@ -1132,7 +844,8 @@ "acorn": { "version": "6.4.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", - "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==" + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", + "dev": true }, "acorn-jsx": { "version": "5.2.0", @@ -1206,6 +919,36 @@ "color-convert": "^1.9.0" } }, + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, + "are-we-there-yet": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", + "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==", + "requires": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, "argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -1326,15 +1069,6 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz", "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug==" }, - "axios": { - "version": "0.18.1", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", - "integrity": "sha1-/z8N4ue10YDnV62YAA8Qgbh7zqM=", - "requires": { - "follow-redirects": "1.5.10", - "is-buffer": "^2.0.2" - } - }, "axobject-query": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.1.2.tgz", @@ -1464,6 +1198,11 @@ "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" + }, "builtin-modules": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", @@ -1553,6 +1292,11 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": 
"sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + }, "cli-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", @@ -1584,6 +1328,16 @@ "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==" }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -1619,11 +1373,45 @@ "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", "dev": true }, + "compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "requires": { + "mime-db": ">= 1.43.0 < 2" + } + }, + "compression": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", + "requires": { + "accepts": "~1.3.5", + "bytes": "3.0.0", + "compressible": "~2.0.16", + "debug": "2.6.9", + "on-headers": "~1.0.2", + "safe-buffer": "5.1.2", + "vary": "~1.1.2" + }, + "dependencies": { + "bytes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + } + } + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, + "console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" + }, "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", @@ -1734,6 +1522,11 @@ } } }, + "deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" + }, "deep-is": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", @@ -1759,6 +1552,11 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, + "delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" + }, "denque": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", @@ -1774,6 +1572,11 @@ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" 
}, + "detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" + }, "diff": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", @@ -1940,19 +1743,6 @@ "is-symbol": "^1.0.2" } }, - "es6-promise": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha1-TrIVlMlyvEBVPSduUQU5FD21Pgo=" - }, - "es6-promisify": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", - "requires": { - "es6-promise": "^4.0.3" - } - }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -2758,24 +2548,6 @@ "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", "dev": true }, - "follow-redirects": { - "version": "1.5.10", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", - "integrity": "sha1-e3qfmuov3/NnhqlP9kPtB/T/Xio=", - "requires": { - "debug": "=3.1.0" - }, - "dependencies": { - "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", - "requires": { - "ms": "2.0.0" - } - } - } - }, "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", @@ -2811,11 +2583,18 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" }, + "fs-minipass": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz", + "integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==", + "requires": { + "minipass": "^2.6.0" + } + }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "function-bind": { "version": "1.1.1", @@ -2829,6 +2608,54 @@ "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", "dev": true }, + "gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": 
"^3.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, "gaxios": { "version": "2.3.4", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz", @@ -3053,6 +2880,11 @@ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", "dev": true }, + "has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" + }, "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", @@ -3162,6 +2994,14 @@ "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", "dev": true }, + "ignore-walk": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz", + "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==", + "requires": { + "minimatch": "^3.0.4" + } + }, "import-fresh": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", @@ -3206,6 +3046,11 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, + "ini": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==" + }, "inquirer": { "version": "7.3.3", "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", @@ -3396,11 +3241,6 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, - "is-buffer": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", - "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" - }, "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", @@ -3422,8 +3262,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" }, "is-glob": { "version": "4.0.1", @@ -3664,11 +3503,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "lodash.pickby": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" - }, "lodash.unescape": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", @@ -3828,15 +3662,6 @@ "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" }, - "lynx": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", - "requires": { - "mersenne": "~0.0.3", - "statsd-parser": "~0.0.4" - } - }, "make-plural": { "version": "4.3.0", "resolved": 
"https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", @@ -3876,11 +3701,6 @@ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" }, - "mersenne": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" - }, "messageformat": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/messageformat/-/messageformat-2.3.0.tgz", @@ -3909,28 +3729,6 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, - "metrics-sharelatex": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.6.2.tgz", - "integrity": "sha512-bOLfkSCexiPgB96hdXhoOWyvvrwscgjeZPEqdcJ7BTGxY59anzvymNf5hTGJ1RtS4sblDKxITw3L5a+gYKhRYQ==", - "requires": { - "@google-cloud/debug-agent": "^3.0.0", - "@google-cloud/profiler": "^0.2.3", - "@google-cloud/trace-agent": "^3.2.0", - "coffee-script": "1.6.0", - "lynx": "~0.1.1", - "prom-client": "^11.1.3", - "underscore": "~1.6.0", - "yn": "^3.1.1" - }, - "dependencies": { - "coffee-script": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" - } - } - }, "mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", @@ -3968,6 +3766,38 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, + "minipass": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + }, + "minizlib": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz", + "integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==", + "requires": { + "minipass": "^2.9.0" + } + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "requires": { + "minimist": "^1.2.5" + }, + "dependencies": { + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + } + } + }, "mocha": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", @@ -4107,6 +3937,31 @@ "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", "optional": true }, + "needle": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/needle/-/needle-2.5.2.tgz", + "integrity": "sha512-LbRIwS9BfkPvNwNHlsA41Q29kL2L/6VaOJ0qisM5lLWsTV3nP15abO5ITL6L81zqFhzjRKDAYjpcBcwM0AVvLQ==", + "requires": { + "debug": "^3.2.6", + "iconv-lite": "^0.4.4", + "sax": "^1.2.4" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": 
"^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, "negotiator": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", @@ -4158,6 +4013,60 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.2.tgz", "integrity": "sha512-naKSScof4Wn+aoHU6HBsifh92Zeicm1GDQKd1vp3Y/kOi8ub0DozCa9KpvYNCXslFHYRmLNiqRopGdTGwNLpNw==" }, + "node-pre-gyp": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.16.0.tgz", + "integrity": "sha512-4efGA+X/YXAHLi1hN8KaPrILULaUn2nWecFrn1k2I+99HpoyvcOGEbtcOxpDiUwPF2ZANMJDh32qwOUPenuR1g==", + "requires": { + "detect-libc": "^1.0.2", + "mkdirp": "^0.5.3", + "needle": "^2.5.0", + "nopt": "^4.0.1", + "npm-packlist": "^1.1.6", + "npmlog": "^4.0.2", + "rc": "^1.2.7", + "rimraf": "^2.6.1", + "semver": "^5.3.0", + "tar": "^4.4.2" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "nopt": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz", + "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==", + "requires": { + "abbrev": "1", + "osenv": "^0.1.4" + } + }, "normalize-package-data": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", @@ -4178,6 +4087,45 @@ } } }, + "npm-bundled": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", + "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==", + "requires": { + "npm-normalize-package-bin": "^1.0.1" + } + }, + "npm-normalize-package-bin": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", + "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==" + }, + "npm-packlist": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz", + "integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==", + "requires": { + "ignore-walk": "^3.0.1", + "npm-bundled": "^1.0.1", + "npm-normalize-package-bin": "^1.0.1" + } + }, + "npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": 
"sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + }, "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", @@ -4186,8 +4134,7 @@ "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, "object-inspect": { "version": "1.7.0", @@ -4264,6 +4211,11 @@ } } }, + "on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -4295,16 +4247,30 @@ "word-wrap": "~1.2.3" } }, + "os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" + }, "os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", - "dev": true + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" + }, + "osenv": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", + "requires": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + } }, "p-limit": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", + "dev": true, "requires": { "p-try": "^2.0.0" } @@ -4321,7 +4287,8 @@ "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=" + "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=", + "dev": true }, "parent-module": { "version": "1.0.1", @@ -4332,11 +4299,6 @@ "callsites": "^3.0.0" } }, - "parse-duration": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.2.tgz", - "integrity": "sha512-0qfMZyjOUFBeEIvJ5EayfXJqaEXxQ+Oj2b7tWJM3hvEXvXsYCk05EDVI23oYnEw2NaFYUWdABEVPBvBMh8L/pA==" - }, "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", @@ -4385,10 +4347,67 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, - "pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha1-SyzSXFDVmHNcUCkiJP2MbfQeMjE=" + "pprof": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz", + "integrity": "sha512-uPWbAhoH/zvq1kM3/Fd/wshb4D7sLlGap8t6uCTER4aZRWqqyPYgXzpjWbT0Unn5U25pEy2VREUu27nQ9o9VPA==", + "requires": { + "bindings": "^1.2.1", + "delay": "^4.0.1", + "findit2": "^2.2.3", + "nan": "^2.14.0", + "node-pre-gyp": 
"^0.16.0", + "p-limit": "^3.0.0", + "pify": "^5.0.0", + "protobufjs": "~6.10.0", + "source-map": "^0.7.3", + "split": "^1.0.1" + }, + "dependencies": { + "@types/node": { + "version": "13.13.33", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.33.tgz", + "integrity": "sha512-1B3GM1yuYsFyEvBb+ljBqWBOylsWDYioZ5wpu8AhXdIhq20neXS7eaSC8GkwHE0yQYGiOIV43lMsgRYTgKZefQ==" + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "pify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" + }, + "protobufjs": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", + "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + } + }, + "source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" + } + } }, "prelude-ls": { "version": "1.1.2", @@ -5043,14 +5062,6 @@ } } }, - "pretty-ms": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", - "integrity": "sha1-Mbr0G5T9AiJwmKqgO9YmCOsNbpI=", - "requires": { - "parse-ms": "^2.0.0" - } - }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -5209,6 +5220,29 @@ "unpipe": "1.0.0" } }, + "rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "requires": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "dependencies": { + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" + } + } + }, "react-is": { "version": "16.13.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", @@ -5332,31 +5366,6 @@ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", "dev": true }, - "require-in-the-middle": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.1.tgz", - "integrity": "sha512-EfkM2zANyGkrfIExsECMeNn/uzjvHrE9h36yLXSavmrDiH4tgDNvltAmEKnt4PNLbqKPHZz+uszW2wTKrLUX0w==", - "requires": { - "debug": 
"^4.1.1", - "module-details-from-path": "^1.0.3", - "resolve": "^1.12.0" - }, - "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "requires": { - "ms": "^2.1.1" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - } - } - }, "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", @@ -5414,11 +5423,6 @@ "signal-exit": "^3.0.2" } }, - "retry-axios": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", - "integrity": "sha1-V1fID1hbTMTEmGqi/9R6YMbTXhM=" - }, "retry-request": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz", @@ -5510,6 +5514,11 @@ "sparse-bitfield": "^3.0.3" } }, + "sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", @@ -5556,8 +5565,7 @@ "set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", - "dev": true + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" }, "setprototypeof": { "version": "1.1.1", @@ -5612,8 +5620,7 @@ "signal-exit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", - "dev": true + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" }, "sinon": { "version": "9.0.2", @@ -5678,6 +5685,15 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" }, + "source-map-support": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, "sparse-bitfield": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", @@ -5759,11 +5775,6 @@ "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" }, - "statsd-parser": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" - }, "statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -5904,6 +5915,20 @@ "string-width": "^3.0.0" } }, + "tar": { + "version": "4.4.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz", + "integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==", + "requires": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.8.6", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + 
"safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + } + }, "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", @@ -6255,6 +6280,38 @@ "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", "dev": true }, + "wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "requires": { + "string-width": "^1.0.2 || 2" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, "word-wrap": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", @@ -6355,6 +6412,11 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==" + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==" } } } diff --git a/services/document-updater/package.json b/services/document-updater/package.json index d5484b1aef..c8ac5229ce 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -18,6 +18,7 @@ "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write" }, "dependencies": { + "@overleaf/metrics": "^3.4.1", "@overleaf/o-error": "^3.1.0", "@overleaf/redis-wrapper": "^2.0.0", "async": "^2.5.0", @@ -26,7 +27,6 @@ "express": "4.17.1", "lodash": "^4.17.19", "logger-sharelatex": "^1.9.1", - "metrics-sharelatex": "^2.6.2", "mongodb": "^3.6.0", "request": "^2.88.2", "requestretry": "^4.1.0", From a91770e9799cf87bee6ee8fe83b02d9c456482ea Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 7 Dec 2020 15:25:20 -0500 Subject: [PATCH 706/769] Decaf cleanup: remove Array.from() --- services/document-updater/app/js/DiffCodec.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 22251800b2..457ae526d0 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -9,7 +9,6 @@ // Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md @@ -32,7 +31,7 @@ module.exports = DiffCodec = { const ops = [] let position = 0 - for (const diff of Array.from(diffs)) { + for (const diff of diffs) { const type = diff[0] const content = diff[1] if (type === this.ADDED) { From b74e7f6feb4d8bfcaca301abdf2d715204199f24 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 7 Dec 2020 15:25:52 -0500 Subject: [PATCH 707/769] Decaf cleanup: unnecessary returns --- services/document-updater/app/js/DiffCodec.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 457ae526d0..b00af1eeb6 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -9,7 +9,6 @@ // Fix any style issues and re-enable lint. /* * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ @@ -51,6 +50,6 @@ module.exports = DiffCodec = { throw 'Unknown type' } } - return callback(null, ops) + callback(null, ops) } } From 9f17f3ea0a69c1bc5839baa957c97184ff96fc26 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 7 Dec 2020 15:27:01 -0500 Subject: [PATCH 708/769] Decaf cleanup: remove default callback --- services/document-updater/app/js/DiffCodec.js | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index b00af1eeb6..f52462869c 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -5,13 +5,6 @@ no-throw-literal, no-unused-vars, */ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ let DiffCodec const { diff_match_patch } = require('../lib/diff_match_patch') const dmp = new diff_match_patch() @@ -22,9 +15,6 @@ module.exports = DiffCodec = { UNCHANGED: 0, diffAsShareJsOp(before, after, callback) { - if (callback == null) { - callback = function (error, ops) {} - } const diffs = dmp.diff_main(before.join('\n'), after.join('\n')) dmp.diff_cleanupSemantic(diffs) From 8c70e72bfafdcbddb2bb5a8b8fc02590d18cea15 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 7 Dec 2020 15:27:41 -0500 Subject: [PATCH 709/769] Decaf cleanup: unused variable --- services/document-updater/app/js/DiffCodec.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index f52462869c..428f46c1c6 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -3,13 +3,11 @@ handle-callback-err, new-cap, no-throw-literal, - no-unused-vars, */ -let DiffCodec const { diff_match_patch } = require('../lib/diff_match_patch') const dmp = new diff_match_patch() -module.exports = DiffCodec = { +module.exports = { ADDED: 1, REMOVED: -1, UNCHANGED: 0, From db4b0a6f384c4f0313321c1aecbefdb6a808226b Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 7 Dec 2020 15:28:25 -0500 Subject: [PATCH 710/769] Decaf cleanup: do not throw strings --- services/document-updater/app/js/DiffCodec.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 428f46c1c6..bca22cd2b3 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -2,7 +2,6 @@ camelcase, handle-callback-err, new-cap, - no-throw-literal, */ const { diff_match_patch } = require('../lib/diff_match_patch') const dmp = new diff_match_patch() @@ -35,7 +34,7 @@ module.exports = { } else if (type === this.UNCHANGED) { position += content.length } else { - throw 'Unknown type' + throw new Error('Unknown type') } } callback(null, ops) From dce5b8759a0234c3704cd9b5701fb792b363286b Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 7 Dec 2020 15:29:24 -0500 Subject: [PATCH 711/769] Decaf cleanup: capitalize class names --- services/document-updater/app/js/DiffCodec.js | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index bca22cd2b3..f6ea27712f 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -1,10 +1,5 @@ -/* eslint-disable - camelcase, - handle-callback-err, - new-cap, -*/ -const { diff_match_patch } = require('../lib/diff_match_patch') -const dmp = new diff_match_patch() +const { diff_match_patch: DMP } = require('../lib/diff_match_patch') +const dmp = new DMP() module.exports = { ADDED: 1, From de247302b16ce9a414146f0820c682e35d5fbe30 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 7 Dec 2020 16:13:36 -0500 Subject: [PATCH 712/769] Use a centralized diff-match-patch package We use our own fork of the diff-match-patch npm package, which adds an optimization for the semantic alignment loop. 
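A minimal sketch of how the updated DiffCodec exercises this dependency, assuming the published diff-match-patch npm API (diff_main, diff_cleanupSemantic) and the ShareJS {i, p}/{d, p} text-op shapes. The loop mirrors the structure visible in the hunks above; the op-building bodies and the sample strings are illustrative, not quoted from this patch.

const DiffMatchPatch = require('diff-match-patch')
const dmp = new DiffMatchPatch()

// DiffCodec receives documents as arrays of lines and joins them with '\n'.
const before = ['the quick brown fox', 'jumped over the dog']
const after = ['the quick brown fox', 'leaped over the lazy dog']
const diffs = dmp.diff_main(before.join('\n'), after.join('\n'))

// Merge freak matches into human-sensible chunks; this semantic
// alignment loop is the part the fork optimizes.
dmp.diff_cleanupSemantic(diffs)

// Fold the [type, content] tuples into ShareJS text ops.
const ops = []
let position = 0
for (const diff of diffs) {
  const type = diff[0] // 1 = ADDED, -1 = REMOVED, 0 = UNCHANGED
  const content = diff[1]
  if (type === 1) {
    ops.push({ i: content, p: position }) // insert content at position
    position += content.length
  } else if (type === -1) {
    ops.push({ d: content, p: position }) // delete content at position
  } else {
    position += content.length // unchanged text only moves the cursor
  }
}
console.log(ops)

A delete deliberately does not advance the cursor: once its text is removed, the characters after it shift left, so the next op applies at the same position.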
--- services/document-updater/app/js/DiffCodec.js | 2 +- .../app/lib/diff_match_patch.js | 2341 ----------------- services/document-updater/package-lock.json | 26 +- services/document-updater/package.json | 1 + 4 files changed, 17 insertions(+), 2353 deletions(-) delete mode 100644 services/document-updater/app/lib/diff_match_patch.js diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index f6ea27712f..359b91e2a7 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -1,4 +1,4 @@ -const { diff_match_patch: DMP } = require('../lib/diff_match_patch') +const DMP = require('diff-match-patch') const dmp = new DMP() module.exports = { diff --git a/services/document-updater/app/lib/diff_match_patch.js b/services/document-updater/app/lib/diff_match_patch.js deleted file mode 100644 index f90ecdd8b0..0000000000 --- a/services/document-updater/app/lib/diff_match_patch.js +++ /dev/null @@ -1,2341 +0,0 @@ -/* eslint-disable */ -/** - * Diff Match and Patch - * - * Copyright 2006 Google Inc. - * http://code.google.com/p/google-diff-match-patch/ - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * @fileoverview Computes the difference between two texts to create a patch. - * Applies the patch onto another text, allowing for errors. - * @author fraser@google.com (Neil Fraser) - */ - -/** - * Class containing the diff, match and patch methods. - * @constructor - */ -function diff_match_patch() { - // Defaults. - // Redefine these in your program to override the defaults. - - // Number of seconds to map a diff before giving up (0 for infinity). - this.Diff_Timeout = 1.0 - // Cost of an empty edit operation in terms of edit characters. - this.Diff_EditCost = 4 - // At what point is no match declared (0.0 = perfection, 1.0 = very loose). - this.Match_Threshold = 0.5 - // How far to search for a match (0 = exact location, 1000+ = broad match). - // A match this many characters away from the expected location will add - // 1.0 to the score (0.0 is a perfect match). - this.Match_Distance = 1000 - // When deleting a large block of text (over ~64 characters), how close do - // the contents have to be to match the expected contents. (0.0 = perfection, - // 1.0 = very loose). Note that Match_Threshold controls how closely the - // end points of a delete need to match. - this.Patch_DeleteThreshold = 0.5 - // Chunk size for context length. - this.Patch_Margin = 4 - - // The number of bits in an int. - this.Match_MaxBits = 32 -} - -// DIFF FUNCTIONS - -/** - * The data structure representing a diff is an array of tuples: - * [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']] - * which means: delete 'Hello', add 'Goodbye' and keep ' world.' - */ -var DIFF_DELETE = -1 -var DIFF_INSERT = 1 -var DIFF_EQUAL = 0 - -/** @typedef {{0: number, 1: string}} */ -diff_match_patch.Diff - -/** - * Find the differences between two texts. 
Simplifies the problem by stripping - * any common prefix or suffix off the texts before diffing. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {boolean=} opt_checklines Optional speedup flag. If present and false, - * then don't run a line-level diff first to identify the changed areas. - * Defaults to true, which does a faster, slightly less optimal diff. - * @param {number} opt_deadline Optional time when the diff should be complete - * by. Used internally for recursive calls. Users should set DiffTimeout - * instead. - * @return {!Array.} Array of diff tuples. - */ -diff_match_patch.prototype.diff_main = function ( - text1, - text2, - opt_checklines, - opt_deadline -) { - // Set a deadline by which time the diff must be complete. - if (typeof opt_deadline == 'undefined') { - if (this.Diff_Timeout <= 0) { - opt_deadline = Number.MAX_VALUE - } else { - opt_deadline = new Date().getTime() + this.Diff_Timeout * 1000 - } - } - var deadline = opt_deadline - - // Check for null inputs. - if (text1 == null || text2 == null) { - throw new Error('Null input. (diff_main)') - } - - // Check for equality (speedup). - if (text1 == text2) { - if (text1) { - return [[DIFF_EQUAL, text1]] - } - return [] - } - - if (typeof opt_checklines == 'undefined') { - opt_checklines = true - } - var checklines = opt_checklines - - // Trim off common prefix (speedup). - var commonlength = this.diff_commonPrefix(text1, text2) - var commonprefix = text1.substring(0, commonlength) - text1 = text1.substring(commonlength) - text2 = text2.substring(commonlength) - - // Trim off common suffix (speedup). - commonlength = this.diff_commonSuffix(text1, text2) - var commonsuffix = text1.substring(text1.length - commonlength) - text1 = text1.substring(0, text1.length - commonlength) - text2 = text2.substring(0, text2.length - commonlength) - - // Compute the diff on the middle block. - var diffs = this.diff_compute_(text1, text2, checklines, deadline) - - // Restore the prefix and suffix. - if (commonprefix) { - diffs.unshift([DIFF_EQUAL, commonprefix]) - } - if (commonsuffix) { - diffs.push([DIFF_EQUAL, commonsuffix]) - } - this.diff_cleanupMerge(diffs) - return diffs -} - -/** - * Find the differences between two texts. Assumes that the texts do not - * have any common prefix or suffix. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {boolean} checklines Speedup flag. If false, then don't run a - * line-level diff first to identify the changed areas. - * If true, then run a faster, slightly less optimal diff. - * @param {number} deadline Time when the diff should be complete by. - * @return {!Array.} Array of diff tuples. - * @private - */ -diff_match_patch.prototype.diff_compute_ = function ( - text1, - text2, - checklines, - deadline -) { - var diffs - - if (!text1) { - // Just add some text (speedup). - return [[DIFF_INSERT, text2]] - } - - if (!text2) { - // Just delete some text (speedup). - return [[DIFF_DELETE, text1]] - } - - var longtext = text1.length > text2.length ? text1 : text2 - var shorttext = text1.length > text2.length ? text2 : text1 - var i = longtext.indexOf(shorttext) - if (i != -1) { - // Shorter text is inside the longer text (speedup). - diffs = [ - [DIFF_INSERT, longtext.substring(0, i)], - [DIFF_EQUAL, shorttext], - [DIFF_INSERT, longtext.substring(i + shorttext.length)] - ] - // Swap insertions for deletions if diff is reversed. 
- if (text1.length > text2.length) { - diffs[0][0] = diffs[2][0] = DIFF_DELETE - } - return diffs - } - - if (shorttext.length == 1) { - // Single character string. - // After the previous speedup, the character can't be an equality. - return [ - [DIFF_DELETE, text1], - [DIFF_INSERT, text2] - ] - } - - // Check to see if the problem can be split in two. - var hm = this.diff_halfMatch_(text1, text2) - if (hm) { - // A half-match was found, sort out the return data. - var text1_a = hm[0] - var text1_b = hm[1] - var text2_a = hm[2] - var text2_b = hm[3] - var mid_common = hm[4] - // Send both pairs off for separate processing. - var diffs_a = this.diff_main(text1_a, text2_a, checklines, deadline) - var diffs_b = this.diff_main(text1_b, text2_b, checklines, deadline) - // Merge the results. - return diffs_a.concat([[DIFF_EQUAL, mid_common]], diffs_b) - } - - if (checklines && text1.length > 100 && text2.length > 100) { - return this.diff_lineMode_(text1, text2, deadline) - } - - return this.diff_bisect_(text1, text2, deadline) -} - -/** - * Do a quick line-level diff on both strings, then rediff the parts for - * greater accuracy. - * This speedup can produce non-minimal diffs. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {number} deadline Time when the diff should be complete by. - * @return {!Array.} Array of diff tuples. - * @private - */ -diff_match_patch.prototype.diff_lineMode_ = function (text1, text2, deadline) { - // Scan the text on a line-by-line basis first. - var a = this.diff_linesToChars_(text1, text2) - text1 = a.chars1 - text2 = a.chars2 - var linearray = a.lineArray - - var diffs = this.diff_main(text1, text2, false, deadline) - - // Convert the diff back to original text. - this.diff_charsToLines_(diffs, linearray) - // Eliminate freak matches (e.g. blank lines) - this.diff_cleanupSemantic(diffs) - - // Rediff any replacement blocks, this time character-by-character. - // Add a dummy entry at the end. - diffs.push([DIFF_EQUAL, '']) - var pointer = 0 - var count_delete = 0 - var count_insert = 0 - var text_delete = '' - var text_insert = '' - while (pointer < diffs.length) { - switch (diffs[pointer][0]) { - case DIFF_INSERT: - count_insert++ - text_insert += diffs[pointer][1] - break - case DIFF_DELETE: - count_delete++ - text_delete += diffs[pointer][1] - break - case DIFF_EQUAL: - // Upon reaching an equality, check for prior redundancies. - if (count_delete >= 1 && count_insert >= 1) { - // Delete the offending records and add the merged ones. - diffs.splice( - pointer - count_delete - count_insert, - count_delete + count_insert - ) - pointer = pointer - count_delete - count_insert - var a = this.diff_main(text_delete, text_insert, false, deadline) - for (var j = a.length - 1; j >= 0; j--) { - diffs.splice(pointer, 0, a[j]) - } - pointer = pointer + a.length - } - count_insert = 0 - count_delete = 0 - text_delete = '' - text_insert = '' - break - } - pointer++ - } - diffs.pop() // Remove the dummy entry at the end. - - return diffs -} - -/** - * Find the 'middle snake' of a diff, split the problem in two - * and return the recursively constructed diff. - * See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {number} deadline Time at which to bail if not yet complete. - * @return {!Array.} Array of diff tuples. 
- * @private - */ -diff_match_patch.prototype.diff_bisect_ = function (text1, text2, deadline) { - // Cache the text lengths to prevent multiple calls. - var text1_length = text1.length - var text2_length = text2.length - var max_d = Math.ceil((text1_length + text2_length) / 2) - var v_offset = max_d - var v_length = 2 * max_d - var v1 = new Array(v_length) - var v2 = new Array(v_length) - // Setting all elements to -1 is faster in Chrome & Firefox than mixing - // integers and undefined. - for (var x = 0; x < v_length; x++) { - v1[x] = -1 - v2[x] = -1 - } - v1[v_offset + 1] = 0 - v2[v_offset + 1] = 0 - var delta = text1_length - text2_length - // If the total number of characters is odd, then the front path will collide - // with the reverse path. - var front = delta % 2 != 0 - // Offsets for start and end of k loop. - // Prevents mapping of space beyond the grid. - var k1start = 0 - var k1end = 0 - var k2start = 0 - var k2end = 0 - for (var d = 0; d < max_d; d++) { - // Bail out if deadline is reached. - if (new Date().getTime() > deadline) { - break - } - - // Walk the front path one step. - for (var k1 = -d + k1start; k1 <= d - k1end; k1 += 2) { - var k1_offset = v_offset + k1 - var x1 - if (k1 == -d || (k1 != d && v1[k1_offset - 1] < v1[k1_offset + 1])) { - x1 = v1[k1_offset + 1] - } else { - x1 = v1[k1_offset - 1] + 1 - } - var y1 = x1 - k1 - while ( - x1 < text1_length && - y1 < text2_length && - text1.charAt(x1) == text2.charAt(y1) - ) { - x1++ - y1++ - } - v1[k1_offset] = x1 - if (x1 > text1_length) { - // Ran off the right of the graph. - k1end += 2 - } else if (y1 > text2_length) { - // Ran off the bottom of the graph. - k1start += 2 - } else if (front) { - var k2_offset = v_offset + delta - k1 - if (k2_offset >= 0 && k2_offset < v_length && v2[k2_offset] != -1) { - // Mirror x2 onto top-left coordinate system. - var x2 = text1_length - v2[k2_offset] - if (x1 >= x2) { - // Overlap detected. - return this.diff_bisectSplit_(text1, text2, x1, y1, deadline) - } - } - } - } - - // Walk the reverse path one step. - for (var k2 = -d + k2start; k2 <= d - k2end; k2 += 2) { - var k2_offset = v_offset + k2 - var x2 - if (k2 == -d || (k2 != d && v2[k2_offset - 1] < v2[k2_offset + 1])) { - x2 = v2[k2_offset + 1] - } else { - x2 = v2[k2_offset - 1] + 1 - } - var y2 = x2 - k2 - while ( - x2 < text1_length && - y2 < text2_length && - text1.charAt(text1_length - x2 - 1) == - text2.charAt(text2_length - y2 - 1) - ) { - x2++ - y2++ - } - v2[k2_offset] = x2 - if (x2 > text1_length) { - // Ran off the left of the graph. - k2end += 2 - } else if (y2 > text2_length) { - // Ran off the top of the graph. - k2start += 2 - } else if (!front) { - var k1_offset = v_offset + delta - k2 - if (k1_offset >= 0 && k1_offset < v_length && v1[k1_offset] != -1) { - var x1 = v1[k1_offset] - var y1 = v_offset + x1 - k1_offset - // Mirror x2 onto top-left coordinate system. - x2 = text1_length - x2 - if (x1 >= x2) { - // Overlap detected. - return this.diff_bisectSplit_(text1, text2, x1, y1, deadline) - } - } - } - } - } - // Diff took too long and hit the deadline or - // number of diffs equals number of characters, no commonality at all. - return [ - [DIFF_DELETE, text1], - [DIFF_INSERT, text2] - ] -} - -/** - * Given the location of the 'middle snake', split the diff in two parts - * and recurse. - * @param {string} text1 Old string to be diffed. - * @param {string} text2 New string to be diffed. - * @param {number} x Index of split point in text1. - * @param {number} y Index of split point in text2. 
- * @param {number} deadline Time at which to bail if not yet complete. - * @return {!Array.} Array of diff tuples. - * @private - */ -diff_match_patch.prototype.diff_bisectSplit_ = function ( - text1, - text2, - x, - y, - deadline -) { - var text1a = text1.substring(0, x) - var text2a = text2.substring(0, y) - var text1b = text1.substring(x) - var text2b = text2.substring(y) - - // Compute both diffs serially. - var diffs = this.diff_main(text1a, text2a, false, deadline) - var diffsb = this.diff_main(text1b, text2b, false, deadline) - - return diffs.concat(diffsb) -} - -/** - * Split two texts into an array of strings. Reduce the texts to a string of - * hashes where each Unicode character represents one line. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {{chars1: string, chars2: string, lineArray: !Array.}} - * An object containing the encoded text1, the encoded text2 and - * the array of unique strings. - * The zeroth element of the array of unique strings is intentionally blank. - * @private - */ -diff_match_patch.prototype.diff_linesToChars_ = function (text1, text2) { - var lineArray = [] // e.g. lineArray[4] == 'Hello\n' - var lineHash = {} // e.g. lineHash['Hello\n'] == 4 - - // '\x00' is a valid character, but various debuggers don't like it. - // So we'll insert a junk entry to avoid generating a null character. - lineArray[0] = '' - - /** - * Split a text into an array of strings. Reduce the texts to a string of - * hashes where each Unicode character represents one line. - * Modifies linearray and linehash through being a closure. - * @param {string} text String to encode. - * @return {string} Encoded string. - * @private - */ - function diff_linesToCharsMunge_(text) { - var chars = '' - // Walk the text, pulling out a substring for each line. - // text.split('\n') would would temporarily double our memory footprint. - // Modifying text would create many large strings to garbage collect. - var lineStart = 0 - var lineEnd = -1 - // Keeping our own length variable is faster than looking it up. - var lineArrayLength = lineArray.length - while (lineEnd < text.length - 1) { - lineEnd = text.indexOf('\n', lineStart) - if (lineEnd == -1) { - lineEnd = text.length - 1 - } - var line = text.substring(lineStart, lineEnd + 1) - lineStart = lineEnd + 1 - - if ( - lineHash.hasOwnProperty - ? lineHash.hasOwnProperty(line) - : lineHash[line] !== undefined - ) { - chars += String.fromCharCode(lineHash[line]) - } else { - chars += String.fromCharCode(lineArrayLength) - lineHash[line] = lineArrayLength - lineArray[lineArrayLength++] = line - } - } - return chars - } - - var chars1 = diff_linesToCharsMunge_(text1) - var chars2 = diff_linesToCharsMunge_(text2) - return { chars1: chars1, chars2: chars2, lineArray: lineArray } -} - -/** - * Rehydrate the text in a diff from a string of line hashes to real lines of - * text. - * @param {!Array.} diffs Array of diff tuples. - * @param {!Array.} lineArray Array of unique strings. - * @private - */ -diff_match_patch.prototype.diff_charsToLines_ = function (diffs, lineArray) { - for (var x = 0; x < diffs.length; x++) { - var chars = diffs[x][1] - var text = [] - for (var y = 0; y < chars.length; y++) { - text[y] = lineArray[chars.charCodeAt(y)] - } - diffs[x][1] = text.join('') - } -} - -/** - * Determine the common prefix of two strings. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {number} The number of characters common to the start of each - * string. 
- */ -diff_match_patch.prototype.diff_commonPrefix = function (text1, text2) { - // Quick check for common null cases. - if (!text1 || !text2 || text1.charAt(0) != text2.charAt(0)) { - return 0 - } - // Binary search. - // Performance analysis: http://neil.fraser.name/news/2007/10/09/ - var pointermin = 0 - var pointermax = Math.min(text1.length, text2.length) - var pointermid = pointermax - var pointerstart = 0 - while (pointermin < pointermid) { - if ( - text1.substring(pointerstart, pointermid) == - text2.substring(pointerstart, pointermid) - ) { - pointermin = pointermid - pointerstart = pointermin - } else { - pointermax = pointermid - } - pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin) - } - return pointermid -} - -/** - * Determine the common suffix of two strings. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {number} The number of characters common to the end of each string. - */ -diff_match_patch.prototype.diff_commonSuffix = function (text1, text2) { - // Quick check for common null cases. - if ( - !text1 || - !text2 || - text1.charAt(text1.length - 1) != text2.charAt(text2.length - 1) - ) { - return 0 - } - // Binary search. - // Performance analysis: http://neil.fraser.name/news/2007/10/09/ - var pointermin = 0 - var pointermax = Math.min(text1.length, text2.length) - var pointermid = pointermax - var pointerend = 0 - while (pointermin < pointermid) { - if ( - text1.substring(text1.length - pointermid, text1.length - pointerend) == - text2.substring(text2.length - pointermid, text2.length - pointerend) - ) { - pointermin = pointermid - pointerend = pointermin - } else { - pointermax = pointermid - } - pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin) - } - return pointermid -} - -/** - * Determine if the suffix of one string is the prefix of another. - * @param {string} text1 First string. - * @param {string} text2 Second string. - * @return {number} The number of characters common to the end of the first - * string and the start of the second string. - * @private - */ -diff_match_patch.prototype.diff_commonOverlap_ = function (text1, text2) { - // Cache the text lengths to prevent multiple calls. - var text1_length = text1.length - var text2_length = text2.length - // Eliminate the null case. - if (text1_length == 0 || text2_length == 0) { - return 0 - } - // Truncate the longer string. - if (text1_length > text2_length) { - text1 = text1.substring(text1_length - text2_length) - } else if (text1_length < text2_length) { - text2 = text2.substring(0, text1_length) - } - var text_length = Math.min(text1_length, text2_length) - // Quick check for the worst case. - if (text1 == text2) { - return text_length - } - - // Start by looking for a single character match - // and increase length until no match is found. - // Performance analysis: http://neil.fraser.name/news/2010/11/04/ - var best = 0 - var length = 1 - while (true) { - var pattern = text1.substring(text_length - length) - var found = text2.indexOf(pattern) - if (found == -1) { - return best - } - length += found - if ( - found == 0 || - text1.substring(text_length - length) == text2.substring(0, length) - ) { - best = length - length++ - } - } -} - -/** - * Do the two texts share a substring which is at least half the length of the - * longer text? - * This speedup can produce non-minimal diffs. - * @param {string} text1 First string. - * @param {string} text2 Second string. 
- * @return {Array.} Five element Array, containing the prefix of - * text1, the suffix of text1, the prefix of text2, the suffix of - * text2 and the common middle. Or null if there was no match. - * @private - */ -diff_match_patch.prototype.diff_halfMatch_ = function (text1, text2) { - if (this.Diff_Timeout <= 0) { - // Don't risk returning a non-optimal diff if we have unlimited time. - return null - } - var longtext = text1.length > text2.length ? text1 : text2 - var shorttext = text1.length > text2.length ? text2 : text1 - if (longtext.length < 4 || shorttext.length * 2 < longtext.length) { - return null // Pointless. - } - var dmp = this // 'this' becomes 'window' in a closure. - - /** - * Does a substring of shorttext exist within longtext such that the substring - * is at least half the length of longtext? - * Closure, but does not reference any external variables. - * @param {string} longtext Longer string. - * @param {string} shorttext Shorter string. - * @param {number} i Start index of quarter length substring within longtext. - * @return {Array.} Five element Array, containing the prefix of - * longtext, the suffix of longtext, the prefix of shorttext, the suffix - * of shorttext and the common middle. Or null if there was no match. - * @private - */ - function diff_halfMatchI_(longtext, shorttext, i) { - // Start with a 1/4 length substring at position i as a seed. - var seed = longtext.substring(i, i + Math.floor(longtext.length / 4)) - var j = -1 - var best_common = '' - var best_longtext_a, best_longtext_b, best_shorttext_a, best_shorttext_b - while ((j = shorttext.indexOf(seed, j + 1)) != -1) { - var prefixLength = dmp.diff_commonPrefix( - longtext.substring(i), - shorttext.substring(j) - ) - var suffixLength = dmp.diff_commonSuffix( - longtext.substring(0, i), - shorttext.substring(0, j) - ) - if (best_common.length < suffixLength + prefixLength) { - best_common = - shorttext.substring(j - suffixLength, j) + - shorttext.substring(j, j + prefixLength) - best_longtext_a = longtext.substring(0, i - suffixLength) - best_longtext_b = longtext.substring(i + prefixLength) - best_shorttext_a = shorttext.substring(0, j - suffixLength) - best_shorttext_b = shorttext.substring(j + prefixLength) - } - } - if (best_common.length * 2 >= longtext.length) { - return [ - best_longtext_a, - best_longtext_b, - best_shorttext_a, - best_shorttext_b, - best_common - ] - } else { - return null - } - } - - // First check if the second quarter is the seed for a half-match. - var hm1 = diff_halfMatchI_( - longtext, - shorttext, - Math.ceil(longtext.length / 4) - ) - // Check again based on the third quarter. - var hm2 = diff_halfMatchI_( - longtext, - shorttext, - Math.ceil(longtext.length / 2) - ) - var hm - if (!hm1 && !hm2) { - return null - } else if (!hm2) { - hm = hm1 - } else if (!hm1) { - hm = hm2 - } else { - // Both matched. Select the longest. - hm = hm1[4].length > hm2[4].length ? hm1 : hm2 - } - - // A half-match was found, sort out the return data. - var text1_a, text1_b, text2_a, text2_b - if (text1.length > text2.length) { - text1_a = hm[0] - text1_b = hm[1] - text2_a = hm[2] - text2_b = hm[3] - } else { - text2_a = hm[0] - text2_b = hm[1] - text1_a = hm[2] - text1_b = hm[3] - } - var mid_common = hm[4] - return [text1_a, text1_b, text2_a, text2_b, mid_common] -} - -/** - * Reduce the number of edits by eliminating semantically trivial equalities. - * @param {!Array.} diffs Array of diff tuples. 
- */ -diff_match_patch.prototype.diff_cleanupSemantic = function (diffs) { - var changes = false - var equalities = [] // Stack of indices where equalities are found. - var equalitiesLength = 0 // Keeping our own length var is faster in JS. - /** @type {?string} */ - var lastequality = null - // Always equal to diffs[equalities[equalitiesLength - 1]][1] - var pointer = 0 // Index of current position. - // Number of characters that changed prior to the equality. - var length_insertions1 = 0 - var length_deletions1 = 0 - // Number of characters that changed after the equality. - var length_insertions2 = 0 - var length_deletions2 = 0 - while (pointer < diffs.length) { - if (diffs[pointer][0] == DIFF_EQUAL) { - // Equality found. - equalities[equalitiesLength++] = pointer - length_insertions1 = length_insertions2 - length_deletions1 = length_deletions2 - length_insertions2 = 0 - length_deletions2 = 0 - lastequality = diffs[pointer][1] - } else { - // An insertion or deletion. - if (diffs[pointer][0] == DIFF_INSERT) { - length_insertions2 += diffs[pointer][1].length - } else { - length_deletions2 += diffs[pointer][1].length - } - // Eliminate an equality that is smaller or equal to the edits on both - // sides of it. - if ( - lastequality && - lastequality.length <= - Math.max(length_insertions1, length_deletions1) && - lastequality.length <= Math.max(length_insertions2, length_deletions2) - ) { - // Duplicate record. - diffs.splice(equalities[equalitiesLength - 1], 0, [ - DIFF_DELETE, - lastequality - ]) - // Change second copy to insert. - diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT - // Throw away the equality we just deleted. - equalitiesLength-- - // Throw away the previous equality (it needs to be reevaluated). - equalitiesLength-- - pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1 - length_insertions1 = 0 // Reset the counters. - length_deletions1 = 0 - length_insertions2 = 0 - length_deletions2 = 0 - lastequality = null - changes = true - } - } - pointer++ - } - - // Normalize the diff. - if (changes) { - this.diff_cleanupMerge(diffs) - } - this.diff_cleanupSemanticLossless(diffs) - - // Find any overlaps between deletions and insertions. - // e.g: abcxxxxxxdef - // -> abcxxxdef - // e.g: xxxabcdefxxx - // -> defxxxabc - // Only extract an overlap if it is as big as the edit ahead or behind it. - pointer = 1 - while (pointer < diffs.length) { - if ( - diffs[pointer - 1][0] == DIFF_DELETE && - diffs[pointer][0] == DIFF_INSERT - ) { - var deletion = diffs[pointer - 1][1] - var insertion = diffs[pointer][1] - var overlap_length1 = this.diff_commonOverlap_(deletion, insertion) - var overlap_length2 = this.diff_commonOverlap_(insertion, deletion) - if (overlap_length1 >= overlap_length2) { - if ( - overlap_length1 >= deletion.length / 2 || - overlap_length1 >= insertion.length / 2 - ) { - // Overlap found. Insert an equality and trim the surrounding edits. - diffs.splice(pointer, 0, [ - DIFF_EQUAL, - insertion.substring(0, overlap_length1) - ]) - diffs[pointer - 1][1] = deletion.substring( - 0, - deletion.length - overlap_length1 - ) - diffs[pointer + 1][1] = insertion.substring(overlap_length1) - pointer++ - } - } else { - if ( - overlap_length2 >= deletion.length / 2 || - overlap_length2 >= insertion.length / 2 - ) { - // Reverse overlap found. - // Insert an equality and swap and trim the surrounding edits. 
- diffs.splice(pointer, 0, [ - DIFF_EQUAL, - deletion.substring(0, overlap_length2) - ]) - diffs[pointer - 1][0] = DIFF_INSERT - diffs[pointer - 1][1] = insertion.substring( - 0, - insertion.length - overlap_length2 - ) - diffs[pointer + 1][0] = DIFF_DELETE - diffs[pointer + 1][1] = deletion.substring(overlap_length2) - pointer++ - } - } - pointer++ - } - pointer++ - } -} - -/** - * Look for single edits surrounded on both sides by equalities - * which can be shifted sideways to align the edit to a word boundary. - * e.g: The cat came. -> The cat came. - * @param {!Array.} diffs Array of diff tuples. - */ -diff_match_patch.prototype.diff_cleanupSemanticLossless = function (diffs) { - /** - * Given two strings, compute a score representing whether the internal - * boundary falls on logical boundaries. - * Scores range from 6 (best) to 0 (worst). - * Closure, but does not reference any external variables. - * @param {string} one First string. - * @param {string} two Second string. - * @return {number} The score. - * @private - */ - function diff_cleanupSemanticScore_(one, two) { - if (!one || !two) { - // Edges are the best. - return 6 - } - - // Each port of this function behaves slightly differently due to - // subtle differences in each language's definition of things like - // 'whitespace'. Since this function's purpose is largely cosmetic, - // the choice has been made to use each language's native features - // rather than force total conformity. - var char1 = one.charAt(one.length - 1) - var char2 = two.charAt(0) - var nonAlphaNumeric1 = char1.match(diff_match_patch.nonAlphaNumericRegex_) - var nonAlphaNumeric2 = char2.match(diff_match_patch.nonAlphaNumericRegex_) - var whitespace1 = - nonAlphaNumeric1 && char1.match(diff_match_patch.whitespaceRegex_) - var whitespace2 = - nonAlphaNumeric2 && char2.match(diff_match_patch.whitespaceRegex_) - var lineBreak1 = - whitespace1 && char1.match(diff_match_patch.linebreakRegex_) - var lineBreak2 = - whitespace2 && char2.match(diff_match_patch.linebreakRegex_) - var blankLine1 = - lineBreak1 && one.match(diff_match_patch.blanklineEndRegex_) - var blankLine2 = - lineBreak2 && two.match(diff_match_patch.blanklineStartRegex_) - - if (blankLine1 || blankLine2) { - // Five points for blank lines. - return 5 - } else if (lineBreak1 || lineBreak2) { - // Four points for line breaks. - return 4 - } else if (nonAlphaNumeric1 && !whitespace1 && whitespace2) { - // Three points for end of sentences. - return 3 - } else if (whitespace1 || whitespace2) { - // Two points for whitespace. - return 2 - } else if (nonAlphaNumeric1 || nonAlphaNumeric2) { - // One point for non-alphanumeric. - return 1 - } - return 0 - } - - var pointer = 1 - // Intentionally ignore the first and last element (don't need checking). - while (pointer < diffs.length - 1) { - if ( - diffs[pointer - 1][0] == DIFF_EQUAL && - diffs[pointer + 1][0] == DIFF_EQUAL - ) { - // This is a single edit surrounded by equalities. - var equality1 = diffs[pointer - 1][1] - var edit = diffs[pointer][1] - var equality2 = diffs[pointer + 1][1] - - // First, shift the edit as far left as possible. - var commonOffset = this.diff_commonSuffix(equality1, edit) - if (commonOffset) { - var commonString = edit.substring(edit.length - commonOffset) - equality1 = equality1.substring(0, equality1.length - commonOffset) - edit = commonString + edit.substring(0, edit.length - commonOffset) - equality2 = commonString + equality2 - } - - // Second, step character by character right, looking for the best fit. 
- var bestEquality1 = equality1 - var bestEdit = edit - var bestEquality2 = equality2 - var bestScore = - diff_cleanupSemanticScore_(equality1, edit) + - diff_cleanupSemanticScore_(edit, equality2) - while (edit.charAt(0) === equality2.charAt(0)) { - equality1 += edit.charAt(0) - edit = edit.substring(1) + equality2.charAt(0) - equality2 = equality2.substring(1) - var score = - diff_cleanupSemanticScore_(equality1, edit) + - diff_cleanupSemanticScore_(edit, equality2) - // The >= encourages trailing rather than leading whitespace on edits. - if (score >= bestScore) { - bestScore = score - bestEquality1 = equality1 - bestEdit = edit - bestEquality2 = equality2 - } - } - - if (diffs[pointer - 1][1] != bestEquality1) { - // We have an improvement, save it back to the diff. - if (bestEquality1) { - diffs[pointer - 1][1] = bestEquality1 - } else { - diffs.splice(pointer - 1, 1) - pointer-- - } - diffs[pointer][1] = bestEdit - if (bestEquality2) { - diffs[pointer + 1][1] = bestEquality2 - } else { - diffs.splice(pointer + 1, 1) - pointer-- - } - } - } - pointer++ - } -} - -// Define some regex patterns for matching boundaries. -diff_match_patch.nonAlphaNumericRegex_ = /[^a-zA-Z0-9]/ -diff_match_patch.whitespaceRegex_ = /\s/ -diff_match_patch.linebreakRegex_ = /[\r\n]/ -diff_match_patch.blanklineEndRegex_ = /\n\r?\n$/ -diff_match_patch.blanklineStartRegex_ = /^\r?\n\r?\n/ - -/** - * Reduce the number of edits by eliminating operationally trivial equalities. - * @param {!Array.} diffs Array of diff tuples. - */ -diff_match_patch.prototype.diff_cleanupEfficiency = function (diffs) { - var changes = false - var equalities = [] // Stack of indices where equalities are found. - var equalitiesLength = 0 // Keeping our own length var is faster in JS. - /** @type {?string} */ - var lastequality = null - // Always equal to diffs[equalities[equalitiesLength - 1]][1] - var pointer = 0 // Index of current position. - // Is there an insertion operation before the last equality. - var pre_ins = false - // Is there a deletion operation before the last equality. - var pre_del = false - // Is there an insertion operation after the last equality. - var post_ins = false - // Is there a deletion operation after the last equality. - var post_del = false - while (pointer < diffs.length) { - if (diffs[pointer][0] == DIFF_EQUAL) { - // Equality found. - if ( - diffs[pointer][1].length < this.Diff_EditCost && - (post_ins || post_del) - ) { - // Candidate found. - equalities[equalitiesLength++] = pointer - pre_ins = post_ins - pre_del = post_del - lastequality = diffs[pointer][1] - } else { - // Not a candidate, and can never become one. - equalitiesLength = 0 - lastequality = null - } - post_ins = post_del = false - } else { - // An insertion or deletion. - if (diffs[pointer][0] == DIFF_DELETE) { - post_del = true - } else { - post_ins = true - } - /* - * Five types to be split: - * ABXYCD - * AXCD - * ABXC - * AXCD - * ABXC - */ - if ( - lastequality && - ((pre_ins && pre_del && post_ins && post_del) || - (lastequality.length < this.Diff_EditCost / 2 && - pre_ins + pre_del + post_ins + post_del == 3)) - ) { - // Duplicate record. - diffs.splice(equalities[equalitiesLength - 1], 0, [ - DIFF_DELETE, - lastequality - ]) - // Change second copy to insert. - diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT - equalitiesLength-- // Throw away the equality we just deleted; - lastequality = null - if (pre_ins && pre_del) { - // No changes made which could affect previous entry, keep going. 
- post_ins = post_del = true - equalitiesLength = 0 - } else { - equalitiesLength-- // Throw away the previous equality. - pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1 - post_ins = post_del = false - } - changes = true - } - } - pointer++ - } - - if (changes) { - this.diff_cleanupMerge(diffs) - } -} - -/** - * Reorder and merge like edit sections. Merge equalities. - * Any edit section can move as long as it doesn't cross an equality. - * @param {!Array.} diffs Array of diff tuples. - */ -diff_match_patch.prototype.diff_cleanupMerge = function (diffs) { - diffs.push([DIFF_EQUAL, '']) // Add a dummy entry at the end. - var pointer = 0 - var count_delete = 0 - var count_insert = 0 - var text_delete = '' - var text_insert = '' - var commonlength - while (pointer < diffs.length) { - switch (diffs[pointer][0]) { - case DIFF_INSERT: - count_insert++ - text_insert += diffs[pointer][1] - pointer++ - break - case DIFF_DELETE: - count_delete++ - text_delete += diffs[pointer][1] - pointer++ - break - case DIFF_EQUAL: - // Upon reaching an equality, check for prior redundancies. - if (count_delete + count_insert > 1) { - if (count_delete !== 0 && count_insert !== 0) { - // Factor out any common prefixies. - commonlength = this.diff_commonPrefix(text_insert, text_delete) - if (commonlength !== 0) { - if ( - pointer - count_delete - count_insert > 0 && - diffs[pointer - count_delete - count_insert - 1][0] == - DIFF_EQUAL - ) { - diffs[ - pointer - count_delete - count_insert - 1 - ][1] += text_insert.substring(0, commonlength) - } else { - diffs.splice(0, 0, [ - DIFF_EQUAL, - text_insert.substring(0, commonlength) - ]) - pointer++ - } - text_insert = text_insert.substring(commonlength) - text_delete = text_delete.substring(commonlength) - } - // Factor out any common suffixies. - commonlength = this.diff_commonSuffix(text_insert, text_delete) - if (commonlength !== 0) { - diffs[pointer][1] = - text_insert.substring(text_insert.length - commonlength) + - diffs[pointer][1] - text_insert = text_insert.substring( - 0, - text_insert.length - commonlength - ) - text_delete = text_delete.substring( - 0, - text_delete.length - commonlength - ) - } - } - // Delete the offending records and add the merged ones. - if (count_delete === 0) { - diffs.splice(pointer - count_insert, count_delete + count_insert, [ - DIFF_INSERT, - text_insert - ]) - } else if (count_insert === 0) { - diffs.splice(pointer - count_delete, count_delete + count_insert, [ - DIFF_DELETE, - text_delete - ]) - } else { - diffs.splice( - pointer - count_delete - count_insert, - count_delete + count_insert, - [DIFF_DELETE, text_delete], - [DIFF_INSERT, text_insert] - ) - } - pointer = - pointer - - count_delete - - count_insert + - (count_delete ? 1 : 0) + - (count_insert ? 1 : 0) + - 1 - } else if (pointer !== 0 && diffs[pointer - 1][0] == DIFF_EQUAL) { - // Merge this equality with the previous one. - diffs[pointer - 1][1] += diffs[pointer][1] - diffs.splice(pointer, 1) - } else { - pointer++ - } - count_insert = 0 - count_delete = 0 - text_delete = '' - text_insert = '' - break - } - } - if (diffs[diffs.length - 1][1] === '') { - diffs.pop() // Remove the dummy entry at the end. - } - - // Second pass: look for single edits surrounded on both sides by equalities - // which can be shifted sideways to eliminate an equality. - // e.g: ABAC -> ABAC - var changes = false - pointer = 1 - // Intentionally ignore the first and last element (don't need checking). 
- while (pointer < diffs.length - 1) { - if ( - diffs[pointer - 1][0] == DIFF_EQUAL && - diffs[pointer + 1][0] == DIFF_EQUAL - ) { - // This is a single edit surrounded by equalities. - if ( - diffs[pointer][1].substring( - diffs[pointer][1].length - diffs[pointer - 1][1].length - ) == diffs[pointer - 1][1] - ) { - // Shift the edit over the previous equality. - diffs[pointer][1] = - diffs[pointer - 1][1] + - diffs[pointer][1].substring( - 0, - diffs[pointer][1].length - diffs[pointer - 1][1].length - ) - diffs[pointer + 1][1] = diffs[pointer - 1][1] + diffs[pointer + 1][1] - diffs.splice(pointer - 1, 1) - changes = true - } else if ( - diffs[pointer][1].substring(0, diffs[pointer + 1][1].length) == - diffs[pointer + 1][1] - ) { - // Shift the edit over the next equality. - diffs[pointer - 1][1] += diffs[pointer + 1][1] - diffs[pointer][1] = - diffs[pointer][1].substring(diffs[pointer + 1][1].length) + - diffs[pointer + 1][1] - diffs.splice(pointer + 1, 1) - changes = true - } - } - pointer++ - } - // If shifts were made, the diff needs reordering and another shift sweep. - if (changes) { - this.diff_cleanupMerge(diffs) - } -} - -/** - * loc is a location in text1, compute and return the equivalent location in - * text2. - * e.g. 'The cat' vs 'The big cat', 1->1, 5->8 - * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples. - * @param {number} loc Location within text1. - * @return {number} Location within text2. - */ -diff_match_patch.prototype.diff_xIndex = function (diffs, loc) { - var chars1 = 0 - var chars2 = 0 - var last_chars1 = 0 - var last_chars2 = 0 - var x - for (x = 0; x < diffs.length; x++) { - if (diffs[x][0] !== DIFF_INSERT) { - // Equality or deletion. - chars1 += diffs[x][1].length - } - if (diffs[x][0] !== DIFF_DELETE) { - // Equality or insertion. - chars2 += diffs[x][1].length - } - if (chars1 > loc) { - // Overshot the location. - break - } - last_chars1 = chars1 - last_chars2 = chars2 - } - // Was the location deleted? - if (diffs.length != x && diffs[x][0] === DIFF_DELETE) { - return last_chars2 - } - // Add the remaining character length. - return last_chars2 + (loc - last_chars1) -} - -/** - * Convert a diff array into a pretty HTML report. - * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples. - * @return {string} HTML representation. - */ -diff_match_patch.prototype.diff_prettyHtml = function (diffs) { - var html = [] - var pattern_amp = /&/g - var pattern_lt = /</g - var pattern_gt = />/g - var pattern_para = /\n/g - for (var x = 0; x < diffs.length; x++) { - var op = diffs[x][0] // Operation (insert, delete, equal) - var data = diffs[x][1] // Text of change. - var text = data - .replace(pattern_amp, '&amp;') - .replace(pattern_lt, '&lt;') - .replace(pattern_gt, '&gt;') - .replace(pattern_para, '&para;<br>')
- switch (op) { - case DIFF_INSERT: - html[x] = '<ins style="background:#e6ffe6;">' + text + '</ins>' - break - case DIFF_DELETE: - html[x] = '<del style="background:#ffe6e6;">' + text + '</del>' - break - case DIFF_EQUAL: - html[x] = '<span>' + text + '</span>' - break - } - } - return html.join('') -} - -/** - * Compute and return the source text (all equalities and deletions). - * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples. - * @return {string} Source text. - */ -diff_match_patch.prototype.diff_text1 = function (diffs) { - var text = [] - for (var x = 0; x < diffs.length; x++) { - if (diffs[x][0] !== DIFF_INSERT) { - text[x] = diffs[x][1] - } - } - return text.join('') -} - -/** - * Compute and return the destination text (all equalities and insertions). - * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples. - * @return {string} Destination text. - */ -diff_match_patch.prototype.diff_text2 = function (diffs) { - var text = [] - for (var x = 0; x < diffs.length; x++) { - if (diffs[x][0] !== DIFF_DELETE) { - text[x] = diffs[x][1] - } - } - return text.join('') -} - -/** - * Compute the Levenshtein distance; the number of inserted, deleted or - * substituted characters. - * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples. - * @return {number} Number of changes. - */ -diff_match_patch.prototype.diff_levenshtein = function (diffs) { - var levenshtein = 0 - var insertions = 0 - var deletions = 0 - for (var x = 0; x < diffs.length; x++) { - var op = diffs[x][0] - var data = diffs[x][1] - switch (op) { - case DIFF_INSERT: - insertions += data.length - break - case DIFF_DELETE: - deletions += data.length - break - case DIFF_EQUAL: - // A deletion and an insertion is one substitution. - levenshtein += Math.max(insertions, deletions) - insertions = 0 - deletions = 0 - break - } - } - levenshtein += Math.max(insertions, deletions) - return levenshtein -} - -/** - * Crush the diff into an encoded string which describes the operations - * required to transform text1 into text2. - * E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'. - * Operations are tab-separated. Inserted text is escaped using %xx notation. - * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples. - * @return {string} Delta text. - */ -diff_match_patch.prototype.diff_toDelta = function (diffs) { - var text = [] - for (var x = 0; x < diffs.length; x++) { - switch (diffs[x][0]) { - case DIFF_INSERT: - text[x] = '+' + encodeURI(diffs[x][1]) - break - case DIFF_DELETE: - text[x] = '-' + diffs[x][1].length - break - case DIFF_EQUAL: - text[x] = '=' + diffs[x][1].length - break - } - } - return text.join('\t').replace(/%20/g, ' ') -} - -/** - * Given the original text1, and an encoded string which describes the - * operations required to transform text1 into text2, compute the full diff. - * @param {string} text1 Source string for the diff. - * @param {string} delta Delta text. - * @return {!Array.<!diff_match_patch.Diff>} Array of diff tuples. - * @throws {!Error} If invalid input. - */ -diff_match_patch.prototype.diff_fromDelta = function (text1, delta) { - var diffs = [] - var diffsLength = 0 // Keeping our own length var is faster in JS. - var pointer = 0 // Cursor in text1 - var tokens = delta.split(/\t/g) - for (var x = 0; x < tokens.length; x++) { - // Each token begins with a one character parameter which specifies the - // operation of this token (delete, insert, equality). - var param = tokens[x].substring(1) - switch (tokens[x].charAt(0)) { - case '+': - try { - diffs[diffsLength++] = [DIFF_INSERT, decodeURI(param)] - } catch (ex) { - // Malformed URI sequence.
- throw new Error('Illegal escape in diff_fromDelta: ' + param) - } - break - case '-': - // Fall through. - case '=': - var n = parseInt(param, 10) - if (isNaN(n) || n < 0) { - throw new Error('Invalid number in diff_fromDelta: ' + param) - } - var text = text1.substring(pointer, (pointer += n)) - if (tokens[x].charAt(0) == '=') { - diffs[diffsLength++] = [DIFF_EQUAL, text] - } else { - diffs[diffsLength++] = [DIFF_DELETE, text] - } - break - default: - // Blank tokens are ok (from a trailing \t). - // Anything else is an error. - if (tokens[x]) { - throw new Error( - 'Invalid diff operation in diff_fromDelta: ' + tokens[x] - ) - } - } - } - if (pointer != text1.length) { - throw new Error( - 'Delta length (' + - pointer + - ') does not equal source text length (' + - text1.length + - ').' - ) - } - return diffs -} - -// MATCH FUNCTIONS - -/** - * Locate the best instance of 'pattern' in 'text' near 'loc'. - * @param {string} text The text to search. - * @param {string} pattern The pattern to search for. - * @param {number} loc The location to search around. - * @return {number} Best match index or -1. - */ -diff_match_patch.prototype.match_main = function (text, pattern, loc) { - // Check for null inputs. - if (text == null || pattern == null || loc == null) { - throw new Error('Null input. (match_main)') - } - - loc = Math.max(0, Math.min(loc, text.length)) - if (text == pattern) { - // Shortcut (potentially not guaranteed by the algorithm) - return 0 - } else if (!text.length) { - // Nothing to match. - return -1 - } else if (text.substring(loc, loc + pattern.length) == pattern) { - // Perfect match at the perfect spot! (Includes case of null pattern) - return loc - } else { - // Do a fuzzy compare. - return this.match_bitap_(text, pattern, loc) - } -} - -/** - * Locate the best instance of 'pattern' in 'text' near 'loc' using the - * Bitap algorithm. - * @param {string} text The text to search. - * @param {string} pattern The pattern to search for. - * @param {number} loc The location to search around. - * @return {number} Best match index or -1. - * @private - */ -diff_match_patch.prototype.match_bitap_ = function (text, pattern, loc) { - if (pattern.length > this.Match_MaxBits) { - throw new Error('Pattern too long for this browser.') - } - - // Initialise the alphabet. - var s = this.match_alphabet_(pattern) - - var dmp = this // 'this' becomes 'window' in a closure. - - /** - * Compute and return the score for a match with e errors and x location. - * Accesses loc and pattern through being a closure. - * @param {number} e Number of errors in match. - * @param {number} x Location of match. - * @return {number} Overall score for match (0.0 = good, 1.0 = bad). - * @private - */ - function match_bitapScore_(e, x) { - var accuracy = e / pattern.length - var proximity = Math.abs(loc - x) - if (!dmp.Match_Distance) { - // Dodge divide by zero error. - return proximity ? 1.0 : accuracy - } - return accuracy + proximity / dmp.Match_Distance - } - - // Highest score beyond which we give up. - var score_threshold = this.Match_Threshold - // Is there a nearby exact match? (speedup) - var best_loc = text.indexOf(pattern, loc) - if (best_loc != -1) { - score_threshold = Math.min(match_bitapScore_(0, best_loc), score_threshold) - // What about in the other direction? (speedup) - best_loc = text.lastIndexOf(pattern, loc + pattern.length) - if (best_loc != -1) { - score_threshold = Math.min( - match_bitapScore_(0, best_loc), - score_threshold - ) - } - } - - // Initialise the bit arrays. 
- var matchmask = 1 << (pattern.length - 1) - best_loc = -1 - - var bin_min, bin_mid - var bin_max = pattern.length + text.length - var last_rd - for (var d = 0; d < pattern.length; d++) { - // Scan for the best match; each iteration allows for one more error. - // Run a binary search to determine how far from 'loc' we can stray at this - // error level. - bin_min = 0 - bin_mid = bin_max - while (bin_min < bin_mid) { - if (match_bitapScore_(d, loc + bin_mid) <= score_threshold) { - bin_min = bin_mid - } else { - bin_max = bin_mid - } - bin_mid = Math.floor((bin_max - bin_min) / 2 + bin_min) - } - // Use the result from this iteration as the maximum for the next. - bin_max = bin_mid - var start = Math.max(1, loc - bin_mid + 1) - var finish = Math.min(loc + bin_mid, text.length) + pattern.length - - var rd = Array(finish + 2) - rd[finish + 1] = (1 << d) - 1 - for (var j = finish; j >= start; j--) { - // The alphabet (s) is a sparse hash, so the following line generates - // warnings. - var charMatch = s[text.charAt(j - 1)] - if (d === 0) { - // First pass: exact match. - rd[j] = ((rd[j + 1] << 1) | 1) & charMatch - } else { - // Subsequent passes: fuzzy match. - rd[j] = - (((rd[j + 1] << 1) | 1) & charMatch) | - (((last_rd[j + 1] | last_rd[j]) << 1) | 1) | - last_rd[j + 1] - } - if (rd[j] & matchmask) { - var score = match_bitapScore_(d, j - 1) - // This match will almost certainly be better than any existing match. - // But check anyway. - if (score <= score_threshold) { - // Told you so. - score_threshold = score - best_loc = j - 1 - if (best_loc > loc) { - // When passing loc, don't exceed our current distance from loc. - start = Math.max(1, 2 * loc - best_loc) - } else { - // Already passed loc, downhill from here on in. - break - } - } - } - } - // No hope for a (better) match at greater error levels. - if (match_bitapScore_(d + 1, loc) > score_threshold) { - break - } - last_rd = rd - } - return best_loc -} - -/** - * Initialise the alphabet for the Bitap algorithm. - * @param {string} pattern The text to encode. - * @return {!Object} Hash of character locations. - * @private - */ -diff_match_patch.prototype.match_alphabet_ = function (pattern) { - var s = {} - for (var i = 0; i < pattern.length; i++) { - s[pattern.charAt(i)] = 0 - } - for (var i = 0; i < pattern.length; i++) { - s[pattern.charAt(i)] |= 1 << (pattern.length - i - 1) - } - return s -} - -// PATCH FUNCTIONS - -/** - * Increase the context until it is unique, - * but don't let the pattern expand beyond Match_MaxBits. - * @param {!diff_match_patch.patch_obj} patch The patch to grow. - * @param {string} text Source text. - * @private - */ -diff_match_patch.prototype.patch_addContext_ = function (patch, text) { - if (text.length == 0) { - return - } - var pattern = text.substring(patch.start2, patch.start2 + patch.length1) - var padding = 0 - - // Look for the first and last matches of pattern in text. If two different - // matches are found, increase the pattern length. - while ( - text.indexOf(pattern) != text.lastIndexOf(pattern) && - pattern.length < this.Match_MaxBits - this.Patch_Margin - this.Patch_Margin - ) { - padding += this.Patch_Margin - pattern = text.substring( - patch.start2 - padding, - patch.start2 + patch.length1 + padding - ) - } - // Add one chunk for good luck. - padding += this.Patch_Margin - - // Add the prefix. - var prefix = text.substring(patch.start2 - padding, patch.start2) - if (prefix) { - patch.diffs.unshift([DIFF_EQUAL, prefix]) - } - // Add the suffix. 
- var suffix = text.substring( - patch.start2 + patch.length1, - patch.start2 + patch.length1 + padding - ) - if (suffix) { - patch.diffs.push([DIFF_EQUAL, suffix]) - } - - // Roll back the start points. - patch.start1 -= prefix.length - patch.start2 -= prefix.length - // Extend the lengths. - patch.length1 += prefix.length + suffix.length - patch.length2 += prefix.length + suffix.length -} - -/** - * Compute a list of patches to turn text1 into text2. - * Use diffs if provided, otherwise compute it ourselves. - * There are four ways to call this function, depending on what data is - * available to the caller: - * Method 1: - * a = text1, b = text2 - * Method 2: - * a = diffs - * Method 3 (optimal): - * a = text1, b = diffs - * Method 4 (deprecated, use method 3): - * a = text1, b = text2, c = diffs - * - * @param {string|!Array.} a text1 (methods 1,3,4) or - * Array of diff tuples for text1 to text2 (method 2). - * @param {string|!Array.} opt_b text2 (methods 1,4) or - * Array of diff tuples for text1 to text2 (method 3) or undefined (method 2). - * @param {string|!Array.} opt_c Array of diff tuples - * for text1 to text2 (method 4) or undefined (methods 1,2,3). - * @return {!Array.} Array of Patch objects. - */ -diff_match_patch.prototype.patch_make = function (a, opt_b, opt_c) { - var text1, diffs - if ( - typeof a == 'string' && - typeof opt_b == 'string' && - typeof opt_c == 'undefined' - ) { - // Method 1: text1, text2 - // Compute diffs from text1 and text2. - text1 = /** @type {string} */ (a) - diffs = this.diff_main(text1, /** @type {string} */ (opt_b), true) - if (diffs.length > 2) { - this.diff_cleanupSemantic(diffs) - this.diff_cleanupEfficiency(diffs) - } - } else if ( - a && - typeof a == 'object' && - typeof opt_b == 'undefined' && - typeof opt_c == 'undefined' - ) { - // Method 2: diffs - // Compute text1 from diffs. - diffs = /** @type {!Array.} */ (a) - text1 = this.diff_text1(diffs) - } else if ( - typeof a == 'string' && - opt_b && - typeof opt_b == 'object' && - typeof opt_c == 'undefined' - ) { - // Method 3: text1, diffs - text1 = /** @type {string} */ (a) - diffs = /** @type {!Array.} */ (opt_b) - } else if ( - typeof a == 'string' && - typeof opt_b == 'string' && - opt_c && - typeof opt_c == 'object' - ) { - // Method 4: text1, text2, diffs - // text2 is not used. - text1 = /** @type {string} */ (a) - diffs = /** @type {!Array.} */ (opt_c) - } else { - throw new Error('Unknown call format to patch_make.') - } - - if (diffs.length === 0) { - return [] // Get rid of the null case. - } - var patches = [] - var patch = new diff_match_patch.patch_obj() - var patchDiffLength = 0 // Keeping our own length var is faster in JS. - var char_count1 = 0 // Number of characters into the text1 string. - var char_count2 = 0 // Number of characters into the text2 string. - // Start with text1 (prepatch_text) and apply the diffs until we arrive at - // text2 (postpatch_text). We recreate the patches one by one to determine - // context info. - var prepatch_text = text1 - var postpatch_text = text1 - for (var x = 0; x < diffs.length; x++) { - var diff_type = diffs[x][0] - var diff_text = diffs[x][1] - - if (!patchDiffLength && diff_type !== DIFF_EQUAL) { - // A new patch starts here. 
- patch.start1 = char_count1 - patch.start2 = char_count2 - } - - switch (diff_type) { - case DIFF_INSERT: - patch.diffs[patchDiffLength++] = diffs[x] - patch.length2 += diff_text.length - postpatch_text = - postpatch_text.substring(0, char_count2) + - diff_text + - postpatch_text.substring(char_count2) - break - case DIFF_DELETE: - patch.length1 += diff_text.length - patch.diffs[patchDiffLength++] = diffs[x] - postpatch_text = - postpatch_text.substring(0, char_count2) + - postpatch_text.substring(char_count2 + diff_text.length) - break - case DIFF_EQUAL: - if ( - diff_text.length <= 2 * this.Patch_Margin && - patchDiffLength && - diffs.length != x + 1 - ) { - // Small equality inside a patch. - patch.diffs[patchDiffLength++] = diffs[x] - patch.length1 += diff_text.length - patch.length2 += diff_text.length - } else if (diff_text.length >= 2 * this.Patch_Margin) { - // Time for a new patch. - if (patchDiffLength) { - this.patch_addContext_(patch, prepatch_text) - patches.push(patch) - patch = new diff_match_patch.patch_obj() - patchDiffLength = 0 - // Unlike Unidiff, our patch lists have a rolling context. - // http://code.google.com/p/google-diff-match-patch/wiki/Unidiff - // Update prepatch text & pos to reflect the application of the - // just completed patch. - prepatch_text = postpatch_text - char_count1 = char_count2 - } - } - break - } - - // Update the current character count. - if (diff_type !== DIFF_INSERT) { - char_count1 += diff_text.length - } - if (diff_type !== DIFF_DELETE) { - char_count2 += diff_text.length - } - } - // Pick up the leftover patch if not empty. - if (patchDiffLength) { - this.patch_addContext_(patch, prepatch_text) - patches.push(patch) - } - - return patches -} - -/** - * Given an array of patches, return another array that is identical. - * @param {!Array.} patches Array of Patch objects. - * @return {!Array.} Array of Patch objects. - */ -diff_match_patch.prototype.patch_deepCopy = function (patches) { - // Making deep copies is hard in JavaScript. - var patchesCopy = [] - for (var x = 0; x < patches.length; x++) { - var patch = patches[x] - var patchCopy = new diff_match_patch.patch_obj() - patchCopy.diffs = [] - for (var y = 0; y < patch.diffs.length; y++) { - patchCopy.diffs[y] = patch.diffs[y].slice() - } - patchCopy.start1 = patch.start1 - patchCopy.start2 = patch.start2 - patchCopy.length1 = patch.length1 - patchCopy.length2 = patch.length2 - patchesCopy[x] = patchCopy - } - return patchesCopy -} - -/** - * Merge a set of patches onto the text. Return a patched text, as well - * as a list of true/false values indicating which patches were applied. - * @param {!Array.} patches Array of Patch objects. - * @param {string} text Old text. - * @return {!Array.>} Two element Array, containing the - * new text and an array of boolean values. - */ -diff_match_patch.prototype.patch_apply = function (patches, text) { - if (patches.length == 0) { - return [text, []] - } - - // Deep copy the patches so that no changes are made to originals. - patches = this.patch_deepCopy(patches) - - var nullPadding = this.patch_addPadding(patches) - text = nullPadding + text + nullPadding - - this.patch_splitMax(patches) - // delta keeps track of the offset between the expected and actual location - // of the previous patch. If there are patches expected at positions 10 and - // 20, but the first patch was found at 12, delta is 2 and the second patch - // has an effective expected position of 22. 
- var delta = 0 - var results = [] - for (var x = 0; x < patches.length; x++) { - var expected_loc = patches[x].start2 + delta - var text1 = this.diff_text1(patches[x].diffs) - var start_loc - var end_loc = -1 - if (text1.length > this.Match_MaxBits) { - // patch_splitMax will only provide an oversized pattern in the case of - // a monster delete. - start_loc = this.match_main( - text, - text1.substring(0, this.Match_MaxBits), - expected_loc - ) - if (start_loc != -1) { - end_loc = this.match_main( - text, - text1.substring(text1.length - this.Match_MaxBits), - expected_loc + text1.length - this.Match_MaxBits - ) - if (end_loc == -1 || start_loc >= end_loc) { - // Can't find valid trailing context. Drop this patch. - start_loc = -1 - } - } - } else { - start_loc = this.match_main(text, text1, expected_loc) - } - if (start_loc == -1) { - // No match found. :( - results[x] = false - // Subtract the delta for this failed patch from subsequent patches. - delta -= patches[x].length2 - patches[x].length1 - } else { - // Found a match. :) - results[x] = true - delta = start_loc - expected_loc - var text2 - if (end_loc == -1) { - text2 = text.substring(start_loc, start_loc + text1.length) - } else { - text2 = text.substring(start_loc, end_loc + this.Match_MaxBits) - } - if (text1 == text2) { - // Perfect match, just shove the replacement text in. - text = - text.substring(0, start_loc) + - this.diff_text2(patches[x].diffs) + - text.substring(start_loc + text1.length) - } else { - // Imperfect match. Run a diff to get a framework of equivalent - // indices. - var diffs = this.diff_main(text1, text2, false) - if ( - text1.length > this.Match_MaxBits && - this.diff_levenshtein(diffs) / text1.length > - this.Patch_DeleteThreshold - ) { - // The end points match, but the content is unacceptably bad. - results[x] = false - } else { - this.diff_cleanupSemanticLossless(diffs) - var index1 = 0 - var index2 - for (var y = 0; y < patches[x].diffs.length; y++) { - var mod = patches[x].diffs[y] - if (mod[0] !== DIFF_EQUAL) { - index2 = this.diff_xIndex(diffs, index1) - } - if (mod[0] === DIFF_INSERT) { - // Insertion - text = - text.substring(0, start_loc + index2) + - mod[1] + - text.substring(start_loc + index2) - } else if (mod[0] === DIFF_DELETE) { - // Deletion - text = - text.substring(0, start_loc + index2) + - text.substring( - start_loc + this.diff_xIndex(diffs, index1 + mod[1].length) - ) - } - if (mod[0] !== DIFF_DELETE) { - index1 += mod[1].length - } - } - } - } - } - } - // Strip the padding off. - text = text.substring(nullPadding.length, text.length - nullPadding.length) - return [text, results] -} - -/** - * Add some padding on text start and end so that edges can match something. - * Intended to be called only from within patch_apply. - * @param {!Array.} patches Array of Patch objects. - * @return {string} The padding string added to each side. - */ -diff_match_patch.prototype.patch_addPadding = function (patches) { - var paddingLength = this.Patch_Margin - var nullPadding = '' - for (var x = 1; x <= paddingLength; x++) { - nullPadding += String.fromCharCode(x) - } - - // Bump all the patches forward. - for (var x = 0; x < patches.length; x++) { - patches[x].start1 += paddingLength - patches[x].start2 += paddingLength - } - - // Add some padding on start of first diff. - var patch = patches[0] - var diffs = patch.diffs - if (diffs.length == 0 || diffs[0][0] != DIFF_EQUAL) { - // Add nullPadding equality. 
- diffs.unshift([DIFF_EQUAL, nullPadding]) - patch.start1 -= paddingLength // Should be 0. - patch.start2 -= paddingLength // Should be 0. - patch.length1 += paddingLength - patch.length2 += paddingLength - } else if (paddingLength > diffs[0][1].length) { - // Grow first equality. - var extraLength = paddingLength - diffs[0][1].length - diffs[0][1] = nullPadding.substring(diffs[0][1].length) + diffs[0][1] - patch.start1 -= extraLength - patch.start2 -= extraLength - patch.length1 += extraLength - patch.length2 += extraLength - } - - // Add some padding on end of last diff. - patch = patches[patches.length - 1] - diffs = patch.diffs - if (diffs.length == 0 || diffs[diffs.length - 1][0] != DIFF_EQUAL) { - // Add nullPadding equality. - diffs.push([DIFF_EQUAL, nullPadding]) - patch.length1 += paddingLength - patch.length2 += paddingLength - } else if (paddingLength > diffs[diffs.length - 1][1].length) { - // Grow last equality. - var extraLength = paddingLength - diffs[diffs.length - 1][1].length - diffs[diffs.length - 1][1] += nullPadding.substring(0, extraLength) - patch.length1 += extraLength - patch.length2 += extraLength - } - - return nullPadding -} - -/** - * Look through the patches and break up any which are longer than the maximum - * limit of the match algorithm. - * Intended to be called only from within patch_apply. - * @param {!Array.} patches Array of Patch objects. - */ -diff_match_patch.prototype.patch_splitMax = function (patches) { - var patch_size = this.Match_MaxBits - for (var x = 0; x < patches.length; x++) { - if (patches[x].length1 <= patch_size) { - continue - } - var bigpatch = patches[x] - // Remove the big old patch. - patches.splice(x--, 1) - var start1 = bigpatch.start1 - var start2 = bigpatch.start2 - var precontext = '' - while (bigpatch.diffs.length !== 0) { - // Create one of several smaller patches. - var patch = new diff_match_patch.patch_obj() - var empty = true - patch.start1 = start1 - precontext.length - patch.start2 = start2 - precontext.length - if (precontext !== '') { - patch.length1 = patch.length2 = precontext.length - patch.diffs.push([DIFF_EQUAL, precontext]) - } - while ( - bigpatch.diffs.length !== 0 && - patch.length1 < patch_size - this.Patch_Margin - ) { - var diff_type = bigpatch.diffs[0][0] - var diff_text = bigpatch.diffs[0][1] - if (diff_type === DIFF_INSERT) { - // Insertions are harmless. - patch.length2 += diff_text.length - start2 += diff_text.length - patch.diffs.push(bigpatch.diffs.shift()) - empty = false - } else if ( - diff_type === DIFF_DELETE && - patch.diffs.length == 1 && - patch.diffs[0][0] == DIFF_EQUAL && - diff_text.length > 2 * patch_size - ) { - // This is a large deletion. Let it pass in one chunk. - patch.length1 += diff_text.length - start1 += diff_text.length - empty = false - patch.diffs.push([diff_type, diff_text]) - bigpatch.diffs.shift() - } else { - // Deletion or equality. Only take as much as we can stomach. - diff_text = diff_text.substring( - 0, - patch_size - patch.length1 - this.Patch_Margin - ) - patch.length1 += diff_text.length - start1 += diff_text.length - if (diff_type === DIFF_EQUAL) { - patch.length2 += diff_text.length - start2 += diff_text.length - } else { - empty = false - } - patch.diffs.push([diff_type, diff_text]) - if (diff_text == bigpatch.diffs[0][1]) { - bigpatch.diffs.shift() - } else { - bigpatch.diffs[0][1] = bigpatch.diffs[0][1].substring( - diff_text.length - ) - } - } - } - // Compute the head context for the next patch. 
- precontext = this.diff_text2(patch.diffs) - precontext = precontext.substring(precontext.length - this.Patch_Margin) - // Append the end context for this patch. - var postcontext = this.diff_text1(bigpatch.diffs).substring( - 0, - this.Patch_Margin - ) - if (postcontext !== '') { - patch.length1 += postcontext.length - patch.length2 += postcontext.length - if ( - patch.diffs.length !== 0 && - patch.diffs[patch.diffs.length - 1][0] === DIFF_EQUAL - ) { - patch.diffs[patch.diffs.length - 1][1] += postcontext - } else { - patch.diffs.push([DIFF_EQUAL, postcontext]) - } - } - if (!empty) { - patches.splice(++x, 0, patch) - } - } - } -} - -/** - * Take a list of patches and return a textual representation. - * @param {!Array.} patches Array of Patch objects. - * @return {string} Text representation of patches. - */ -diff_match_patch.prototype.patch_toText = function (patches) { - var text = [] - for (var x = 0; x < patches.length; x++) { - text[x] = patches[x] - } - return text.join('') -} - -/** - * Parse a textual representation of patches and return a list of Patch objects. - * @param {string} textline Text representation of patches. - * @return {!Array.} Array of Patch objects. - * @throws {!Error} If invalid input. - */ -diff_match_patch.prototype.patch_fromText = function (textline) { - var patches = [] - if (!textline) { - return patches - } - var text = textline.split('\n') - var textPointer = 0 - var patchHeader = /^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$/ - while (textPointer < text.length) { - var m = text[textPointer].match(patchHeader) - if (!m) { - throw new Error('Invalid patch string: ' + text[textPointer]) - } - var patch = new diff_match_patch.patch_obj() - patches.push(patch) - patch.start1 = parseInt(m[1], 10) - if (m[2] === '') { - patch.start1-- - patch.length1 = 1 - } else if (m[2] == '0') { - patch.length1 = 0 - } else { - patch.start1-- - patch.length1 = parseInt(m[2], 10) - } - - patch.start2 = parseInt(m[3], 10) - if (m[4] === '') { - patch.start2-- - patch.length2 = 1 - } else if (m[4] == '0') { - patch.length2 = 0 - } else { - patch.start2-- - patch.length2 = parseInt(m[4], 10) - } - textPointer++ - - while (textPointer < text.length) { - var sign = text[textPointer].charAt(0) - try { - var line = decodeURI(text[textPointer].substring(1)) - } catch (ex) { - // Malformed URI sequence. - throw new Error('Illegal escape in patch_fromText: ' + line) - } - if (sign == '-') { - // Deletion. - patch.diffs.push([DIFF_DELETE, line]) - } else if (sign == '+') { - // Insertion. - patch.diffs.push([DIFF_INSERT, line]) - } else if (sign == ' ') { - // Minor equality. - patch.diffs.push([DIFF_EQUAL, line]) - } else if (sign == '@') { - // Start of next patch. - break - } else if (sign === '') { - // Blank line? Whatever. - } else { - // WTF? - throw new Error('Invalid patch mode "' + sign + '" in: ' + line) - } - textPointer++ - } - } - return patches -} - -/** - * Class representing one patch operation. - * @constructor - */ -diff_match_patch.patch_obj = function () { - /** @type {!Array.} */ - this.diffs = [] - /** @type {?number} */ - this.start1 = null - /** @type {?number} */ - this.start2 = null - /** @type {number} */ - this.length1 = 0 - /** @type {number} */ - this.length2 = 0 -} - -/** - * Emmulate GNU diff's format. - * Header: @@ -382,8 +481,9 @@ - * Indicies are printed as 1-based, not 0-based. - * @return {string} The GNU diff string. 
- */ -diff_match_patch.patch_obj.prototype.toString = function () { - var coords1, coords2 - if (this.length1 === 0) { - coords1 = this.start1 + ',0' - } else if (this.length1 == 1) { - coords1 = this.start1 + 1 - } else { - coords1 = this.start1 + 1 + ',' + this.length1 - } - if (this.length2 === 0) { - coords2 = this.start2 + ',0' - } else if (this.length2 == 1) { - coords2 = this.start2 + 1 - } else { - coords2 = this.start2 + 1 + ',' + this.length2 - } - var text = ['@@ -' + coords1 + ' +' + coords2 + ' @@\n'] - var op - // Escape the body of the patch with %xx notation. - for (var x = 0; x < this.diffs.length; x++) { - switch (this.diffs[x][0]) { - case DIFF_INSERT: - op = '+' - break - case DIFF_DELETE: - op = '-' - break - case DIFF_EQUAL: - op = ' ' - break - } - text[x + 1] = op + encodeURI(this.diffs[x][1]) + '\n' - } - return text.join('').replace(/%20/g, ' ') -} - -// Export these global variables so that they survive Google's JS compiler. -// In a browser, 'this' will be 'window'. -// Users of node.js should 'require' the uncompressed version since Google's -// JS compiler may break the following exports for non-browser environments. -this['diff_match_patch'] = diff_match_patch -this['DIFF_DELETE'] = DIFF_DELETE -this['DIFF_INSERT'] = DIFF_INSERT -this['DIFF_EQUAL'] = DIFF_EQUAL diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 707013ae89..aa7413cd25 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1475,7 +1475,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" + "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, "damerau-levenshtein": { "version": "1.0.6", @@ -1583,6 +1583,10 @@ "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", "dev": true }, + "diff-match-patch": { + "version": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz", + "integrity": "sha512-rX+9ry8tosctHzJfYG9Vjpof6wTYYA/oFHnzpv6O1vkUd+5dTc9LpZCTUv+FK8i4grpITxY8BYSk8A3u4anwJQ==" + }, "dlv": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", @@ -3463,12 +3467,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" + "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, "lodash.defaults": { "version": "4.2.0", @@ -3489,7 +3493,7 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" + "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" }, "lodash.memoize": { "version": "4.1.2", @@ -5262,7 +5266,7 @@ "redis-commands": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==" + 
"integrity": "sha1-gNLiBpj+aI8icSf/nlFkp90X54U=" }, "redis-errors": { "version": "1.2.0", @@ -5479,7 +5483,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", + "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", "optional": true }, "safer-buffer": { @@ -5773,7 +5777,7 @@ "standard-as-callback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" + "integrity": "sha1-7YuyVkjhWDF1m2Ajvbh+a2CzgSY=" }, "statuses": { "version": "1.5.0", @@ -5892,7 +5896,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" }, "supports-color": { "version": "5.4.0", @@ -6024,12 +6028,12 @@ "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" + "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", + "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", "requires": { "to-space-case": "^1.0.0" } @@ -6037,7 +6041,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", + "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", "requires": { "to-no-case": "^1.0.0" } diff --git a/services/document-updater/package.json b/services/document-updater/package.json index c8ac5229ce..8ea71edddb 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -24,6 +24,7 @@ "async": "^2.5.0", "body-parser": "^1.19.0", "bunyan": "~0.22.1", + "diff-match-patch": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz", "express": "4.17.1", "lodash": "^4.17.19", "logger-sharelatex": "^1.9.1", From ab6705481d52ccf59d3d886f545ba4893836d374 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 13 Dec 2020 07:35:06 +0000 Subject: [PATCH 713/769] Bump ini from 1.3.5 to 1.3.8 Bumps [ini](https://github.com/isaacs/ini) from 1.3.5 to 1.3.8. 
- [Release notes](https://github.com/isaacs/ini/releases) - [Commits](https://github.com/isaacs/ini/compare/v1.3.5...v1.3.8) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index aa7413cd25..00bed42828 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -3051,9 +3051,9 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "ini": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==" + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, "inquirer": { "version": "7.3.3", From 9cbd8751a8f608749746af761aa41c05d36b8de8 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Wed, 6 Jan 2021 09:57:08 +0000 Subject: [PATCH 714/769] Update Node to 10.23.1 --- services/document-updater/.nvmrc | 2 +- services/document-updater/Dockerfile | 2 +- services/document-updater/buildscript.txt | 2 +- services/document-updater/docker-compose.yml | 4 ++-- services/document-updater/package-lock.json | 22 ++++++++++---------- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index c2f6421352..2baa2d433a 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -10.22.1 +10.23.1 diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index f0e362fca0..2da67d2436 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -2,7 +2,7 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -FROM node:10.22.1 as base +FROM node:10.23.1 as base WORKDIR /app diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index 72b71fe987..0d8b15d9f5 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -3,6 +3,6 @@ document-updater --docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= ---node-version=10.22.1 +--node-version=10.23.1 --public-repo=True --script-version=3.4.0 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 4a16f5ecb1..0db448f9b5 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -6,7 +6,7 @@ version: "2.3" services: test_unit: - image: node:10.22.1 + image: node:10.23.1 volumes: - .:/app working_dir: /app @@ -18,7 +18,7 @@ services: user: node test_acceptance: - image: node:10.22.1 + image: node:10.23.1 volumes: - .:/app working_dir: /app diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index aa7413cd25..266a40d0c5 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1475,7 +1475,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": 
"sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" + "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, "damerau-levenshtein": { "version": "1.0.6", @@ -3467,12 +3467,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" + "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, "lodash.defaults": { "version": "4.2.0", @@ -3493,7 +3493,7 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" + "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" }, "lodash.memoize": { "version": "4.1.2", @@ -5266,7 +5266,7 @@ "redis-commands": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha1-gNLiBpj+aI8icSf/nlFkp90X54U=" + "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==" }, "redis-errors": { "version": "1.2.0", @@ -5483,7 +5483,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", + "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, "safer-buffer": { @@ -5777,7 +5777,7 @@ "standard-as-callback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha1-7YuyVkjhWDF1m2Ajvbh+a2CzgSY=" + "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" }, "statuses": { "version": "1.5.0", @@ -5896,7 +5896,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" }, "supports-color": { "version": "5.4.0", @@ -6028,12 +6028,12 @@ "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", + "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", "requires": { "to-space-case": "^1.0.0" } @@ -6041,7 +6041,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", + "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", "requires": { "to-no-case": 
"^1.0.0" } From 98f8d7f51c32718e741d4daa60f4fa9aeeee3bfb Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 14 Jan 2021 15:11:15 -0500 Subject: [PATCH 715/769] Set the diff-match-patch timeout to 100ms This might result in worse diffs, but we don't want to spend a second blocking the event loop while we figure out nicer diffs when comparing documents. --- services/document-updater/app/js/DiffCodec.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 359b91e2a7..59b7dee67b 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -1,6 +1,9 @@ const DMP = require('diff-match-patch') const dmp = new DMP() +// Do not attempt to produce a diff for more than 100ms +dmp.Diff_Timeout = 0.1 + module.exports = { ADDED: 1, REMOVED: -1, From 11c8cfc9396ad205496b725b93547ff9f9976f1c Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 2 Feb 2021 15:10:04 +0000 Subject: [PATCH 716/769] shard the pending-updates-list queue --- services/document-updater/app.js | 2 +- .../app/js/DispatchManager.js | 29 +++++++++---------- .../config/settings.defaults.js | 2 +- .../acceptance/js/helpers/DocUpdaterClient.js | 17 ++++++++++- .../DispatchManager/DispatchManagerTests.js | 27 +++++++++++++++-- 5 files changed, 57 insertions(+), 20 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 6d1cc43b82..d8b67dd31e 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -34,7 +34,7 @@ app.use(Metrics.http.monitor(logger)) app.use(bodyParser.json({ limit: Settings.maxJsonRequestSize })) Metrics.injectMetricsRoute(app) -DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10) +DispatchManager.createAndStartDispatchers(Settings.dispatcherCount) app.param('project_id', (req, res, next, projectId) => { if (projectId != null && projectId.match(/^[0-9a-f]{24}$/)) { diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js index aa7c4f1f0e..97bd7eafbd 100644 --- a/services/document-updater/app/js/DispatchManager.js +++ b/services/document-updater/app/js/DispatchManager.js @@ -20,13 +20,21 @@ const logger = require('logger-sharelatex') const Keys = require('./UpdateKeys') const redis = require('@overleaf/redis-wrapper') const Errors = require('./Errors') +const _ = require('lodash') const UpdateManager = require('./UpdateManager') const Metrics = require('./Metrics') const RateLimitManager = require('./RateLimitManager') module.exports = DispatchManager = { - createDispatcher(RateLimiter) { + createDispatcher(RateLimiter, queueShardNumber) { + let pendingListKey + if (queueShardNumber === 0) { + pendingListKey = 'pending-updates-list' + } else { + pendingListKey = `pending-updates-list-${queueShardNumber}` + } + const client = redis.createClient(Settings.redis.documentupdater) var worker = { client, @@ -35,11 +43,8 @@ module.exports = DispatchManager = { callback = function (error) {} } const timer = new Metrics.Timer('worker.waiting') - return worker.client.blpop('pending-updates-list', 0, function ( - error, - result - ) { - logger.log('getting pending-updates-list', error, result) + return worker.client.blpop(pendingListKey, 0, function (error, result) { + logger.log(`getting ${queueShardNumber}`, error, result) timer.done() if (error != null) { return callback(error) @@ -103,15 +108,9 @@ module.exports = 
DispatchManager = { createAndStartDispatchers(number) { const RateLimiter = new RateLimitManager(number) return (() => { - const result = [] - for ( - let i = 1, end = number, asc = end >= 1; - asc ? i <= end : i >= end; - asc ? i++ : i-- - ) { - const worker = DispatchManager.createDispatcher(RateLimiter) - result.push(worker.run()) - } + const result = _.times(number, function (shardNumber) { + return DispatchManager.createDispatcher(RateLimiter, shardNumber).run() + }) return result })() } diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index 0228941382..67be229eef 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -171,7 +171,7 @@ module.exports = { maxJsonRequestSize: parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024, - dispatcherCount: process.env.DISPATCHER_COUNT, + dispatcherCount: process.env.DISPATCHER_COUNT || 10, mongo: { options: { diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 7156da0c26..d4efa453ec 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -1,5 +1,6 @@ let DocUpdaterClient const Settings = require('settings-sharelatex') +const _ = require('lodash') const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) @@ -26,6 +27,15 @@ module.exports = DocUpdaterClient = { rclientSub.on('message', callback) }, + _getPendingUpdateListKey() { + const shard = _.random(0, Settings.dispatcherCount) + if (shard === 0) { + return 'pending-updates-list' + } else { + return `pending-updates-list-${shard}` + } + }, + sendUpdate(projectId, docId, update, callback) { rclient.rpush( keys.pendingUpdates({ doc_id: docId }), @@ -39,7 +49,12 @@ module.exports = DocUpdaterClient = { if (error) { return callback(error) } - rclient.rpush('pending-updates-list', docKey, callback) + + rclient.rpush( + DocUpdaterClient._getPendingUpdateListKey(), + docKey, + callback + ) }) } ) diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js index 0907b14e57..5610c4abc1 100644 --- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -64,7 +64,8 @@ describe('DispatchManager', function () { this.client = { auth: sinon.stub() } this.redis.createClient = sinon.stub().returns(this.client) return (this.worker = this.DispatchManager.createDispatcher( - this.RateLimiter + this.RateLimiter, + 0 )) }) @@ -129,7 +130,7 @@ describe('DispatchManager', function () { }) }) - return describe("with a 'Delete component' error", function () { + describe("with a 'Delete component' error", function () { beforeEach(function () { this.UpdateManager.processOutstandingUpdatesWithLock = sinon .stub() @@ -145,6 +146,28 @@ describe('DispatchManager', function () { return this.callback.called.should.equal(true) }) }) + + describe('pending updates list with shard key', function () { + beforeEach(function (done) { + this.client = { + auth: sinon.stub(), + blpop: sinon.stub().callsArgWith(2) + } + this.redis.createClient = sinon.stub().returns(this.client) + 
this.queueShardNumber = 7 + this.worker = this.DispatchManager.createDispatcher( + this.RateLimiter, + this.queueShardNumber + ) + this.worker._waitForUpdateThenDispatchWorker(done) + }) + + it('should call redis with BLPOP with the correct key', function () { + this.client.blpop + .calledWith(`pending-updates-list-${this.queueShardNumber}`, 0) + .should.equal(true) + }) + }) }) return describe('run', function () { From 40de9997664c275bfae57d11322064700f67fb9a Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 4 Feb 2021 09:30:35 +0000 Subject: [PATCH 717/769] Update config/settings.defaults.js parseint on dispatcher count Co-authored-by: John Lees-Miller --- services/document-updater/config/settings.defaults.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index 67be229eef..37c23792f6 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -171,7 +171,7 @@ module.exports = { maxJsonRequestSize: parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024, - dispatcherCount: process.env.DISPATCHER_COUNT || 10, + dispatcherCount: parseInt(process.env.DISPATCHER_COUNT || 10, 10) mongo: { options: { From bcfc7e66fc51deefcba21269fc718538be30ef3f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 9 Feb 2021 10:32:16 +0000 Subject: [PATCH 718/769] add missing comma in settings file --- services/document-updater/config/settings.defaults.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index 37c23792f6..bac86ff55f 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -171,7 +171,7 @@ module.exports = { maxJsonRequestSize: parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024, - dispatcherCount: parseInt(process.env.DISPATCHER_COUNT || 10, 10) + dispatcherCount: parseInt(process.env.DISPATCHER_COUNT || 10, 10), mongo: { options: { From 0cdeffae6cb91c32325466bd1764d36ae38696c3 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 9 Feb 2021 10:50:37 +0000 Subject: [PATCH 719/769] fix off by 1 error in Doc updater client helper file --- .../test/acceptance/js/helpers/DocUpdaterClient.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index d4efa453ec..719df741c3 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -28,7 +28,7 @@ module.exports = DocUpdaterClient = { }, _getPendingUpdateListKey() { - const shard = _.random(0, Settings.dispatcherCount) + const shard = _.random(0, Settings.dispatcherCount - 1) if (shard === 0) { return 'pending-updates-list' } else { From 854e24bb5784d0a8e8c0d0c1f11f811c132bb5c5 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 15 Feb 2021 14:12:28 +0000 Subject: [PATCH 720/769] remove unneeded anonymous func --- services/document-updater/app/js/DispatchManager.js | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js index 
97bd7eafbd..c600adec7b 100644 --- a/services/document-updater/app/js/DispatchManager.js +++ b/services/document-updater/app/js/DispatchManager.js @@ -107,11 +107,8 @@ module.exports = DispatchManager = { createAndStartDispatchers(number) { const RateLimiter = new RateLimitManager(number) - return (() => { - const result = _.times(number, function (shardNumber) { - return DispatchManager.createDispatcher(RateLimiter, shardNumber).run() - }) - return result - })() + _.times(number, function (shardNumber) { + return DispatchManager.createDispatcher(RateLimiter, shardNumber).run() + }) } } From c7e57cd28fb4ae307e64c3b4ff40513a7efdc226 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 15 Feb 2021 14:16:45 +0000 Subject: [PATCH 721/769] add Dispatchers running on old queue while we migrate revert once migrated --- services/document-updater/app/js/DispatchManager.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js index c600adec7b..19942b67cf 100644 --- a/services/document-updater/app/js/DispatchManager.js +++ b/services/document-updater/app/js/DispatchManager.js @@ -110,5 +110,10 @@ module.exports = DispatchManager = { _.times(number, function (shardNumber) { return DispatchManager.createDispatcher(RateLimiter, shardNumber).run() }) + + // run extra dispatchers on old queue while we migrate + _.times(number, function () { + return DispatchManager.createDispatcher(RateLimiter, 0).run() + }) } } From 20a373d95cd3438108bc8fb038843b2cb509b4e9 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 23 Feb 2021 08:27:29 +0000 Subject: [PATCH 722/769] stop listening on the pending updates channels 10 times --- services/document-updater/app/js/DispatchManager.js | 5 ----- 1 file changed, 5 deletions(-) diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js index 19942b67cf..c600adec7b 100644 --- a/services/document-updater/app/js/DispatchManager.js +++ b/services/document-updater/app/js/DispatchManager.js @@ -110,10 +110,5 @@ module.exports = DispatchManager = { _.times(number, function (shardNumber) { return DispatchManager.createDispatcher(RateLimiter, shardNumber).run() }) - - // run extra dispatchers on old queue while we migrate - _.times(number, function () { - return DispatchManager.createDispatcher(RateLimiter, 0).run() - }) } } From 23738540ed745c5c2632b535b05a8191abf9df1f Mon Sep 17 00:00:00 2001 From: Thomas Date: Wed, 24 Feb 2021 15:09:19 +0100 Subject: [PATCH 723/769] Fix API request errors which could contain API hostname or address (#160) Wrap errors produced by failing requests to web API, and remove the url/hostname from thrown error messages. (But keep the URL path for info.) 
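In condensed form, the pattern applied to PersistenceManager below (names
simplified here; this is an illustrative sketch, not the exact production
code) is to log the raw request error, which can embed the API hostname or
address, and to surface only a generic message or the bare URL path:

    const request = require('request')
    const logger = require('logger-sharelatex')

    function getDocFromWebApi(webApiUrl, projectId, docId, callback) {
      const urlPath = `/project/${projectId}/doc/${docId}`
      request(
        { url: `${webApiUrl}${urlPath}`, method: 'GET', json: true },
        function (error, res, body) {
          if (error != null) {
            // The raw error may contain the hostname/address: keep it in
            // the server-side logs only.
            logger.error({ err: error, projectId, docId }, 'web API request failed')
            return callback(new Error('error connecting to web API'))
          }
          if (res.statusCode >= 200 && res.statusCode < 300) {
            return callback(null, body)
          }
          if (res.statusCode === 404) {
            // Thrown messages keep the URL path for context, never the host.
            // (The real code uses Errors.NotFoundError here.)
            return callback(new Error(`doc not found: ${urlPath}`))
          }
          return callback(
            new Error(`error accessing web API: ${urlPath} ${res.statusCode}`)
          )
        }
      )
    }

The same treatment is applied to both getDoc and setDoc in the diff below.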
--- .../app/js/PersistenceManager.js | 32 +++++++---- .../js/ApplyingUpdatesToADocTests.js | 56 ++++++++++++++++++- .../PersistenceManagerTests.js | 23 ++++++-- 3 files changed, 95 insertions(+), 16 deletions(-) diff --git a/services/document-updater/app/js/PersistenceManager.js b/services/document-updater/app/js/PersistenceManager.js index a9f384afa1..fca23a7c47 100644 --- a/services/document-updater/app/js/PersistenceManager.js +++ b/services/document-updater/app/js/PersistenceManager.js @@ -69,10 +69,10 @@ module.exports = PersistenceManager = { return _callback(...Array.from(args || [])) } - const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}` + const urlPath = `/project/${project_id}/doc/${doc_id}` return request( { - url, + url: `${Settings.apis.web.url}${urlPath}`, method: 'GET', headers: { accept: 'application/json' @@ -88,7 +88,11 @@ module.exports = PersistenceManager = { function (error, res, body) { updateMetric('getDoc', error, res) if (error != null) { - return callback(error) + logger.error( + { err: error, project_id, doc_id }, + 'web API request failed' + ) + return callback(new Error('error connecting to web API')) } if (res.statusCode >= 200 && res.statusCode < 300) { try { @@ -119,10 +123,12 @@ module.exports = PersistenceManager = { body.projectHistoryType ) } else if (res.statusCode === 404) { - return callback(new Errors.NotFoundError(`doc not not found: ${url}`)) + return callback( + new Errors.NotFoundError(`doc not not found: ${urlPath}`) + ) } else { return callback( - new Error(`error accessing web API: ${url} ${res.statusCode}`) + new Error(`error accessing web API: ${urlPath} ${res.statusCode}`) ) } } @@ -148,10 +154,10 @@ module.exports = PersistenceManager = { return _callback(...Array.from(args || [])) } - const url = `${Settings.apis.web.url}/project/${project_id}/doc/${doc_id}` + const urlPath = `/project/${project_id}/doc/${doc_id}` return request( { - url, + url: `${Settings.apis.web.url}${urlPath}`, method: 'POST', json: { lines, @@ -171,15 +177,21 @@ module.exports = PersistenceManager = { function (error, res, body) { updateMetric('setDoc', error, res) if (error != null) { - return callback(error) + logger.error( + { err: error, project_id, doc_id }, + 'web API request failed' + ) + return callback(new Error('error connecting to web API')) } if (res.statusCode >= 200 && res.statusCode < 300) { return callback(null) } else if (res.statusCode === 404) { - return callback(new Errors.NotFoundError(`doc not not found: ${url}`)) + return callback( + new Errors.NotFoundError(`doc not not found: ${urlPath}`) + ) } else { return callback( - new Error(`error accessing web API: ${url} ${res.statusCode}`) + new Error(`error accessing web API: ${urlPath} ${res.statusCode}`) ) } } diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index 918baba141..f460a2d3ff 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -703,7 +703,7 @@ describe('Applying updates to a doc', function () { }) }) - return describe('when the sending duplicate ops', function () { + describe('when the sending duplicate ops', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), @@ -792,4 +792,58 @@ describe('Applying updates to a doc', function () { ).to.equal(true) }) }) + + return 
describe('when sending updates for a non-existing doc id', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId() + ]) + this.non_existing = { + doc_id: this.doc_id, + v: this.version, + op: [{ d: 'content', p: 0 }] + } + + DocUpdaterClient.subscribeToAppliedOps( + (this.messageCallback = sinon.stub()) + ) + + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.non_existing, + (error) => { + if (error != null) { + throw error + } + return setTimeout(done, 200) + } + ) + return null + }) + + it('should not update or create a doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + res.statusCode.should.equal(404) + return done() + } + ) + return null + }) + + return it('should send a message with an error', function () { + this.messageCallback.called.should.equal(true) + const [channel, message] = Array.from(this.messageCallback.args[0]) + channel.should.equal('applied-ops') + return JSON.parse(message).should.deep.include({ + project_id: this.project_id, + doc_id: this.doc_id, + error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}` + }) + }) + }) }) diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js index 645ee8a59b..1013752dee 100644 --- a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -40,7 +40,8 @@ describe('PersistenceManager', function () { }), 'logger-sharelatex': (this.logger = { log: sinon.stub(), - err: sinon.stub() + err: sinon.stub(), + error: sinon.stub() }), './Errors': Errors } @@ -145,8 +146,14 @@ describe('PersistenceManager', function () { ) }) - it('should return the error', function () { - return this.callback.calledWith(this.error).should.equal(true) + it('should return a generic connection error', function () { + return this.callback + .calledWith( + sinon.match + .instanceOf(Error) + .and(sinon.match.has('message', 'error connecting to web API')) + ) + .should.equal(true) }) it('should time the execution', function () { @@ -355,8 +362,14 @@ describe('PersistenceManager', function () { ) }) - it('should return the error', function () { - return this.callback.calledWith(this.error).should.equal(true) + it('should return a generic connection error', function () { + return this.callback + .calledWith( + sinon.match + .instanceOf(Error) + .and(sinon.match.has('message', 'error connecting to web API')) + ) + .should.equal(true) }) it('should time the execution', function () { From dc5bfea1816fd2c58a360f262959029bf09f79f0 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 15 Mar 2021 17:22:21 -0400 Subject: [PATCH 724/769] Global test setup Configure chai and SandboxedModule globally with options used in many tests. That required upgrading mocha and SandboxedModule. 
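The shared configuration lives in a new test/setup.js (created below, though
its full 37-line body is not reproduced in this excerpt) and is loaded via the
"require" option in .mocharc.json. A minimal sketch of the idea, assuming
SandboxedModule's configure() hook for global defaults and the chai
should-style assertions used throughout these tests:

    const chai = require('chai')
    const SandboxedModule = require('sandboxed-module')

    // Register chai's should-style assertions once for every test file.
    chai.should()

    // Give SandboxedModule defaults shared across the unit tests, e.g. a
    // silent logger stub and the globals the modules under test expect.
    // (The stubbed methods here are assumptions for illustration.)
    SandboxedModule.configure({
      requires: {
        'logger-sharelatex': {
          debug() {},
          log() {},
          warn() {},
          error() {}
        }
      },
      globals: { Buffer, JSON, Math, console, process }
    })

Centralising this is what lets the unit test diffs below drop their per-file
SandboxedModule and chai boilerplate.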
--- services/document-updater/.mocharc.json | 3 + services/document-updater/package-lock.json | 581 ++++++++++++++++-- services/document-updater/package.json | 6 +- .../js/ApplyingUpdatesToADocTests.js | 4 +- .../ApplyingUpdatesToProjectStructureTests.js | 2 - .../acceptance/js/DeletingADocumentTests.js | 3 - .../acceptance/js/DeletingAProjectTests.js | 2 - .../acceptance/js/FlushingAProjectTests.js | 2 - .../test/acceptance/js/FlushingDocsTests.js | 4 +- .../acceptance/js/GettingADocumentTests.js | 4 +- .../acceptance/js/GettingProjectDocsTests.js | 4 +- .../test/acceptance/js/RangesTests.js | 4 +- .../acceptance/js/SettingADocumentTests.js | 2 - services/document-updater/test/setup.js | 37 ++ .../test/unit/js/DiffCodec/DiffCodecTests.js | 4 +- .../DispatchManager/DispatchManagerTests.js | 7 - .../DocumentManager/DocumentManagerTests.js | 9 - .../js/HistoryManager/HistoryManagerTests.js | 6 - .../HistoryRedisManagerTests.js | 5 +- .../js/HttpController/HttpControllerTests.js | 1 - .../unit/js/LockManager/CheckingTheLock.js | 1 - .../unit/js/LockManager/ReleasingTheLock.js | 4 - .../test/unit/js/LockManager/getLockTests.js | 3 - .../test/unit/js/LockManager/tryLockTests.js | 3 - .../PersistenceManagerTests.js | 7 - .../ProjectHistoryRedisManagerTests.js | 12 +- .../flushAndDeleteProjectTests.js | 6 - .../js/ProjectManager/flushProjectTests.js | 6 - .../js/ProjectManager/getProjectDocsTests.js | 6 - .../js/ProjectManager/updateProjectTests.js | 6 - .../js/RangesManager/RangesManagerTests.js | 19 +- .../js/RateLimitManager/RateLimitManager.js | 5 +- .../RealTimeRedisManagerTests.js | 3 - .../unit/js/RedisManager/RedisManagerTests.js | 24 +- .../unit/js/ShareJS/TextTransformTests.js | 1 - .../test/unit/js/ShareJsDB/ShareJsDBTests.js | 4 +- .../ShareJsUpdateManagerTests.js | 3 - .../js/UpdateManager/UpdateManagerTests.js | 3 - 38 files changed, 578 insertions(+), 228 deletions(-) create mode 100644 services/document-updater/.mocharc.json create mode 100644 services/document-updater/test/setup.js diff --git a/services/document-updater/.mocharc.json b/services/document-updater/.mocharc.json new file mode 100644 index 0000000000..dc3280aa96 --- /dev/null +++ b/services/document-updater/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index ac530644db..65b0a0d9c2 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -819,6 +819,12 @@ } } }, + "@ungap/promise-all-settled": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", + "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", + "dev": true + }, "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -887,6 +893,12 @@ "uri-js": "^4.2.2" } }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, "ansi-escapes": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", @@ -919,6 +931,16 @@ "color-convert": "^1.9.0" } }, + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": 
"sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, "aproba": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", @@ -1112,6 +1134,12 @@ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -1182,10 +1210,19 @@ "concat-map": "0.0.1" } }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, "bson": { @@ -1269,12 +1306,6 @@ "type-detect": "^1.0.0" } }, - "chai-spies": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", - "integrity": "sha1-ND2Z9RJEIS6LF+ZLk5lv97LCqbE=", - "dev": true - }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -1292,6 +1323,22 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "chokidar": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", + "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.3.1", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.5.0" + } + }, "chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", @@ -1333,6 +1380,12 @@ "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" }, + "coffee-script": { + "version": "1.12.7", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz", + "integrity": "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==", + "dev": true + }, "coffeescript": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", @@ -1475,7 +1528,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" + "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, "damerau-levenshtein": { "version": "1.0.6", @@ -1578,9 +1631,9 @@ "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" }, "diff": { - "version": "3.5.0", - "resolved": 
"https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", "dev": true }, "diff-match-patch": { @@ -1747,6 +1800,12 @@ "is-symbol": "^1.0.2" } }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -2482,6 +2541,15 @@ "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, "finalhandler": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", @@ -2510,6 +2578,12 @@ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, + "flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, "flat-cache": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", @@ -2600,6 +2674,13 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -2890,9 +2971,9 @@ "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" }, "he": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "dev": true }, "hex2dec": { @@ -3245,6 +3326,15 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", @@ -3277,11 +3367,23 @@ "is-extglob": "^2.1.1" } }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, "is-regex": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", @@ -3467,12 +3569,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" + "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, "lodash.defaults": { "version": "4.2.0", @@ -3493,7 +3595,7 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" + "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" }, "lodash.memoize": { "version": "4.1.2", @@ -3518,6 +3620,66 @@ "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, + "log-symbols": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", + "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", + "dev": true, + "requires": { + "chalk": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, "logger-sharelatex": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", @@ -3768,7 +3930,8 @@ "minimist": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", + "optional": true }, "minipass": { "version": "2.9.0", @@ -3803,43 +3966,128 @@ } }, "mocha": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.3.2.tgz", + "integrity": "sha512-UdmISwr/5w+uXLPKspgoV7/RXZwKRTiTjJ2/AC5ZiEztIoOYdfKb19+9jNmEInzx5pBsCyJQzarAxqIGBNYJhg==", "dev": true, "requires": { + "@ungap/promise-all-settled": "1.1.2", + "ansi-colors": "4.1.1", "browser-stdout": "1.3.1", - "commander": "2.15.1", - "debug": "3.1.0", - "diff": "3.5.0", - "escape-string-regexp": "1.0.5", - "glob": "7.1.2", + "chokidar": "3.5.1", + "debug": "4.3.1", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.6", "growl": "1.10.5", - "he": "1.1.1", + "he": "1.2.0", + "js-yaml": "4.0.0", + "log-symbols": "4.0.0", "minimatch": "3.0.4", - "mkdirp": "0.5.1", - "supports-color": "5.4.0" + "ms": "2.1.3", + "nanoid": "3.1.20", + "serialize-javascript": "5.0.1", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "which": "2.0.2", + "wide-align": "1.1.3", + "workerpool": "6.1.0", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" }, "dependencies": { - "commander": { - "version": "2.15.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=", + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "dev": true, "requires": { - "ms": "2.0.0" + "ms": "2.1.2" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" } }, "glob": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -3850,19 +4098,140 @@ "path-is-absolute": "^1.0.0" } }, - "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "js-yaml": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz", + "integrity": "sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q==", "dev": true, "requires": { - "minimist": "0.0.8" + "argparse": "^2.0.1" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" } }, "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "string-width": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "y18n": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", + "integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==", + "dev": true + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": 
true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", "dev": true } } @@ -3929,6 +4298,12 @@ "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, + "nanoid": { + "version": "3.1.20", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", + "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==", + "dev": true + }, "natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -4091,6 +4466,12 @@ } } }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, "npm-bundled": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", @@ -4351,6 +4732,12 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, + "picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true + }, "pprof": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz", @@ -5184,6 +5571,15 @@ "integrity": "sha512-pVzZdDpWwWqEVVLshWUHjNwuVP7SfcmPraYuqocJp1yo2U1R7P+5QAfDhdItkuoGqIBnBYrtPp7rEPqDn9HlZA==", "dev": true }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, "range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -5263,10 +5659,19 @@ "util-deprecate": "^1.0.1" } }, + "readdirp": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", + "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, "redis-commands": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==" + "integrity": "sha1-gNLiBpj+aI8icSf/nlFkp90X54U=" }, "redis-errors": { "version": "1.2.0", @@ -5483,7 +5888,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", + "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", "optional": true }, 
"safer-buffer": { @@ -5492,21 +5897,13 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sandboxed-module": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", - "integrity": "sha1-bL3sghOAx31FdcjIeDi5ET5kulA=", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.4.tgz", + "integrity": "sha512-AwEPOdO8mg/wJjr876yCHP2DHqVN0MaggEXhp6IIf3bcI5cYoQl9QrrCHSrvToHjvdEiS5x4TVZRgjD2bEmNTA==", "dev": true, "requires": { "require-like": "0.1.2", - "stack-trace": "0.0.6" - }, - "dependencies": { - "stack-trace": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", - "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=", - "dev": true - } + "stack-trace": "0.0.9" } }, "saslprep": { @@ -5555,6 +5952,15 @@ } } }, + "serialize-javascript": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz", + "integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, "serve-static": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", @@ -5777,7 +6183,7 @@ "standard-as-callback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" + "integrity": "sha1-7YuyVkjhWDF1m2Ajvbh+a2CzgSY=" }, "statuses": { "version": "1.5.0", @@ -5896,7 +6302,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" }, "supports-color": { "version": "5.4.0", @@ -6028,12 +6434,21 @@ "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" + "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", + "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", "requires": { "to-space-case": "^1.0.0" } @@ -6041,7 +6456,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", + "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", "requires": { "to-no-case": "^1.0.0" } @@ -6322,6 +6737,12 @@ "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true }, + "workerpool": { + "version": "6.1.0", + 
"resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz", + "integrity": "sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==", + "dev": true + }, "wrap-ansi": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", @@ -6412,6 +6833,32 @@ "decamelize": "^1.2.0" } }, + "yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "requires": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "dependencies": { + "camelcase": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "dev": true + }, + "decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true + } + } + }, "yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 8ea71edddb..a3fb2690cb 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -36,8 +36,8 @@ "devDependencies": { "babel-eslint": "^10.1.0", "chai": "^3.5.0", - "chai-spies": "^0.7.1", "cluster-key-slot": "^1.0.5", + "coffee-script": "^1.12.7", "eslint": "^6.8.0", "eslint-config-prettier": "^6.10.0", "eslint-config-standard": "^14.1.0", @@ -53,10 +53,10 @@ "eslint-plugin-promise": "^4.2.1", "eslint-plugin-react": "^7.19.0", "eslint-plugin-standard": "^4.0.1", - "mocha": "^5.0.1", + "mocha": "^8.3.2", "prettier": "^2.0.0", "prettier-eslint-cli": "^5.0.0", - "sandboxed-module": "~0.2.0", + "sandboxed-module": "^2.0.4", "sinon": "^9.0.2", "timekeeper": "^2.0.0" } diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index f460a2d3ff..adf045645e 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -12,9 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const async = require('async') const Settings = require('settings-sharelatex') const rclient_history = require('@overleaf/redis-wrapper').createClient( diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index 42c3c8af6a..3bc2c793e1 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -1,6 +1,4 @@ const sinon = require('sinon') -const chai = require('chai') -chai.should() const Settings = require('settings-sharelatex') const rclientProjectHistory = 
require('@overleaf/redis-wrapper').createClient( Settings.redis.project_history diff --git a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js index fbb8055aae..4051d4f5a4 100644 --- a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js @@ -11,9 +11,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() - const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js index f050ea22e1..b07ffae0f5 100644 --- a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() const async = require('async') const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') diff --git a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js index c860fce849..4f4abc2730 100644 --- a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() const async = require('async') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js index 109f89d434..5eac9fa2f3 100644 --- a/services/document-updater/test/acceptance/js/FlushingDocsTests.js +++ b/services/document-updater/test/acceptance/js/FlushingDocsTests.js @@ -14,9 +14,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const async = require('async') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js index a0b9de5773..50dc35059c 100644 --- a/services/document-updater/test/acceptance/js/GettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js @@ -12,9 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const MockWebApi = require('./helpers/MockWebApi') const DocUpdaterClient = require('./helpers/DocUpdaterClient') diff --git a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js 
b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js index 72a6824562..b32ccb0837 100644 --- a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js +++ b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js @@ -12,9 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const MockWebApi = require('./helpers/MockWebApi') const DocUpdaterClient = require('./helpers/DocUpdaterClient') diff --git a/services/document-updater/test/acceptance/js/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js index b765e58b7c..7034436440 100644 --- a/services/document-updater/test/acceptance/js/RangesTests.js +++ b/services/document-updater/test/acceptance/js/RangesTests.js @@ -12,9 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const async = require('async') const { db, ObjectId } = require('../../../app/js/mongodb') diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index d47931868c..7d2307c526 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -1,6 +1,4 @@ const sinon = require('sinon') -const chai = require('chai') -chai.should() const { expect } = require('chai') const Settings = require('settings-sharelatex') const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient( diff --git a/services/document-updater/test/setup.js b/services/document-updater/test/setup.js new file mode 100644 index 0000000000..0fb9848427 --- /dev/null +++ b/services/document-updater/test/setup.js @@ -0,0 +1,37 @@ +const chai = require('chai') +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') + +// Chai configuration +chai.should() + +// Global stubs +const sandbox = sinon.createSandbox() +const stubs = { + logger: { + debug: sandbox.stub(), + log: sandbox.stub(), + warn: sandbox.stub(), + err: sandbox.stub(), + error: sandbox.stub() + } +} + +// SandboxedModule configuration +SandboxedModule.configure({ + requires: { + 'logger-sharelatex': stubs.logger + }, + globals: { Buffer, JSON, Math, console, process } +}) + +// Mocha hooks +exports.mochaHooks = { + beforeEach() { + this.logger = stubs.logger + }, + + afterEach() { + sandbox.reset() + } +} diff --git a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js index f208c17bd6..d498d6b45c 100644 --- a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js +++ b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js @@ -11,9 +11,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai +const { expect } = require('chai') const modulePath = '../../../../app/js/DiffCodec.js' const SandboxedModule = require('sandboxed-module') diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js 
index 5610c4abc1..81ef37f4be 100644
--- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js
+++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js
@@ -12,8 +12,6 @@
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
 const sinon = require('sinon')
-const chai = require('chai')
-const should = chai.should()
 const modulePath = '../../../../app/js/DispatchManager.js'
 const SandboxedModule = require('sandboxed-module')
 const Errors = require('../../../../app/js/Errors.js')
@@ -25,11 +23,6 @@ describe('DispatchManager', function () {
     this.DispatchManager = SandboxedModule.require(modulePath, {
       requires: {
         './UpdateManager': (this.UpdateManager = {}),
-        'logger-sharelatex': (this.logger = {
-          log: sinon.stub(),
-          error: sinon.stub(),
-          warn: sinon.stub()
-        }),
         'settings-sharelatex': (this.settings = {
           redis: {
             documentupdater: {}
diff --git a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js
index 295a643cee..8ca42df757 100644
--- a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js
+++ b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js
@@ -13,8 +13,6 @@
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
 const sinon = require('sinon')
-const chai = require('chai')
-const should = chai.should()
 const modulePath = '../../../../app/js/DocumentManager.js'
 const SandboxedModule = require('sandboxed-module')
 const Errors = require('../../../../app/js/Errors')
@@ -33,11 +31,6 @@ describe('DocumentManager', function () {
         './HistoryManager': (this.HistoryManager = {
           flushDocChangesAsync: sinon.stub(),
           flushProjectChangesAsync: sinon.stub()
         }),
-        'logger-sharelatex': (this.logger = {
-          log: sinon.stub(),
-          warn: sinon.stub()
-        }),
-        './DocOpsManager': (this.DocOpsManager = {}),
         './Metrics': (this.Metrics = {
           Timer: (Timer = (function () {
             Timer = class Timer {
@@ -220,7 +213,6 @@ describe('DocumentManager', function () {
           .stub()
           .callsArgWith(2, null, null, null, null)
         this.PersistenceManager.setDoc = sinon.stub().yields()
-        this.DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2)
         return this.DocumentManager.flushDocIfLoaded(
           this.project_id,
           this.doc_id,
@@ -236,7 +228,6 @@ it('should not write anything to the persistence layer', function () {
         this.PersistenceManager.setDoc.called.should.equal(false)
-        return this.DocOpsManager.flushDocOpsToMongo.called.should.equal(false)
       })
       it('should call the callback without error', function () {
diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js
index 263f1cd094..df3261b0f4 100644
--- a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js
+++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js
@@ -11,7 +11,6 @@
 */
 const SandboxedModule = require('sandboxed-module')
 const sinon = require('sinon')
-require('chai').should()
 const modulePath = require('path').join(
   __dirname,
   '../../../../app/js/HistoryManager'
 )
@@ -33,11 +32,6 @@ describe('HistoryManager', function () {
           }
         }
       }),
-      'logger-sharelatex': (this.logger = {
-        log: sinon.stub(),
-        error: sinon.stub(),
-        debug: sinon.stub()
-      }),
       './DocumentManager':
(this.DocumentManager = {}), './HistoryRedisManager': (this.HistoryRedisManager = {}), './RedisManager': (this.RedisManager = {}), diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js index 1b266685d1..f9b719991a 100644 --- a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/HistoryRedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -41,8 +39,7 @@ describe('HistoryRedisManager', function () { } }) } - }, - 'logger-sharelatex': { log() {} } + } } }) this.doc_id = 'doc-id-123' diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 07e9d93c9a..64477eb944 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -12,7 +12,6 @@ describe('HttpController', function () { flushProjectChangesAsync: sinon.stub() }), './ProjectManager': (this.ProjectManager = {}), - 'logger-sharelatex': (this.logger = { log: sinon.stub() }), './ProjectFlusher': { flushAllProjects() {} }, './DeleteQueueManager': (this.DeleteQueueManager = {}), './Metrics': (this.Metrics = {}), diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js index 6b3c3b539e..4f700cc144 100644 --- a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js @@ -25,7 +25,6 @@ describe('LockManager - checking the lock', function () { const existsStub = sinon.stub() const mocks = { - 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient() { return { diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index a04db7614f..3d6cf7a5ba 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -27,10 +27,6 @@ describe('LockManager - releasing the lock', function () { eval: sinon.stub() } const mocks = { - 'logger-sharelatex': { - log() {}, - error() {} - }, '@overleaf/redis-wrapper': { createClient: () => this.client }, diff --git a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js index d56a244510..0b938c3753 100644 --- a/services/document-updater/test/unit/js/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js @@ -15,8 +15,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/LockManager.js' const SandboxedModule = 
require('sandboxed-module') @@ -25,7 +23,6 @@ describe('LockManager - getting the lock', function () { let Profiler this.LockManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient: () => { return { auth() {} } diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js index 02c279dd11..fb49e94aa1 100644 --- a/services/document-updater/test/unit/js/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/LockManager.js' const SandboxedModule = require('sandboxed-module') @@ -22,7 +20,6 @@ describe('LockManager - trying the lock', function () { let Profiler this.LockManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient: () => { return { diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js index 1013752dee..4015ef2662 100644 --- a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -11,8 +11,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/PersistenceManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -38,11 +36,6 @@ describe('PersistenceManager', function () { })()), inc: sinon.stub() }), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - err: sinon.stub(), - error: sinon.stub() - }), './Errors': Errors } }) diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 8b62bd83f3..1ff3d53ded 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectHistoryRedisManager.js' const SandboxedModule = require('sandboxed-module') const tk = require('timekeeper') @@ -47,13 +45,7 @@ describe('ProjectHistoryRedisManager', function () { '@overleaf/redis-wrapper': { createClient: () => this.rclient }, - 'logger-sharelatex': { - log() {} - }, './Metrics': (this.metrics = { summary: sinon.stub() }) - }, - globals: { - JSON: (this.JSON = JSON) } } )) @@ -136,7 +128,7 @@ describe('ProjectHistoryRedisManager', function () { return this.ProjectHistoryRedisManager.queueOps .calledWithExactly( this.project_id, - this.JSON.stringify(update), + JSON.stringify(update), this.callback ) .should.equal(true) @@ 
-184,7 +176,7 @@ describe('ProjectHistoryRedisManager', function () { return this.ProjectHistoryRedisManager.queueOps .calledWithExactly( this.project_id, - this.JSON.stringify(update), + JSON.stringify(update), this.callback ) .should.equal(true) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js index 9589d42054..d8342c0cff 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js @@ -13,8 +13,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') @@ -26,10 +24,6 @@ describe('ProjectManager - flushAndDeleteProject', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - error: sinon.stub() - }), './HistoryManager': (this.HistoryManager = { flushProjectChanges: sinon.stub().callsArg(2) }), diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js index c0bb668f49..70ae03e861 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js @@ -15,8 +15,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') @@ -28,10 +26,6 @@ describe('ProjectManager - flushProject', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - error: sinon.stub() - }), './HistoryManager': (this.HistoryManager = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { diff --git a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js index db9f31e4ad..467a190168 100644 --- a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js @@ -11,8 +11,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors.js') @@ -25,10 +23,6 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), - 'logger-sharelatex': (this.logger = { - log: 
sinon.stub(), - error: sinon.stub() - }), './HistoryManager': (this.HistoryManager = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index aa3db813a0..896517679c 100644 --- a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -22,17 +22,11 @@ describe('ProjectManager', function () { } this.Metrics.Timer.prototype.done = sinon.stub() - this.logger = { - log: sinon.stub(), - error: sinon.stub() - } - this.ProjectManager = SandboxedModule.require(modulePath, { requires: { './RedisManager': this.RedisManager, './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager, './DocumentManager': this.DocumentManager, - 'logger-sharelatex': this.logger, './HistoryManager': this.HistoryManager, './Metrics': this.Metrics } diff --git a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js index a336125a1c..c857153888 100644 --- a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js +++ b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js @@ -13,23 +13,13 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai +const { expect } = require('chai') const modulePath = '../../../../app/js/RangesManager.js' const SandboxedModule = require('sandboxed-module') describe('RangesManager', function () { beforeEach(function () { - this.RangesManager = SandboxedModule.require(modulePath, { - requires: { - 'logger-sharelatex': (this.logger = { - error: sinon.stub(), - log: sinon.stub(), - warn: sinon.stub() - }) - } - }) + this.RangesManager = SandboxedModule.require(modulePath) this.doc_id = 'doc-id-123' this.project_id = 'project-id-123' @@ -368,11 +358,6 @@ describe('RangesManager', function () { beforeEach(function () { this.RangesManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': (this.logger = { - error: sinon.stub(), - log: sinon.stub(), - warn: sinon.stub() - }), './RangesTracker': (this.RangesTracker = SandboxedModule.require( '../../../../app/js/RangesTracker.js' )) diff --git a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js index e84d557501..8fef08051f 100644 --- a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js +++ b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js @@ -11,9 +11,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai +const { expect } = require('chai') const modulePath = '../../../../app/js/RateLimitManager.js' const SandboxedModule = require('sandboxed-module') @@ -22,7 +20,6 @@ describe('RateLimitManager', function () { let Timer this.RateLimitManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': (this.logger = { log: sinon.stub() }), 'settings-sharelatex': (this.settings = {}), './Metrics': (this.Metrics = { Timer: (Timer = 
(function () { diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js index c5e4647df4..83cd5f99ce 100644 --- a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -11,8 +11,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/RealTimeRedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -45,7 +43,6 @@ describe('RealTimeRedisManager', function () { } } }, - 'logger-sharelatex': { log() {} }, crypto: (this.crypto = { randomBytes: sinon .stub() diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index 739aa88ab8..d14d0c23de 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/RedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -28,11 +26,6 @@ describe('RedisManager', function () { tk.freeze(new Date()) this.RedisManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': (this.logger = { - error: sinon.stub(), - log: sinon.stub(), - warn: sinon.stub() - }), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), 'settings-sharelatex': (this.settings = { documentupdater: { logHashErrors: { write: true, read: true } }, @@ -122,9 +115,6 @@ describe('RedisManager', function () { }) }), './Errors': Errors - }, - globals: { - JSON: (this.JSON = JSON) } }) @@ -924,8 +914,9 @@ describe('RedisManager', function () { this.RedisManager.getDocVersion .withArgs(this.doc_id) .yields(null, this.version - this.ops.length) - this._stringify = JSON.stringify - this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]' + this.stringifyStub = sinon + .stub(JSON, 'stringify') + .callsFake(() => '["bad bytes! \u0000 <- here"]') return this.RedisManager.updateDocument( this.project_id, this.doc_id, @@ -939,7 +930,7 @@ describe('RedisManager', function () { }) afterEach(function () { - return (this.JSON.stringify = this._stringify) + this.stringifyStub.restore() }) it('should log an error', function () { @@ -1127,8 +1118,9 @@ describe('RedisManager', function () { describe('with null bytes in the serialized doc lines', function () { beforeEach(function () { - this._stringify = JSON.stringify - this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]' + this.stringifyStub = sinon + .stub(JSON, 'stringify') + .callsFake(() => '["bad bytes! 
\u0000 <- here"]') return this.RedisManager.putDocInMemory( this.project_id, this.doc_id, @@ -1142,7 +1134,7 @@ describe('RedisManager', function () { }) afterEach(function () { - return (this.JSON.stringify = this._stringify) + this.stringifyStub.restore() }) it('should log an error', function () { diff --git a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js index 8ea99aee5c..a5e3a8599a 100644 --- a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js +++ b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js @@ -14,7 +14,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const text = require('../../../../app/js/sharejs/types/text') -require('chai').should() const RangesTracker = require('../../../../app/js/RangesTracker') describe('ShareJS text type', function () { diff --git a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js index ddf98775d8..1b4e4422a6 100644 --- a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js +++ b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js @@ -11,9 +11,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai +const { expect } = require('chai') const modulePath = '../../../../app/js/ShareJsDB.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js index 2ab5ba617e..d6e9700bff 100644 --- a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js +++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js @@ -10,8 +10,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ShareJsUpdateManager.js' const SandboxedModule = require('sandboxed-module') const crypto = require('crypto') @@ -35,7 +33,6 @@ describe('ShareJsUpdateManager', function () { return (this.rclient = { auth() {} }) } }, - 'logger-sharelatex': (this.logger = { log: sinon.stub() }), './RealTimeRedisManager': (this.RealTimeRedisManager = {}), './Metrics': (this.metrics = { inc: sinon.stub() }) }, diff --git a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js index 4e39089490..cac51b9f5f 100644 --- a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js +++ b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/UpdateManager.js' const SandboxedModule = require('sandboxed-module') @@ -31,7 +29,6 @@ describe('UpdateManager', function () { './RealTimeRedisManager': (this.RealTimeRedisManager = {}), 
        './ShareJsUpdateManager': (this.ShareJsUpdateManager = {}),
         './HistoryManager': (this.HistoryManager = {}),
-        'logger-sharelatex': (this.logger = { log: sinon.stub() }),
         './Metrics': (this.Metrics = {
           Timer: (Timer = (function () {
             Timer = class Timer {

From 1a2235a219c1e669e03f32043332a58673b81fb5 Mon Sep 17 00:00:00 2001
From: Eric Mc Sween
Date: Mon, 15 Mar 2021 17:23:50 -0400
Subject: [PATCH 725/769] Upgrade to Node 12

---
 services/document-updater/.nvmrc                  | 2 +-
 services/document-updater/Dockerfile              | 2 +-
 services/document-updater/Makefile                | 6 ++++--
 services/document-updater/app/js/RangesTracker.js | 8 ++++++--
 services/document-updater/buildscript.txt         | 4 ++--
 services/document-updater/docker-compose.yml      | 4 ++--
 6 files changed, 16 insertions(+), 10 deletions(-)

diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc
index 2baa2d433a..e68b860383 100644
--- a/services/document-updater/.nvmrc
+++ b/services/document-updater/.nvmrc
@@ -1 +1 @@
-10.23.1
+12.21.0
diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile
index 2da67d2436..4f417a2a4b 100644
--- a/services/document-updater/Dockerfile
+++ b/services/document-updater/Dockerfile
@@ -2,7 +2,7 @@
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-FROM node:10.23.1 as base
+FROM node:12.21.0 as base
 WORKDIR /app
diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile
index 596aa47fdb..7591d2a689 100644
--- a/services/document-updater/Makefile
+++ b/services/document-updater/Makefile
@@ -21,8 +21,10 @@ DOCKER_COMPOSE_TEST_UNIT = \
 	COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
 clean:
-	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
-	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+	-docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+	-docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+	-$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local
+	-$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local
 format:
 	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format
diff --git a/services/document-updater/app/js/RangesTracker.js b/services/document-updater/app/js/RangesTracker.js
index 6107acf300..5991ee2993 100644
--- a/services/document-updater/app/js/RangesTracker.js
+++ b/services/document-updater/app/js/RangesTracker.js
@@ -706,8 +706,10 @@ const load = function () {
           return result
         } else if (c1.op.i != null && c2.op.d != null) {
           return 1
-        } else {
+        } else if (c1.op.d != null && c2.op.i != null) {
           return -1
+        } else {
+          return 0
         }
       })
@@ -728,8 +730,10 @@ const load = function () {
           return result
         } else if (a.i != null && b.d != null) {
           return 1
-        } else {
+        } else if (a.d != null && b.i != null) {
           return -1
+        } else {
+          return 0
         }
       })
diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt
index 0d8b15d9f5..2d2f00495e 100644
--- a/services/document-updater/buildscript.txt
+++ b/services/document-updater/buildscript.txt
@@ -3,6 +3,6 @@ document-updater
 --docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
---node-version=10.23.1
+--node-version=12.21.0
 --public-repo=True
---script-version=3.4.0
+--script-version=3.7.0
diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml
index 0db448f9b5..6a1c097a30 100644
--- a/services/document-updater/docker-compose.yml
+++ 
b/services/document-updater/docker-compose.yml @@ -6,7 +6,7 @@ version: "2.3" services: test_unit: - image: node:10.23.1 + image: node:12.21.0 volumes: - .:/app working_dir: /app @@ -18,7 +18,7 @@ services: user: node test_acceptance: - image: node:10.23.1 + image: node:12.21.0 volumes: - .:/app working_dir: /app From 98a32833a98f481a61dc9b6c0bcd3532faf02f59 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 16 Feb 2021 15:10:11 +0000 Subject: [PATCH 726/769] [misc] bump the version of the metrics module to 3.5.1 --- services/document-updater/package-lock.json | 1004 ++++++++++++------- services/document-updater/package.json | 2 +- 2 files changed, 655 insertions(+), 351 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 65b0a0d9c2..a1551e3502 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -170,6 +170,192 @@ "teeny-request": "^6.0.0" } }, + "@google-cloud/debug-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", + "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "acorn": "^8.0.0", + "coffeescript": "^2.0.0", + "console-log-level": "^1.4.0", + "extend": "^3.0.2", + "findit2": "^2.2.3", + "gcp-metadata": "^4.0.0", + "p-limit": "^3.0.1", + "semver": "^7.0.0", + "source-map": "^0.6.1", + "split": "^1.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", + "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^7.0.2", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "acorn": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.0.tgz", + "integrity": "sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": 
"sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", + "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", + "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": 
"sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@google-cloud/logging": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", @@ -214,6 +400,201 @@ "extend": "^3.0.2" } }, + "@google-cloud/profiler": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.1.tgz", + "integrity": "sha512-qk08aDxTaLnu+NoNEh5Jh+Fs5iR8lRLMr5Mb3YJDoZw72jHJI4f5N5F2JWt1xRc9D6da4gA6stBUJrbfbubvGQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@types/console-log-level": "^1.4.0", + "@types/semver": "^7.0.0", + "console-log-level": "^1.4.0", + "delay": "^5.0.0", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "parse-duration": "^1.0.0", + "pprof": "3.0.0", + "pretty-ms": "^7.0.0", + "protobufjs": "~6.10.0", + "semver": "^7.0.0", + "teeny-request": "^7.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", + "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^7.0.2", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "@types/node": { + "version": "13.13.48", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", + "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.2.0", + "resolved": 
"https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", + "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", + "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "protobufjs": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", + "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + } + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": 
"7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@google-cloud/projectify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", @@ -224,6 +605,201 @@ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, + "@google-cloud/trace-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.3.tgz", + "integrity": "sha512-f+5DX7n6QpDlHA+4kr81z69SLAdrlvd9T8skqCMgnYvtXx14AwzXZyzEDf3jppOYzYoqPPJv8XYiyYHHmYD0BA==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@opencensus/propagation-stackdriver": "0.0.22", + "builtin-modules": "^3.0.0", + "console-log-level": "^1.4.0", + "continuation-local-storage": "^3.2.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^7.0.0", + "hex2dec": "^1.0.1", + "is": "^3.2.0", + "methods": "^1.1.1", + "require-in-the-middle": "^5.0.0", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "source-map-support": "^0.5.16", + "uuid": "^8.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", + "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^7.0.2", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "@opencensus/core": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", + "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", + "requires": { + "continuation-local-storage": "^3.2.1", + "log-driver": "^1.2.7", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "uuid": "^8.0.0" + } + }, + "@opencensus/propagation-stackdriver": { + "version": "0.0.22", + "resolved": 
"https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", + "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", + "requires": { + "@opencensus/core": "^0.0.22", + "hex2dec": "^1.0.1", + "uuid": "^8.0.0" + } + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", + "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", + "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + 
"semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@grpc/grpc-js": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", @@ -264,9 +840,9 @@ } }, "@overleaf/metrics": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.4.1.tgz", - "integrity": "sha512-OgjlzuC+2gPdIEDHhmd9LDMu01tk1ln0cJhw1727BZ+Wgf2Z1hjuHRt4JeCkf+PFTHwJutVYT8v6IGPpNEPtbg==", + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.5.1.tgz", + "integrity": "sha512-RLHxkMF7Y3725L3QwXo9cIn2gGobsMYUGuxKxg7PVMrPTMsomHEMeG7StOxCO7ML1Z/BwB/9nsVYNrsRdAJtKg==", "requires": { "@google-cloud/debug-agent": "^5.1.2", "@google-cloud/profiler": "^4.0.3", @@ -275,321 +851,6 @@ "prom-client": "^11.1.3", "underscore": "~1.6.0", "yn": "^3.1.1" - }, - "dependencies": { - "@google-cloud/common": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", - "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", - "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^6.1.1", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/debug-agent": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", - "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "acorn": "^8.0.0", - "coffeescript": "^2.0.0", - "console-log-level": "^1.4.0", - "extend": "^3.0.2", - "findit2": "^2.2.3", - "gcp-metadata": "^4.0.0", - "p-limit": "^3.0.1", - "semver": "^7.0.0", - "source-map": "^0.6.1", - "split": "^1.0.0" - } - }, - "@google-cloud/profiler": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.0.tgz", - "integrity": "sha512-9e1zXRctLSUHAoAsFGwE4rS28fr0siiG+jXl5OpwTK8ZAUlxb70aosHaZGdsv8YXrYKjuiufjRZ/OXCs0XLI9g==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@types/console-log-level": "^1.4.0", - "@types/semver": "^7.0.0", - "console-log-level": "^1.4.0", - "delay": "^4.0.1", - "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "parse-duration": "^0.4.4", - "pprof": 
"3.0.0", - "pretty-ms": "^7.0.0", - "protobufjs": "~6.10.0", - "semver": "^7.0.0", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, - "@google-cloud/trace-agent": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.1.tgz", - "integrity": "sha512-YTcK0RLN90pLCprg0XC8uV4oAVd79vsXhkcxmEVwiOOYjUDvSrAhb7y/0SY606zgfhJHmUTNb/fZSWEtZP/slQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@opencensus/propagation-stackdriver": "0.0.22", - "builtin-modules": "^3.0.0", - "console-log-level": "^1.4.0", - "continuation-local-storage": "^3.2.1", - "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "google-auth-library": "^6.0.0", - "hex2dec": "^1.0.1", - "is": "^3.2.0", - "methods": "^1.1.1", - "require-in-the-middle": "^5.0.0", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "source-map-support": "^0.5.16", - "uuid": "^8.0.0" - } - }, - "@opencensus/core": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", - "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", - "requires": { - "continuation-local-storage": "^3.2.1", - "log-driver": "^1.2.7", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "uuid": "^8.0.0" - } - }, - "@opencensus/propagation-stackdriver": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", - "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", - "requires": { - "@opencensus/core": "^0.0.22", - "hex2dec": "^1.0.1", - "uuid": "^8.0.0" - } - }, - "@types/node": { - "version": "13.13.33", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.33.tgz", - "integrity": "sha512-1B3GM1yuYsFyEvBb+ljBqWBOylsWDYioZ5wpu8AhXdIhq20neXS7eaSC8GkwHE0yQYGiOIV43lMsgRYTgKZefQ==" - }, - "@types/semver": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", - "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" - }, - "acorn": { - "version": "8.0.4", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.0.4.tgz", - "integrity": "sha512-XNP0PqF1XD19ZlLKvB7cMmnZswW4C/03pRHgirB30uSJTaS3A3V1/P4sS3HPvFmjoriPCJQs+JDSbm4bL1TxGQ==" - }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, - "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", - "requires": { - "ms": "2.1.2" - } - }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": 
"sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.0.1.tgz", - "integrity": "sha512-jOin8xRZ/UytQeBpSXFqIzqU7Fi5TqgPNLlUsSB8kjJ76+FiGBfImF8KJu++c6J4jOldfJUtt0YmkRj2ZpSHTQ==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-auth-library": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.3.tgz", - "integrity": "sha512-m9mwvY3GWbr7ZYEbl61isWmk+fvTmOt0YNUfPOUY2VH8K5pZlAIWJjxEi0PqR3OjMretyiQLI6GURMrPSwHQ2g==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.1.0.tgz", - "integrity": "sha512-4d8N6Lk8TEAHl9vVoRVMh9BNOKWVgl2DdNtr3428O75r3QFrF/a5MMu851VmK0AA8+iSvbwRv69k5XnMLURGhg==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0", - "mime": "^2.2.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "requires": { - 
"yocto-queue": "^0.1.0" - } - }, - "parse-duration": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.4.4.tgz", - "integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg==" - }, - "pretty-ms": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", - "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", - "requires": { - "parse-ms": "^2.1.0" - } - }, - "protobufjs": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", - "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } - }, - "require-in-the-middle": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.0.3.tgz", - "integrity": "sha512-p/ICV8uMlqC4tjOYabLMxAWCIKa0YUQgZZ6KDM0xgXJNgdGQ1WmL2A07TwmrZw+wi6ITUFKzH5v3n+ENEyXVkA==", - "requires": { - "debug": "^4.1.1", - "module-details-from-path": "^1.0.3", - "resolve": "^1.12.0" - } - }, - "semver": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", - "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==" - }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, - "uuid": { - "version": "8.3.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.1.tgz", - "integrity": "sha512-FOmRr+FmWEIG8uhZv6C2bTgEVXsHk08kE7mPlrBbEe+c3r9pjceVPgupIfNIhc4yx55H69OXANrUaSuu9eInKg==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } } }, "@overleaf/o-error": { @@ -734,7 +995,7 @@ "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" + "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" }, "@types/eslint-visitor-keys": { "version": "1.0.0", @@ -766,6 +1027,11 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.15.tgz", "integrity": "sha512-daFGV9GSs6USfPgxceDA8nlSe48XrVCJfDeYm7eokxq/ye7iuOH87hKXgMtEAVLFapkczbZsx868PMDT1Y0a6A==" }, + "@types/semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" + }, 
"@typescript-eslint/experimental-utils": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", @@ -1143,7 +1409,7 @@ "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", "requires": { "file-uri-to-path": "1.0.0" } @@ -1241,9 +1507,9 @@ "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" }, "builtin-modules": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", - "integrity": "sha1-qtl8FRMet2tltQ7yCOdYTNdqdIQ=" + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz", + "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==" }, "bunyan": { "version": "0.22.3", @@ -1386,11 +1652,6 @@ "integrity": "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==", "dev": true }, - "coffeescript": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", - "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" - }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -1468,7 +1729,7 @@ "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" + "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, "contains-path": { "version": "0.1.0", @@ -1596,9 +1857,9 @@ } }, "delay": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", - "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", + "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==" }, "delayed-stream": { "version": "1.0.0", @@ -2539,7 +2800,7 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" }, "fill-range": { "version": "7.0.1", @@ -4317,9 +4578,9 @@ "optional": true }, "needle": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/needle/-/needle-2.5.2.tgz", - "integrity": "sha512-LbRIwS9BfkPvNwNHlsA41Q29kL2L/6VaOJ0qisM5lLWsTV3nP15abO5ITL6L81zqFhzjRKDAYjpcBcwM0AVvLQ==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/needle/-/needle-2.6.0.tgz", + "integrity": "sha512-KKYdza4heMsEfSWD7VPUIz3zX2XDwOyX2d+geb4vrERZMT5RMU6ujjaD+I5Yr54uZxQ2w6XRTAhHBbSCyovZBg==", "requires": { "debug": "^3.2.6", "iconv-lite": "^0.4.4", @@ -4335,9 +4596,9 @@ } }, "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" } } }, @@ -4684,10 +4945,15 @@ "callsites": "^3.0.0" } }, + "parse-duration": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-1.0.0.tgz", + "integrity": "sha512-X4kUkCTHU1N/kEbwK9FpUJ0UZQa90VzeczfS704frR30gljxDG0pSziws06XlK+CGRSo/1wtG1mFIdBFQTMQNw==" + }, "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" + "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" }, "parseurl": { "version": "1.3.3", @@ -4738,6 +5004,11 @@ "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", "dev": true }, + "pify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" + }, "pprof": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz", @@ -4756,9 +5027,14 @@ }, "dependencies": { "@types/node": { - "version": "13.13.33", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.33.tgz", - "integrity": "sha512-1B3GM1yuYsFyEvBb+ljBqWBOylsWDYioZ5wpu8AhXdIhq20neXS7eaSC8GkwHE0yQYGiOIV43lMsgRYTgKZefQ==" + "version": "13.13.48", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", + "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" + }, + "delay": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/delay/-/delay-4.4.1.tgz", + "integrity": "sha512-aL3AhqtfhOlT/3ai6sWXeqwnw63ATNpnUiN4HL7x9q+My5QtHlO3OIkasmug9LKzpheLdmUKGRKnYXYAS7FQkQ==" }, "p-limit": { "version": "3.1.0", @@ -4768,11 +5044,6 @@ "yocto-queue": "^0.1.0" } }, - "pify": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", - "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" - }, "protobufjs": { "version": "6.10.2", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", @@ -5453,6 +5724,14 @@ } } }, + "pretty-ms": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", + "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", + "requires": { + "parse-ms": "^2.1.0" + } + }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -5775,6 +6054,31 @@ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", "dev": true }, + "require-in-the-middle": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz", + "integrity": "sha512-M2rLKVupQfJ5lf9OvqFGIT+9iVLnTmjgbOmpil12hiSQNn5zJTKGPoIisETNjfK+09vP3rpm1zJajmErpr2sEQ==", + "requires": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.12.0" + }, + "dependencies": { + "debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "requires": { + "ms": "2.1.2" + } + }, 
+ "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", @@ -6093,7 +6397,7 @@ "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "source-map-support": { "version": "0.5.19", @@ -6148,7 +6452,7 @@ "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", "requires": { "through": "2" } diff --git a/services/document-updater/package.json b/services/document-updater/package.json index a3fb2690cb..fbe12047a6 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -18,7 +18,7 @@ "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write" }, "dependencies": { - "@overleaf/metrics": "^3.4.1", + "@overleaf/metrics": "^3.5.1", "@overleaf/o-error": "^3.1.0", "@overleaf/redis-wrapper": "^2.0.0", "async": "^2.5.0", From b233e6588c5f4bbb1b03432616d29f6c28dba9b3 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 31 Mar 2021 12:06:41 -0400 Subject: [PATCH 727/769] Revert "Merge pull request #158 from overleaf/jpa-metrics-module-3-5-1" This reverts commit 93c98921372eed4244d22fce800716cb27eca299, reversing changes made to d44102751b9436ad89c5b3b05e7abdff51fcc78a. 
--- services/document-updater/package-lock.json | 1004 +++++++------------ services/document-updater/package.json | 2 +- 2 files changed, 351 insertions(+), 655 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index a1551e3502..65b0a0d9c2 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -170,192 +170,6 @@ "teeny-request": "^6.0.0" } }, - "@google-cloud/debug-agent": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", - "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "acorn": "^8.0.0", - "coffeescript": "^2.0.0", - "console-log-level": "^1.4.0", - "extend": "^3.0.2", - "findit2": "^2.2.3", - "gcp-metadata": "^4.0.0", - "p-limit": "^3.0.1", - "semver": "^7.0.0", - "source-map": "^0.6.1", - "split": "^1.0.0" - }, - "dependencies": { - "@google-cloud/common": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", - "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", - "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^7.0.2", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, - "acorn": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.0.tgz", - "integrity": "sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA==" - }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, - "coffeescript": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", - "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" - }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", - "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - 
} - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-auth-library": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", - "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", - "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "requires": { - "yocto-queue": "^0.1.0" - } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } - }, "@google-cloud/logging": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", @@ -400,201 +214,6 @@ "extend": "^3.0.2" } }, - "@google-cloud/profiler": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.1.tgz", - "integrity": "sha512-qk08aDxTaLnu+NoNEh5Jh+Fs5iR8lRLMr5Mb3YJDoZw72jHJI4f5N5F2JWt1xRc9D6da4gA6stBUJrbfbubvGQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@types/console-log-level": "^1.4.0", - "@types/semver": "^7.0.0", - "console-log-level": "^1.4.0", - "delay": "^5.0.0", - "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "parse-duration": "^1.0.0", - "pprof": "3.0.0", - "pretty-ms": "^7.0.0", - "protobufjs": "~6.10.0", - "semver": "^7.0.0", - "teeny-request": "^7.0.0" - }, - "dependencies": { - "@google-cloud/common": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", - "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", - "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^7.0.2", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, - "@types/node": { - "version": "13.13.48", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", - "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" - }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", - "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - 
"google-auth-library": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", - "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", - "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, - "protobufjs": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", - "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - }, - "yallist": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } - }, "@google-cloud/projectify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", @@ -605,201 +224,6 @@ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, - "@google-cloud/trace-agent": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.3.tgz", - "integrity": "sha512-f+5DX7n6QpDlHA+4kr81z69SLAdrlvd9T8skqCMgnYvtXx14AwzXZyzEDf3jppOYzYoqPPJv8XYiyYHHmYD0BA==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@opencensus/propagation-stackdriver": "0.0.22", - "builtin-modules": "^3.0.0", - "console-log-level": "^1.4.0", - "continuation-local-storage": "^3.2.1", - "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "google-auth-library": "^7.0.0", - "hex2dec": "^1.0.1", - "is": "^3.2.0", - "methods": "^1.1.1", - "require-in-the-middle": "^5.0.0", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "source-map-support": "^0.5.16", - "uuid": "^8.0.0" - }, - "dependencies": { - "@google-cloud/common": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", - "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", - "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^7.0.2", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, - "@opencensus/core": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", - "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", - "requires": { - "continuation-local-storage": "^3.2.1", - "log-driver": "^1.2.7", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "uuid": "^8.0.0" - } - }, - "@opencensus/propagation-stackdriver": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", - "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", - "requires": { - "@opencensus/core": "^0.0.22", - "hex2dec": "^1.0.1", - "uuid": "^8.0.0" - } - }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": 
"sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", - "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-auth-library": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", - "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", - "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, - "uuid": { - "version": 
"8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } - }, "@grpc/grpc-js": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", @@ -840,9 +264,9 @@ } }, "@overleaf/metrics": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.5.1.tgz", - "integrity": "sha512-RLHxkMF7Y3725L3QwXo9cIn2gGobsMYUGuxKxg7PVMrPTMsomHEMeG7StOxCO7ML1Z/BwB/9nsVYNrsRdAJtKg==", + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.4.1.tgz", + "integrity": "sha512-OgjlzuC+2gPdIEDHhmd9LDMu01tk1ln0cJhw1727BZ+Wgf2Z1hjuHRt4JeCkf+PFTHwJutVYT8v6IGPpNEPtbg==", "requires": { "@google-cloud/debug-agent": "^5.1.2", "@google-cloud/profiler": "^4.0.3", @@ -851,6 +275,321 @@ "prom-client": "^11.1.3", "underscore": "~1.6.0", "yn": "^3.1.1" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", + "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^6.1.1", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/debug-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", + "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "acorn": "^8.0.0", + "coffeescript": "^2.0.0", + "console-log-level": "^1.4.0", + "extend": "^3.0.2", + "findit2": "^2.2.3", + "gcp-metadata": "^4.0.0", + "p-limit": "^3.0.1", + "semver": "^7.0.0", + "source-map": "^0.6.1", + "split": "^1.0.0" + } + }, + "@google-cloud/profiler": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.0.tgz", + "integrity": "sha512-9e1zXRctLSUHAoAsFGwE4rS28fr0siiG+jXl5OpwTK8ZAUlxb70aosHaZGdsv8YXrYKjuiufjRZ/OXCs0XLI9g==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@types/console-log-level": "^1.4.0", + "@types/semver": "^7.0.0", + "console-log-level": "^1.4.0", + "delay": "^4.0.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "parse-duration": "^0.4.4", + "pprof": "3.0.0", + "pretty-ms": "^7.0.0", + "protobufjs": "~6.10.0", + "semver": "^7.0.0", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "@google-cloud/trace-agent": { + "version": "5.1.1", + 
"resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.1.tgz", + "integrity": "sha512-YTcK0RLN90pLCprg0XC8uV4oAVd79vsXhkcxmEVwiOOYjUDvSrAhb7y/0SY606zgfhJHmUTNb/fZSWEtZP/slQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@opencensus/propagation-stackdriver": "0.0.22", + "builtin-modules": "^3.0.0", + "console-log-level": "^1.4.0", + "continuation-local-storage": "^3.2.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^6.0.0", + "hex2dec": "^1.0.1", + "is": "^3.2.0", + "methods": "^1.1.1", + "require-in-the-middle": "^5.0.0", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "source-map-support": "^0.5.16", + "uuid": "^8.0.0" + } + }, + "@opencensus/core": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", + "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", + "requires": { + "continuation-local-storage": "^3.2.1", + "log-driver": "^1.2.7", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "uuid": "^8.0.0" + } + }, + "@opencensus/propagation-stackdriver": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", + "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", + "requires": { + "@opencensus/core": "^0.0.22", + "hex2dec": "^1.0.1", + "uuid": "^8.0.0" + } + }, + "@types/node": { + "version": "13.13.33", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.33.tgz", + "integrity": "sha512-1B3GM1yuYsFyEvBb+ljBqWBOylsWDYioZ5wpu8AhXdIhq20neXS7eaSC8GkwHE0yQYGiOIV43lMsgRYTgKZefQ==" + }, + "@types/semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" + }, + "acorn": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.0.4.tgz", + "integrity": "sha512-XNP0PqF1XD19ZlLKvB7cMmnZswW4C/03pRHgirB30uSJTaS3A3V1/P4sS3HPvFmjoriPCJQs+JDSbm4bL1TxGQ==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "requires": { + "ms": "2.1.2" + } + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.0.1.tgz", + "integrity": "sha512-jOin8xRZ/UytQeBpSXFqIzqU7Fi5TqgPNLlUsSB8kjJ76+FiGBfImF8KJu++c6J4jOldfJUtt0YmkRj2ZpSHTQ==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": 
"https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.3.tgz", + "integrity": "sha512-m9mwvY3GWbr7ZYEbl61isWmk+fvTmOt0YNUfPOUY2VH8K5pZlAIWJjxEi0PqR3OjMretyiQLI6GURMrPSwHQ2g==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.1.0.tgz", + "integrity": "sha512-4d8N6Lk8TEAHl9vVoRVMh9BNOKWVgl2DdNtr3428O75r3QFrF/a5MMu851VmK0AA8+iSvbwRv69k5XnMLURGhg==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "parse-duration": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.4.4.tgz", + "integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg==" + }, + "pretty-ms": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", + "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", + "requires": { + "parse-ms": "^2.1.0" + } + }, + "protobufjs": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", + 
"integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + } + }, + "require-in-the-middle": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.0.3.tgz", + "integrity": "sha512-p/ICV8uMlqC4tjOYabLMxAWCIKa0YUQgZZ6KDM0xgXJNgdGQ1WmL2A07TwmrZw+wi6ITUFKzH5v3n+ENEyXVkA==", + "requires": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.12.0" + } + }, + "semver": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", + "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==" + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.1.tgz", + "integrity": "sha512-FOmRr+FmWEIG8uhZv6C2bTgEVXsHk08kE7mPlrBbEe+c3r9pjceVPgupIfNIhc4yx55H69OXANrUaSuu9eInKg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } } }, "@overleaf/o-error": { @@ -995,7 +734,7 @@ "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" + "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" }, "@types/eslint-visitor-keys": { "version": "1.0.0", @@ -1027,11 +766,6 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.15.tgz", "integrity": "sha512-daFGV9GSs6USfPgxceDA8nlSe48XrVCJfDeYm7eokxq/ye7iuOH87hKXgMtEAVLFapkczbZsx868PMDT1Y0a6A==" }, - "@types/semver": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", - "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" - }, "@typescript-eslint/experimental-utils": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", @@ -1409,7 +1143,7 @@ "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", "requires": { "file-uri-to-path": "1.0.0" } @@ -1507,9 +1241,9 @@ "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" }, "builtin-modules": { - "version": 
"3.2.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz", - "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", + "integrity": "sha1-qtl8FRMet2tltQ7yCOdYTNdqdIQ=" }, "bunyan": { "version": "0.22.3", @@ -1652,6 +1386,11 @@ "integrity": "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==", "dev": true }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -1729,7 +1468,7 @@ "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" + "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" }, "contains-path": { "version": "0.1.0", @@ -1857,9 +1596,9 @@ } }, "delay": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", - "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==" + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", + "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" }, "delayed-stream": { "version": "1.0.0", @@ -2800,7 +2539,7 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" }, "fill-range": { "version": "7.0.1", @@ -4578,9 +4317,9 @@ "optional": true }, "needle": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/needle/-/needle-2.6.0.tgz", - "integrity": "sha512-KKYdza4heMsEfSWD7VPUIz3zX2XDwOyX2d+geb4vrERZMT5RMU6ujjaD+I5Yr54uZxQ2w6XRTAhHBbSCyovZBg==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/needle/-/needle-2.5.2.tgz", + "integrity": "sha512-LbRIwS9BfkPvNwNHlsA41Q29kL2L/6VaOJ0qisM5lLWsTV3nP15abO5ITL6L81zqFhzjRKDAYjpcBcwM0AVvLQ==", "requires": { "debug": "^3.2.6", "iconv-lite": "^0.4.4", @@ -4596,9 +4335,9 @@ } }, "ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" } } }, @@ -4945,15 +4684,10 @@ "callsites": "^3.0.0" } }, - "parse-duration": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-1.0.0.tgz", - "integrity": "sha512-X4kUkCTHU1N/kEbwK9FpUJ0UZQa90VzeczfS704frR30gljxDG0pSziws06XlK+CGRSo/1wtG1mFIdBFQTMQNw==" - }, "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" + "integrity": 
"sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" }, "parseurl": { "version": "1.3.3", @@ -5004,11 +4738,6 @@ "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", "dev": true }, - "pify": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", - "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" - }, "pprof": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz", @@ -5027,14 +4756,9 @@ }, "dependencies": { "@types/node": { - "version": "13.13.48", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", - "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" - }, - "delay": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.4.1.tgz", - "integrity": "sha512-aL3AhqtfhOlT/3ai6sWXeqwnw63ATNpnUiN4HL7x9q+My5QtHlO3OIkasmug9LKzpheLdmUKGRKnYXYAS7FQkQ==" + "version": "13.13.33", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.33.tgz", + "integrity": "sha512-1B3GM1yuYsFyEvBb+ljBqWBOylsWDYioZ5wpu8AhXdIhq20neXS7eaSC8GkwHE0yQYGiOIV43lMsgRYTgKZefQ==" }, "p-limit": { "version": "3.1.0", @@ -5044,6 +4768,11 @@ "yocto-queue": "^0.1.0" } }, + "pify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" + }, "protobufjs": { "version": "6.10.2", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", @@ -5724,14 +5453,6 @@ } } }, - "pretty-ms": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", - "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", - "requires": { - "parse-ms": "^2.1.0" - } - }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -6054,31 +5775,6 @@ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", "dev": true }, - "require-in-the-middle": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz", - "integrity": "sha512-M2rLKVupQfJ5lf9OvqFGIT+9iVLnTmjgbOmpil12hiSQNn5zJTKGPoIisETNjfK+09vP3rpm1zJajmErpr2sEQ==", - "requires": { - "debug": "^4.1.1", - "module-details-from-path": "^1.0.3", - "resolve": "^1.12.0" - }, - "dependencies": { - "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", - "requires": { - "ms": "2.1.2" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - } - } - }, "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", @@ -6397,7 +6093,7 @@ "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" }, "source-map-support": { "version": "0.5.19", @@ -6452,7 
+6148,7 @@ "split": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
-      "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
+      "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=",
       "requires": {
         "through": "2"
       }
     }
diff --git a/services/document-updater/package.json b/services/document-updater/package.json
index fbe12047a6..a3fb2690cb 100644
--- a/services/document-updater/package.json
+++ b/services/document-updater/package.json
@@ -18,7 +18,7 @@
     "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
   },
   "dependencies": {
-    "@overleaf/metrics": "^3.5.1",
+    "@overleaf/metrics": "^3.4.1",
     "@overleaf/o-error": "^3.1.0",
     "@overleaf/redis-wrapper": "^2.0.0",
     "async": "^2.5.0",

From 4dd1b26b2e81d56c0057f69d421be425b69a4fd0 Mon Sep 17 00:00:00 2001
From: Eric Mc Sween
Date: Wed, 31 Mar 2021 12:07:11 -0400
Subject: [PATCH 728/769] Revert "Merge pull request #161 from overleaf/em-upgrade-node-12"

This reverts commit d44102751b9436ad89c5b3b05e7abdff51fcc78a, reversing
changes made to 6c2f5b8d053b75c677da2b7ddd04f998d2be6fff.
---
 services/document-updater/.mocharc.json | 3 -
 services/document-updater/.nvmrc | 2 +-
 services/document-updater/Dockerfile | 2 +-
 services/document-updater/Makefile | 6 +-
 .../document-updater/app/js/RangesTracker.js | 8 +-
 services/document-updater/buildscript.txt | 4 +-
 services/document-updater/docker-compose.yml | 4 +-
 services/document-updater/package-lock.json | 581 ++----------------
 services/document-updater/package.json | 6 +-
 .../js/ApplyingUpdatesToADocTests.js | 4 +-
 .../ApplyingUpdatesToProjectStructureTests.js | 2 +
 .../acceptance/js/DeletingADocumentTests.js | 3 +
 .../acceptance/js/DeletingAProjectTests.js | 2 +
 .../acceptance/js/FlushingAProjectTests.js | 2 +
 .../test/acceptance/js/FlushingDocsTests.js | 4 +-
 .../acceptance/js/GettingADocumentTests.js | 4 +-
 .../acceptance/js/GettingProjectDocsTests.js | 4 +-
 .../test/acceptance/js/RangesTests.js | 4 +-
 .../acceptance/js/SettingADocumentTests.js | 2 +
 services/document-updater/test/setup.js | 37 --
 .../test/unit/js/DiffCodec/DiffCodecTests.js | 4 +-
 .../DispatchManager/DispatchManagerTests.js | 7 +
 .../DocumentManager/DocumentManagerTests.js | 9 +
 .../js/HistoryManager/HistoryManagerTests.js | 6 +
 .../HistoryRedisManagerTests.js | 5 +-
 .../js/HttpController/HttpControllerTests.js | 1 +
 .../unit/js/LockManager/CheckingTheLock.js | 1 +
 .../unit/js/LockManager/ReleasingTheLock.js | 4 +
 .../test/unit/js/LockManager/getLockTests.js | 3 +
 .../test/unit/js/LockManager/tryLockTests.js | 3 +
 .../PersistenceManagerTests.js | 7 +
 .../ProjectHistoryRedisManagerTests.js | 12 +-
 .../flushAndDeleteProjectTests.js | 6 +
 .../js/ProjectManager/flushProjectTests.js | 6 +
 .../js/ProjectManager/getProjectDocsTests.js | 6 +
 .../js/ProjectManager/updateProjectTests.js | 6 +
 .../js/RangesManager/RangesManagerTests.js | 19 +-
 .../js/RateLimitManager/RateLimitManager.js | 5 +-
 .../RealTimeRedisManagerTests.js | 3 +
 .../unit/js/RedisManager/RedisManagerTests.js | 24 +-
 .../unit/js/ShareJS/TextTransformTests.js | 1 +
 .../test/unit/js/ShareJsDB/ShareJsDBTests.js | 4 +-
 .../ShareJsUpdateManagerTests.js | 3 +
 .../js/UpdateManager/UpdateManagerTests.js | 3 +
 44 files changed, 238 insertions(+), 594 deletions(-)
 delete mode 100644 services/document-updater/.mocharc.json
 delete mode 100644 services/document-updater/test/setup.js

diff --git a/services/document-updater/.mocharc.json b/services/document-updater/.mocharc.json
deleted file mode 100644
index dc3280aa96..0000000000
--- a/services/document-updater/.mocharc.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "require": "test/setup.js"
-}
diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc
index e68b860383..2baa2d433a 100644
--- a/services/document-updater/.nvmrc
+++ b/services/document-updater/.nvmrc
@@ -1 +1 @@
-12.21.0
+10.23.1
diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile
index 4f417a2a4b..2da67d2436 100644
--- a/services/document-updater/Dockerfile
+++ b/services/document-updater/Dockerfile
@@ -2,7 +2,7 @@
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
 
-FROM node:12.21.0 as base
+FROM node:10.23.1 as base
 
 WORKDIR /app
 
diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile
index 7591d2a689..596aa47fdb 100644
--- a/services/document-updater/Makefile
+++ b/services/document-updater/Makefile
@@ -21,10 +21,8 @@ DOCKER_COMPOSE_TEST_UNIT = \
 	COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
 
 clean:
-	-docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
-	-docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
-	-$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local
-	-$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local
+	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
 
 format:
 	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format
diff --git a/services/document-updater/app/js/RangesTracker.js b/services/document-updater/app/js/RangesTracker.js
index 5991ee2993..6107acf300 100644
--- a/services/document-updater/app/js/RangesTracker.js
+++ b/services/document-updater/app/js/RangesTracker.js
@@ -706,10 +706,8 @@ const load = function () {
           return result
         } else if (c1.op.i != null && c2.op.d != null) {
           return 1
-        } else if (c1.op.d != null && c2.op.i != null) {
-          return -1
         } else {
-          return 0
+          return -1
         }
       })
 
@@ -730,10 +728,8 @@ const load = function () {
         return result
       } else if (a.i != null && b.d != null) {
         return 1
-      } else if (a.d != null && b.i != null) {
-        return -1
       } else {
-        return 0
+        return -1
       }
     })
 
diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt
index 2d2f00495e..0d8b15d9f5 100644
--- a/services/document-updater/buildscript.txt
+++ b/services/document-updater/buildscript.txt
@@ -3,6 +3,6 @@ document-updater
 --docker-repos=gcr.io/overleaf-ops
 --env-add=
 --env-pass-through=
---node-version=12.21.0
+--node-version=10.23.1
 --public-repo=True
---script-version=3.7.0
+--script-version=3.4.0
diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml
index 6a1c097a30..0db448f9b5 100644
--- a/services/document-updater/docker-compose.yml
+++ b/services/document-updater/docker-compose.yml
@@ -6,7 +6,7 @@ version: "2.3"
 
 services:
   test_unit:
-    image: node:12.21.0
+    image: node:10.23.1
     volumes:
       - .:/app
     working_dir: /app
@@ -18,7 +18,7 @@ services:
     user: node
 
   test_acceptance:
-    image: node:12.21.0
+    image: node:10.23.1
     volumes:
       - .:/app
     working_dir: /app
diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json
index 65b0a0d9c2..ac530644db 100644
--- a/services/document-updater/package-lock.json
+++ b/services/document-updater/package-lock.json
@@ -819,12 +819,6 @@
         }
       }
     },
-    "@ungap/promise-all-settled": {
-      "version": "1.1.2",
-      "resolved":
"https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", - "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", - "dev": true - }, "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -893,12 +887,6 @@ "uri-js": "^4.2.2" } }, - "ansi-colors": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", - "dev": true - }, "ansi-escapes": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", @@ -931,16 +919,6 @@ "color-convert": "^1.9.0" } }, - "anymatch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", - "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", - "dev": true, - "requires": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - } - }, "aproba": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", @@ -1134,12 +1112,6 @@ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" }, - "binary-extensions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", - "dev": true - }, "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -1210,19 +1182,10 @@ "concat-map": "0.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", "dev": true }, "bson": { @@ -1306,6 +1269,12 @@ "type-detect": "^1.0.0" } }, + "chai-spies": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", + "integrity": "sha1-ND2Z9RJEIS6LF+ZLk5lv97LCqbE=", + "dev": true + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -1323,22 +1292,6 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, - "chokidar": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", - "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", - "dev": true, - "requires": { - "anymatch": "~3.1.1", - "braces": "~3.0.2", - "fsevents": "~2.3.1", - "glob-parent": "~5.1.0", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.5.0" - } - }, "chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", @@ -1380,12 +1333,6 @@ "resolved": 
"https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" }, - "coffee-script": { - "version": "1.12.7", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz", - "integrity": "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==", - "dev": true - }, "coffeescript": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", @@ -1528,7 +1475,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" + "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, "damerau-levenshtein": { "version": "1.0.6", @@ -1631,9 +1578,9 @@ "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" }, "diff": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", - "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", "dev": true }, "diff-match-patch": { @@ -1800,12 +1747,6 @@ "is-symbol": "^1.0.2" } }, - "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "dev": true - }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -2541,15 +2482,6 @@ "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, "finalhandler": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", @@ -2578,12 +2510,6 @@ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, - "flat": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", - "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "dev": true - }, "flat-cache": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", @@ -2674,13 +2600,6 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, - "fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "optional": true - }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -2971,9 +2890,9 @@ "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" }, "he": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", - "integrity": 
"sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", "dev": true }, "hex2dec": { @@ -3326,15 +3245,6 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, - "is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "requires": { - "binary-extensions": "^2.0.0" - } - }, "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", @@ -3367,23 +3277,11 @@ "is-extglob": "^2.1.1" } }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" }, - "is-plain-obj": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", - "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", - "dev": true - }, "is-regex": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", @@ -3569,12 +3467,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" + "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, "lodash.defaults": { "version": "4.2.0", @@ -3595,7 +3493,7 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" + "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" }, "lodash.memoize": { "version": "4.1.2", @@ -3620,66 +3518,6 @@ "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, - "log-symbols": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", - "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", - "dev": true, - "requires": { - "chalk": "^4.0.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.0", - "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, "logger-sharelatex": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", @@ -3930,8 +3768,7 @@ "minimist": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "optional": true + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, "minipass": { "version": "2.9.0", @@ -3966,128 +3803,43 @@ } }, "mocha": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.3.2.tgz", - "integrity": "sha512-UdmISwr/5w+uXLPKspgoV7/RXZwKRTiTjJ2/AC5ZiEztIoOYdfKb19+9jNmEInzx5pBsCyJQzarAxqIGBNYJhg==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", + "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", "dev": true, "requires": { - "@ungap/promise-all-settled": "1.1.2", - "ansi-colors": "4.1.1", "browser-stdout": "1.3.1", - "chokidar": "3.5.1", - "debug": "4.3.1", - "diff": "5.0.0", - "escape-string-regexp": "4.0.0", - "find-up": "5.0.0", - "glob": "7.1.6", + "commander": "2.15.1", + "debug": "3.1.0", + "diff": "3.5.0", + "escape-string-regexp": "1.0.5", + "glob": "7.1.2", "growl": "1.10.5", - "he": "1.2.0", - "js-yaml": "4.0.0", - "log-symbols": "4.0.0", + "he": "1.1.1", "minimatch": "3.0.4", - "ms": "2.1.3", - "nanoid": "3.1.20", - "serialize-javascript": "5.0.1", - "strip-json-comments": "3.1.1", - "supports-color": "8.1.1", - "which": "2.0.2", - "wide-align": "1.1.3", - "workerpool": "6.1.0", - "yargs": "16.2.0", - "yargs-parser": "20.2.4", - "yargs-unparser": "2.0.0" + "mkdirp": "0.5.1", + "supports-color": "5.4.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - 
"requires": { - "color-convert": "^2.0.1" - } - }, - "argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dev": true, - "requires": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "commander": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=", "dev": true }, "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", "dev": true, "requires": { - "ms": "2.1.2" - }, - "dependencies": { - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - } - } - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true - }, - "find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "requires": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" + "ms": "2.0.0" } }, "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", + "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -4098,140 +3850,19 @@ "path-is-absolute": "^1.0.0" } }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true - }, - "js-yaml": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz", - "integrity": "sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q==", + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "dev": true, "requires": { - "argparse": "^2.0.1" - } - }, - "locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "requires": { - "p-locate": "^5.0.0" + "minimist": "0.0.8" } }, "ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true - }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "requires": { - "yocto-queue": "^0.1.0" - } - }, - "p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "requires": { - "p-limit": "^3.0.2" - } - }, - "path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true - }, - "string-width": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", - "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" - } - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - }, - "supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "wrap-ansi": { - "version": "7.0.0", - "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, - "y18n": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", - "integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==", - "dev": true - }, - "yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", - "dev": true, - "requires": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - } - }, - "yargs-parser": { - "version": "20.2.4", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", - "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true } } @@ -4298,12 +3929,6 @@ "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, - "nanoid": { - "version": "3.1.20", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", - "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==", - "dev": true - }, "natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -4466,12 +4091,6 @@ } } }, - "normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true - }, "npm-bundled": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", @@ -4732,12 +4351,6 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, - "picomatch": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", - "dev": true - }, "pprof": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz", @@ -5571,15 +5184,6 @@ "integrity": "sha512-pVzZdDpWwWqEVVLshWUHjNwuVP7SfcmPraYuqocJp1yo2U1R7P+5QAfDhdItkuoGqIBnBYrtPp7rEPqDn9HlZA==", "dev": true }, - "randombytes": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", - "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", - "dev": true, - "requires": { - "safe-buffer": "^5.1.0" - } - }, "range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -5659,19 +5263,10 @@ "util-deprecate": "^1.0.1" } }, - "readdirp": { - 
"version": "3.5.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", - "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", - "dev": true, - "requires": { - "picomatch": "^2.2.1" - } - }, "redis-commands": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha1-gNLiBpj+aI8icSf/nlFkp90X54U=" + "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==" }, "redis-errors": { "version": "1.2.0", @@ -5888,7 +5483,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", + "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, "safer-buffer": { @@ -5897,13 +5492,21 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sandboxed-module": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.4.tgz", - "integrity": "sha512-AwEPOdO8mg/wJjr876yCHP2DHqVN0MaggEXhp6IIf3bcI5cYoQl9QrrCHSrvToHjvdEiS5x4TVZRgjD2bEmNTA==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", + "integrity": "sha1-bL3sghOAx31FdcjIeDi5ET5kulA=", "dev": true, "requires": { "require-like": "0.1.2", - "stack-trace": "0.0.9" + "stack-trace": "0.0.6" + }, + "dependencies": { + "stack-trace": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", + "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=", + "dev": true + } } }, "saslprep": { @@ -5952,15 +5555,6 @@ } } }, - "serialize-javascript": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz", - "integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==", - "dev": true, - "requires": { - "randombytes": "^2.1.0" - } - }, "serve-static": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", @@ -6183,7 +5777,7 @@ "standard-as-callback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha1-7YuyVkjhWDF1m2Ajvbh+a2CzgSY=" + "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" }, "statuses": { "version": "1.5.0", @@ -6302,7 +5896,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" }, "supports-color": { "version": "5.4.0", @@ -6434,21 +6028,12 @@ "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - 
"is-number": "^7.0.0" - } + "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", + "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", "requires": { "to-space-case": "^1.0.0" } @@ -6456,7 +6041,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", + "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", "requires": { "to-no-case": "^1.0.0" } @@ -6737,12 +6322,6 @@ "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true }, - "workerpool": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz", - "integrity": "sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==", - "dev": true - }, "wrap-ansi": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", @@ -6833,32 +6412,6 @@ "decamelize": "^1.2.0" } }, - "yargs-unparser": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", - "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", - "dev": true, - "requires": { - "camelcase": "^6.0.0", - "decamelize": "^4.0.0", - "flat": "^5.0.2", - "is-plain-obj": "^2.1.0" - }, - "dependencies": { - "camelcase": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", - "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", - "dev": true - }, - "decamelize": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", - "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", - "dev": true - } - } - }, "yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index a3fb2690cb..8ea71edddb 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -36,8 +36,8 @@ "devDependencies": { "babel-eslint": "^10.1.0", "chai": "^3.5.0", + "chai-spies": "^0.7.1", "cluster-key-slot": "^1.0.5", - "coffee-script": "^1.12.7", "eslint": "^6.8.0", "eslint-config-prettier": "^6.10.0", "eslint-config-standard": "^14.1.0", @@ -53,10 +53,10 @@ "eslint-plugin-promise": "^4.2.1", "eslint-plugin-react": "^7.19.0", "eslint-plugin-standard": "^4.0.1", - "mocha": "^8.3.2", + "mocha": "^5.0.1", "prettier": "^2.0.0", "prettier-eslint-cli": "^5.0.0", - "sandboxed-module": "^2.0.4", + "sandboxed-module": "~0.2.0", "sinon": "^9.0.2", "timekeeper": "^2.0.0" } diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index adf045645e..f460a2d3ff 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -12,7 +12,9 @@ * Full docs: 
https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +chai.should() +const { expect } = chai const async = require('async') const Settings = require('settings-sharelatex') const rclient_history = require('@overleaf/redis-wrapper').createClient( diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index 3bc2c793e1..42c3c8af6a 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -1,4 +1,6 @@ const sinon = require('sinon') +const chai = require('chai') +chai.should() const Settings = require('settings-sharelatex') const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient( Settings.redis.project_history diff --git a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js index 4051d4f5a4..fbb8055aae 100644 --- a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js @@ -11,6 +11,9 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +chai.should() + const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js index b07ffae0f5..f050ea22e1 100644 --- a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js @@ -12,6 +12,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +chai.should() const async = require('async') const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') diff --git a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js index 4f4abc2730..c860fce849 100644 --- a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js @@ -12,6 +12,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +chai.should() const async = require('async') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js index 5eac9fa2f3..109f89d434 100644 --- a/services/document-updater/test/acceptance/js/FlushingDocsTests.js +++ b/services/document-updater/test/acceptance/js/FlushingDocsTests.js @@ -14,7 +14,9 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +chai.should() 
+const { expect } = chai const async = require('async') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js index 50dc35059c..a0b9de5773 100644 --- a/services/document-updater/test/acceptance/js/GettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js @@ -12,7 +12,9 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +chai.should() +const { expect } = chai const MockWebApi = require('./helpers/MockWebApi') const DocUpdaterClient = require('./helpers/DocUpdaterClient') diff --git a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js index b32ccb0837..72a6824562 100644 --- a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js +++ b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js @@ -12,7 +12,9 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +chai.should() +const { expect } = chai const MockWebApi = require('./helpers/MockWebApi') const DocUpdaterClient = require('./helpers/DocUpdaterClient') diff --git a/services/document-updater/test/acceptance/js/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js index 7034436440..b765e58b7c 100644 --- a/services/document-updater/test/acceptance/js/RangesTests.js +++ b/services/document-updater/test/acceptance/js/RangesTests.js @@ -12,7 +12,9 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +chai.should() +const { expect } = chai const async = require('async') const { db, ObjectId } = require('../../../app/js/mongodb') diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 7d2307c526..d47931868c 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -1,4 +1,6 @@ const sinon = require('sinon') +const chai = require('chai') +chai.should() const { expect } = require('chai') const Settings = require('settings-sharelatex') const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient( diff --git a/services/document-updater/test/setup.js b/services/document-updater/test/setup.js deleted file mode 100644 index 0fb9848427..0000000000 --- a/services/document-updater/test/setup.js +++ /dev/null @@ -1,37 +0,0 @@ -const chai = require('chai') -const SandboxedModule = require('sandboxed-module') -const sinon = require('sinon') - -// Chai configuration -chai.should() - -// Global stubs -const sandbox = sinon.createSandbox() -const stubs = { - logger: { - debug: sandbox.stub(), - log: sandbox.stub(), - warn: sandbox.stub(), - err: sandbox.stub(), - error: sandbox.stub() - } -} - -// SandboxedModule configuration -SandboxedModule.configure({ - requires: { - 'logger-sharelatex': stubs.logger - }, - globals: { Buffer, JSON, Math, console, process } -}) - -// Mocha hooks 
-exports.mochaHooks = { - beforeEach() { - this.logger = stubs.logger - }, - - afterEach() { - sandbox.reset() - } -} diff --git a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js index d498d6b45c..f208c17bd6 100644 --- a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js +++ b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js @@ -11,7 +11,9 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +const should = chai.should() +const { expect } = chai const modulePath = '../../../../app/js/DiffCodec.js' const SandboxedModule = require('sandboxed-module') diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js index 81ef37f4be..5610c4abc1 100644 --- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -12,6 +12,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/DispatchManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors.js') @@ -23,6 +25,11 @@ describe('DispatchManager', function () { this.DispatchManager = SandboxedModule.require(modulePath, { requires: { './UpdateManager': (this.UpdateManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub(), + warn: sinon.stub() + }), 'settings-sharelatex': (this.settings = { redis: { documentupdater: {} diff --git a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js index 8ca42df757..295a643cee 100644 --- a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js +++ b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js @@ -13,6 +13,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/DocumentManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -31,6 +33,11 @@ describe('DocumentManager', function () { flushDocChangesAsync: sinon.stub(), flushProjectChangesAsync: sinon.stub() }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + warn: sinon.stub() + }), + './DocOpsManager': (this.DocOpsManager = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { Timer = class Timer { @@ -213,6 +220,7 @@ describe('DocumentManager', function () { .stub() .callsArgWith(2, null, null, null, null) this.PersistenceManager.setDoc = sinon.stub().yields() + this.DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) return this.DocumentManager.flushDocIfLoaded( this.project_id, this.doc_id, @@ -228,6 +236,7 @@ describe('DocumentManager', function () { it('should not write anything to the persistence layer', function () { this.PersistenceManager.setDoc.called.should.equal(false) + return 
this.DocOpsManager.flushDocOpsToMongo.called.should.equal(false) }) it('should call the callback without error', function () { diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js index df3261b0f4..263f1cd094 100644 --- a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js @@ -11,6 +11,7 @@ */ const SandboxedModule = require('sandboxed-module') const sinon = require('sinon') +require('chai').should() const modulePath = require('path').join( __dirname, '../../../../app/js/HistoryManager' @@ -32,6 +33,11 @@ describe('HistoryManager', function () { } } }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub(), + debug: sinon.stub() + }), './DocumentManager': (this.DocumentManager = {}), './HistoryRedisManager': (this.HistoryRedisManager = {}), './RedisManager': (this.RedisManager = {}), diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js index f9b719991a..1b266685d1 100644 --- a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js @@ -12,6 +12,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/HistoryRedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -39,7 +41,8 @@ describe('HistoryRedisManager', function () { } }) } - } + }, + 'logger-sharelatex': { log() {} } } }) this.doc_id = 'doc-id-123' diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 64477eb944..07e9d93c9a 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -12,6 +12,7 @@ describe('HttpController', function () { flushProjectChangesAsync: sinon.stub() }), './ProjectManager': (this.ProjectManager = {}), + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), './ProjectFlusher': { flushAllProjects() {} }, './DeleteQueueManager': (this.DeleteQueueManager = {}), './Metrics': (this.Metrics = {}), diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js index 4f700cc144..6b3c3b539e 100644 --- a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js @@ -25,6 +25,7 @@ describe('LockManager - checking the lock', function () { const existsStub = sinon.stub() const mocks = { + 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient() { return { diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index 3d6cf7a5ba..a04db7614f 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ 
b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -27,6 +27,10 @@ describe('LockManager - releasing the lock', function () { eval: sinon.stub() } const mocks = { + 'logger-sharelatex': { + log() {}, + error() {} + }, '@overleaf/redis-wrapper': { createClient: () => this.client }, diff --git a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js index 0b938c3753..d56a244510 100644 --- a/services/document-updater/test/unit/js/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js @@ -15,6 +15,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/LockManager.js' const SandboxedModule = require('sandboxed-module') @@ -23,6 +25,7 @@ describe('LockManager - getting the lock', function () { let Profiler this.LockManager = SandboxedModule.require(modulePath, { requires: { + 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient: () => { return { auth() {} } diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js index fb49e94aa1..02c279dd11 100644 --- a/services/document-updater/test/unit/js/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -12,6 +12,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/LockManager.js' const SandboxedModule = require('sandboxed-module') @@ -20,6 +22,7 @@ describe('LockManager - trying the lock', function () { let Profiler this.LockManager = SandboxedModule.require(modulePath, { requires: { + 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient: () => { return { diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js index 4015ef2662..1013752dee 100644 --- a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -11,6 +11,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/PersistenceManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -36,6 +38,11 @@ describe('PersistenceManager', function () { })()), inc: sinon.stub() }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + err: sinon.stub(), + error: sinon.stub() + }), './Errors': Errors } }) diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 1ff3d53ded..8b62bd83f3 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ 
b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -12,6 +12,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/ProjectHistoryRedisManager.js' const SandboxedModule = require('sandboxed-module') const tk = require('timekeeper') @@ -45,7 +47,13 @@ describe('ProjectHistoryRedisManager', function () { '@overleaf/redis-wrapper': { createClient: () => this.rclient }, + 'logger-sharelatex': { + log() {} + }, './Metrics': (this.metrics = { summary: sinon.stub() }) + }, + globals: { + JSON: (this.JSON = JSON) } } )) @@ -128,7 +136,7 @@ describe('ProjectHistoryRedisManager', function () { return this.ProjectHistoryRedisManager.queueOps .calledWithExactly( this.project_id, - JSON.stringify(update), + this.JSON.stringify(update), this.callback ) .should.equal(true) @@ -176,7 +184,7 @@ describe('ProjectHistoryRedisManager', function () { return this.ProjectHistoryRedisManager.queueOps .calledWithExactly( this.project_id, - JSON.stringify(update), + this.JSON.stringify(update), this.callback ) .should.equal(true) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js index d8342c0cff..9589d42054 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js @@ -13,6 +13,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') @@ -24,6 +26,10 @@ describe('ProjectManager - flushAndDeleteProject', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), './HistoryManager': (this.HistoryManager = { flushProjectChanges: sinon.stub().callsArg(2) }), diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js index 70ae03e861..c0bb668f49 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js @@ -15,6 +15,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') @@ -26,6 +28,10 @@ describe('ProjectManager - flushProject', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), './HistoryManager': (this.HistoryManager = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { diff --git 
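Worth pausing on the `globals: { JSON: (this.JSON = JSON) }` addition in the ProjectHistoryRedisManager spec above: sandboxed-module evaluates the module under test in its own context, and (as the deleted test/setup.js showed with its `globals: { Buffer, JSON, Math, console, process }` list) the spec must pass in every global the module needs. Handing over the spec's own `JSON` object means spec and module share one object, so a method swapped on it in the test is seen inside the module. A minimal sketch of the idea, with a hypothetical `./Serializer` module standing in for the real ones:

```js
const SandboxedModule = require('sandboxed-module')

// Share our JSON object with the sandbox. The module's global `JSON` is now
// the very same object as `sharedJSON` here, so replacing a method on it
// from the test is visible to the code under test.
const sharedJSON = JSON
const Serializer = SandboxedModule.require('./Serializer', { // hypothetical path
  requires: {}, // stub out any require()d dependencies here
  globals: { JSON: sharedJSON }
})

// Swap, exercise, restore:
const realStringify = sharedJSON.stringify
sharedJSON.stringify = () => '"stubbed"'
// ... call into Serializer and assert on the stubbed output ...
sharedJSON.stringify = realStringify
```
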
a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js index 467a190168..db9f31e4ad 100644 --- a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js @@ -11,6 +11,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors.js') @@ -23,6 +25,10 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), './HistoryManager': (this.HistoryManager = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index 896517679c..aa3db813a0 100644 --- a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -22,11 +22,17 @@ describe('ProjectManager', function () { } this.Metrics.Timer.prototype.done = sinon.stub() + this.logger = { + log: sinon.stub(), + error: sinon.stub() + } + this.ProjectManager = SandboxedModule.require(modulePath, { requires: { './RedisManager': this.RedisManager, './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager, './DocumentManager': this.DocumentManager, + 'logger-sharelatex': this.logger, './HistoryManager': this.HistoryManager, './Metrics': this.Metrics } diff --git a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js index c857153888..a336125a1c 100644 --- a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js +++ b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js @@ -13,13 +13,23 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +const should = chai.should() +const { expect } = chai const modulePath = '../../../../app/js/RangesManager.js' const SandboxedModule = require('sandboxed-module') describe('RangesManager', function () { beforeEach(function () { - this.RangesManager = SandboxedModule.require(modulePath) + this.RangesManager = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': (this.logger = { + error: sinon.stub(), + log: sinon.stub(), + warn: sinon.stub() + }) + } + }) this.doc_id = 'doc-id-123' this.project_id = 'project-id-123' @@ -358,6 +368,11 @@ describe('RangesManager', function () { beforeEach(function () { this.RangesManager = SandboxedModule.require(modulePath, { requires: { + 'logger-sharelatex': (this.logger = { + error: sinon.stub(), + log: sinon.stub(), + warn: sinon.stub() + }), './RangesTracker': (this.RangesTracker = SandboxedModule.require( '../../../../app/js/RangesTracker.js' )) diff --git 
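The other recurring hunk in this commit is the `logger-sharelatex` stub: with the shared setup file gone, each spec re-declares its own stubbed logger and hands it to the module under test through sandboxed-module's `requires` map. Condensed to a sketch (the manager name and path are placeholders):

```js
const sinon = require('sinon')
const SandboxedModule = require('sandboxed-module')

describe('SomeManager', function () {
  beforeEach(function () {
    // Per-spec logger stubs: tests can assert on calls such as
    // this.logger.error.called, and no real log output reaches the test run.
    this.logger = {
      log: sinon.stub(),
      warn: sinon.stub(),
      error: sinon.stub()
    }
    this.SomeManager = SandboxedModule.require(
      '../../../../app/js/SomeManager.js', // placeholder path
      { requires: { 'logger-sharelatex': this.logger } }
    )
  })
})
```
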
a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js index 8fef08051f..e84d557501 100644 --- a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js +++ b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js @@ -11,7 +11,9 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +const should = chai.should() +const { expect } = chai const modulePath = '../../../../app/js/RateLimitManager.js' const SandboxedModule = require('sandboxed-module') @@ -20,6 +22,7 @@ describe('RateLimitManager', function () { let Timer this.RateLimitManager = SandboxedModule.require(modulePath, { requires: { + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), 'settings-sharelatex': (this.settings = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js index 83cd5f99ce..c5e4647df4 100644 --- a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -11,6 +11,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/RealTimeRedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -43,6 +45,7 @@ describe('RealTimeRedisManager', function () { } } }, + 'logger-sharelatex': { log() {} }, crypto: (this.crypto = { randomBytes: sinon .stub() diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index d14d0c23de..739aa88ab8 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -12,6 +12,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/RedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -26,6 +28,11 @@ describe('RedisManager', function () { tk.freeze(new Date()) this.RedisManager = SandboxedModule.require(modulePath, { requires: { + 'logger-sharelatex': (this.logger = { + error: sinon.stub(), + log: sinon.stub(), + warn: sinon.stub() + }), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), 'settings-sharelatex': (this.settings = { documentupdater: { logHashErrors: { write: true, read: true } }, @@ -115,6 +122,9 @@ describe('RedisManager', function () { }) }), './Errors': Errors + }, + globals: { + JSON: (this.JSON = JSON) } }) @@ -914,9 +924,8 @@ describe('RedisManager', function () { this.RedisManager.getDocVersion .withArgs(this.doc_id) .yields(null, this.version - this.ops.length) - this.stringifyStub = sinon - .stub(JSON, 'stringify') - .callsFake(() => '["bad bytes! 
\u0000 <- here"]') + this._stringify = JSON.stringify + this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]' return this.RedisManager.updateDocument( this.project_id, this.doc_id, @@ -930,7 +939,7 @@ describe('RedisManager', function () { }) afterEach(function () { - this.stringifyStub.restore() + return (this.JSON.stringify = this._stringify) }) it('should log an error', function () { @@ -1118,9 +1127,8 @@ describe('RedisManager', function () { describe('with null bytes in the serialized doc lines', function () { beforeEach(function () { - this.stringifyStub = sinon - .stub(JSON, 'stringify') - .callsFake(() => '["bad bytes! \u0000 <- here"]') + this._stringify = JSON.stringify + this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]' return this.RedisManager.putDocInMemory( this.project_id, this.doc_id, @@ -1134,7 +1142,7 @@ describe('RedisManager', function () { }) afterEach(function () { - this.stringifyStub.restore() + return (this.JSON.stringify = this._stringify) }) it('should log an error', function () { diff --git a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js index a5e3a8599a..8ea99aee5c 100644 --- a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js +++ b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js @@ -14,6 +14,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const text = require('../../../../app/js/sharejs/types/text') +require('chai').should() const RangesTracker = require('../../../../app/js/RangesTracker') describe('ShareJS text type', function () { diff --git a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js index 1b4e4422a6..ddf98775d8 100644 --- a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js +++ b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js @@ -11,7 +11,9 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const { expect } = require('chai') +const chai = require('chai') +const should = chai.should() +const { expect } = chai const modulePath = '../../../../app/js/ShareJsDB.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js index d6e9700bff..2ab5ba617e 100644 --- a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js +++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js @@ -10,6 +10,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/ShareJsUpdateManager.js' const SandboxedModule = require('sandboxed-module') const crypto = require('crypto') @@ -33,6 +35,7 @@ describe('ShareJsUpdateManager', function () { return (this.rclient = { auth() {} }) } }, + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), './RealTimeRedisManager': (this.RealTimeRedisManager = {}), './Metrics': (this.metrics = { inc: sinon.stub() }) }, diff --git 
a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js index cac51b9f5f..4e39089490 100644 --- a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js +++ b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js @@ -12,6 +12,8 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() const modulePath = '../../../../app/js/UpdateManager.js' const SandboxedModule = require('sandboxed-module') @@ -29,6 +31,7 @@ describe('UpdateManager', function () { './RealTimeRedisManager': (this.RealTimeRedisManager = {}), './ShareJsUpdateManager': (this.ShareJsUpdateManager = {}), './HistoryManager': (this.HistoryManager = {}), + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), './Metrics': (this.Metrics = { Timer: (Timer = (function () { Timer = class Timer { From 4d70bd664fd153a69f56e811be2f85c158bba819 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 1 Apr 2021 15:51:00 -0400 Subject: [PATCH 729/769] Reintroduce Node 12 and metrics upgrades These changes were previously merged, not deployed, and reverted. This reverts the revert. This reverts commit a6b8c6c658b33b6eee78b8b99e43308f32211ae2, reversing changes made to 93c98921372eed4244d22fce800716cb27eca299. --- services/document-updater/.mocharc.json | 3 + services/document-updater/.nvmrc | 2 +- services/document-updater/Dockerfile | 2 +- services/document-updater/Makefile | 6 +- .../document-updater/app/js/RangesTracker.js | 8 +- services/document-updater/buildscript.txt | 4 +- services/document-updater/docker-compose.yml | 4 +- services/document-updater/package-lock.json | 1583 ++++++++++++----- services/document-updater/package.json | 8 +- .../js/ApplyingUpdatesToADocTests.js | 4 +- .../ApplyingUpdatesToProjectStructureTests.js | 2 - .../acceptance/js/DeletingADocumentTests.js | 3 - .../acceptance/js/DeletingAProjectTests.js | 2 - .../acceptance/js/FlushingAProjectTests.js | 2 - .../test/acceptance/js/FlushingDocsTests.js | 4 +- .../acceptance/js/GettingADocumentTests.js | 4 +- .../acceptance/js/GettingProjectDocsTests.js | 4 +- .../test/acceptance/js/RangesTests.js | 4 +- .../acceptance/js/SettingADocumentTests.js | 2 - services/document-updater/test/setup.js | 37 + .../test/unit/js/DiffCodec/DiffCodecTests.js | 4 +- .../DispatchManager/DispatchManagerTests.js | 7 - .../DocumentManager/DocumentManagerTests.js | 9 - .../js/HistoryManager/HistoryManagerTests.js | 6 - .../HistoryRedisManagerTests.js | 5 +- .../js/HttpController/HttpControllerTests.js | 1 - .../unit/js/LockManager/CheckingTheLock.js | 1 - .../unit/js/LockManager/ReleasingTheLock.js | 4 - .../test/unit/js/LockManager/getLockTests.js | 3 - .../test/unit/js/LockManager/tryLockTests.js | 3 - .../PersistenceManagerTests.js | 7 - .../ProjectHistoryRedisManagerTests.js | 12 +- .../flushAndDeleteProjectTests.js | 6 - .../js/ProjectManager/flushProjectTests.js | 6 - .../js/ProjectManager/getProjectDocsTests.js | 6 - .../js/ProjectManager/updateProjectTests.js | 6 - .../js/RangesManager/RangesManagerTests.js | 19 +- .../js/RateLimitManager/RateLimitManager.js | 5 +- .../RealTimeRedisManagerTests.js | 3 - .../unit/js/RedisManager/RedisManagerTests.js | 24 +- .../unit/js/ShareJS/TextTransformTests.js | 1 - .../test/unit/js/ShareJsDB/ShareJsDBTests.js | 4 +- .../ShareJsUpdateManagerTests.js | 3 - 
.../js/UpdateManager/UpdateManagerTests.js | 3 - 44 files changed, 1248 insertions(+), 588 deletions(-) create mode 100644 services/document-updater/.mocharc.json create mode 100644 services/document-updater/test/setup.js diff --git a/services/document-updater/.mocharc.json b/services/document-updater/.mocharc.json new file mode 100644 index 0000000000..dc3280aa96 --- /dev/null +++ b/services/document-updater/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index 2baa2d433a..e68b860383 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -10.23.1 +12.21.0 diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 2da67d2436..4f417a2a4b 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -2,7 +2,7 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -FROM node:10.23.1 as base +FROM node:12.21.0 as base WORKDIR /app diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile index 596aa47fdb..7591d2a689 100644 --- a/services/document-updater/Makefile +++ b/services/document-updater/Makefile @@ -21,8 +21,10 @@ DOCKER_COMPOSE_TEST_UNIT = \ COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) clean: - docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local format: $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format diff --git a/services/document-updater/app/js/RangesTracker.js b/services/document-updater/app/js/RangesTracker.js index 6107acf300..5991ee2993 100644 --- a/services/document-updater/app/js/RangesTracker.js +++ b/services/document-updater/app/js/RangesTracker.js @@ -706,8 +706,10 @@ const load = function () { return result } else if (c1.op.i != null && c2.op.d != null) { return 1 - } else { + } else if (c1.op.d != null && c2.op.i != null) { return -1 + } else { + return 0 } }) @@ -728,8 +730,10 @@ const load = function () { return result } else if (a.i != null && b.d != null) { return 1 - } else { + } else if (a.d != null && b.i != null) { return -1 + } else { + return 0 } }) diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index 0d8b15d9f5..2d2f00495e 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -3,6 +3,6 @@ document-updater --docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= ---node-version=10.23.1 +--node-version=12.21.0 --public-repo=True ---script-version=3.4.0 +--script-version=3.7.0 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 0db448f9b5..6a1c097a30 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -6,7 +6,7 @@ version: "2.3" services: test_unit: - image: node:10.23.1 + image: node:12.21.0 volumes: - .:/app working_dir: /app @@ -18,7 +18,7 @@ services: user: node test_acceptance: - image: node:10.23.1 + image: node:12.21.0 volumes: - .:/app working_dir: /app diff --git 
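The RangesTracker.js hunks in the commit above are its only production-code change: both sort comparators previously fell through to `return -1` for pairs matching neither branch, so two equivalent entries compared as "less than" in both orders, which is not a consistent ordering. `Array.prototype.sort` in the V8 shipped with Node 12 is a stable sort and assumes the comparator defines a consistent total order, so ties must compare as 0. A self-contained sketch of the corrected shape (sample `op` data invented for illustration):

```js
// Sample tracked-change ops: p = position, i = inserted text, d = deleted text.
const changes = [
  { op: { p: 5, d: 'x' } },
  { op: { p: 5, i: 'y' } },
  { op: { p: 2, i: 'z' } }
]

// Position comparison first (paraphrased; the diff only shows `return result`).
// At equal positions, deletes sort before inserts; genuinely equivalent
// entries now compare as 0, making this a consistent total order.
changes.sort((c1, c2) => {
  const result = c1.op.p - c2.op.p
  if (result !== 0) {
    return result
  } else if (c1.op.i != null && c2.op.d != null) {
    return 1
  } else if (c1.op.d != null && c2.op.i != null) {
    return -1
  } else {
    return 0 // previously an unconditional -1 fell through here
  }
})
```
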
a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index ac530644db..a1551e3502 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -170,6 +170,192 @@ "teeny-request": "^6.0.0" } }, + "@google-cloud/debug-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", + "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "acorn": "^8.0.0", + "coffeescript": "^2.0.0", + "console-log-level": "^1.4.0", + "extend": "^3.0.2", + "findit2": "^2.2.3", + "gcp-metadata": "^4.0.0", + "p-limit": "^3.0.1", + "semver": "^7.0.0", + "source-map": "^0.6.1", + "split": "^1.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", + "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^7.0.2", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "acorn": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.0.tgz", + "integrity": "sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", + "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": 
"sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", + "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@google-cloud/logging": { "version": "7.3.0", 
"resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", @@ -214,6 +400,201 @@ "extend": "^3.0.2" } }, + "@google-cloud/profiler": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.1.tgz", + "integrity": "sha512-qk08aDxTaLnu+NoNEh5Jh+Fs5iR8lRLMr5Mb3YJDoZw72jHJI4f5N5F2JWt1xRc9D6da4gA6stBUJrbfbubvGQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@types/console-log-level": "^1.4.0", + "@types/semver": "^7.0.0", + "console-log-level": "^1.4.0", + "delay": "^5.0.0", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "parse-duration": "^1.0.0", + "pprof": "3.0.0", + "pretty-ms": "^7.0.0", + "protobufjs": "~6.10.0", + "semver": "^7.0.0", + "teeny-request": "^7.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", + "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^7.0.2", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "@types/node": { + "version": "13.13.48", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", + "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", + "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", + "integrity": 
"sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "protobufjs": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", + "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + } + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@google-cloud/projectify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", @@ -224,6 +605,201 @@ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, + "@google-cloud/trace-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.3.tgz", + "integrity": "sha512-f+5DX7n6QpDlHA+4kr81z69SLAdrlvd9T8skqCMgnYvtXx14AwzXZyzEDf3jppOYzYoqPPJv8XYiyYHHmYD0BA==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@opencensus/propagation-stackdriver": "0.0.22", + "builtin-modules": "^3.0.0", + "console-log-level": "^1.4.0", + "continuation-local-storage": "^3.2.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^7.0.0", + "hex2dec": "^1.0.1", + "is": "^3.2.0", + "methods": "^1.1.1", + "require-in-the-middle": "^5.0.0", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "source-map-support": "^0.5.16", + "uuid": "^8.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", + "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^7.0.2", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "@opencensus/core": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", + "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", + "requires": { + "continuation-local-storage": "^3.2.1", + "log-driver": "^1.2.7", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "uuid": "^8.0.0" + } + }, + "@opencensus/propagation-stackdriver": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", + "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", + "requires": { + "@opencensus/core": "^0.0.22", + "hex2dec": "^1.0.1", + "uuid": "^8.0.0" + } + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": 
"sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", + "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", + "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": 
"8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@grpc/grpc-js": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", @@ -264,9 +840,9 @@ } }, "@overleaf/metrics": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.4.1.tgz", - "integrity": "sha512-OgjlzuC+2gPdIEDHhmd9LDMu01tk1ln0cJhw1727BZ+Wgf2Z1hjuHRt4JeCkf+PFTHwJutVYT8v6IGPpNEPtbg==", + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.5.1.tgz", + "integrity": "sha512-RLHxkMF7Y3725L3QwXo9cIn2gGobsMYUGuxKxg7PVMrPTMsomHEMeG7StOxCO7ML1Z/BwB/9nsVYNrsRdAJtKg==", "requires": { "@google-cloud/debug-agent": "^5.1.2", "@google-cloud/profiler": "^4.0.3", @@ -275,321 +851,6 @@ "prom-client": "^11.1.3", "underscore": "~1.6.0", "yn": "^3.1.1" - }, - "dependencies": { - "@google-cloud/common": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", - "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", - "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^6.1.1", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/debug-agent": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", - "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "acorn": "^8.0.0", - "coffeescript": "^2.0.0", - "console-log-level": "^1.4.0", - "extend": "^3.0.2", - "findit2": "^2.2.3", - "gcp-metadata": "^4.0.0", - "p-limit": "^3.0.1", - "semver": "^7.0.0", - "source-map": "^0.6.1", - "split": "^1.0.0" - } - }, - "@google-cloud/profiler": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.0.tgz", - "integrity": "sha512-9e1zXRctLSUHAoAsFGwE4rS28fr0siiG+jXl5OpwTK8ZAUlxb70aosHaZGdsv8YXrYKjuiufjRZ/OXCs0XLI9g==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@types/console-log-level": "^1.4.0", - "@types/semver": "^7.0.0", - "console-log-level": "^1.4.0", - "delay": "^4.0.1", - "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "parse-duration": "^0.4.4", - "pprof": "3.0.0", - "pretty-ms": "^7.0.0", - "protobufjs": "~6.10.0", - "semver": "^7.0.0", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, - "@google-cloud/trace-agent": { - "version": "5.1.1", - 
"resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.1.tgz", - "integrity": "sha512-YTcK0RLN90pLCprg0XC8uV4oAVd79vsXhkcxmEVwiOOYjUDvSrAhb7y/0SY606zgfhJHmUTNb/fZSWEtZP/slQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@opencensus/propagation-stackdriver": "0.0.22", - "builtin-modules": "^3.0.0", - "console-log-level": "^1.4.0", - "continuation-local-storage": "^3.2.1", - "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "google-auth-library": "^6.0.0", - "hex2dec": "^1.0.1", - "is": "^3.2.0", - "methods": "^1.1.1", - "require-in-the-middle": "^5.0.0", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "source-map-support": "^0.5.16", - "uuid": "^8.0.0" - } - }, - "@opencensus/core": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", - "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", - "requires": { - "continuation-local-storage": "^3.2.1", - "log-driver": "^1.2.7", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "uuid": "^8.0.0" - } - }, - "@opencensus/propagation-stackdriver": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", - "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", - "requires": { - "@opencensus/core": "^0.0.22", - "hex2dec": "^1.0.1", - "uuid": "^8.0.0" - } - }, - "@types/node": { - "version": "13.13.33", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.33.tgz", - "integrity": "sha512-1B3GM1yuYsFyEvBb+ljBqWBOylsWDYioZ5wpu8AhXdIhq20neXS7eaSC8GkwHE0yQYGiOIV43lMsgRYTgKZefQ==" - }, - "@types/semver": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", - "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" - }, - "acorn": { - "version": "8.0.4", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.0.4.tgz", - "integrity": "sha512-XNP0PqF1XD19ZlLKvB7cMmnZswW4C/03pRHgirB30uSJTaS3A3V1/P4sS3HPvFmjoriPCJQs+JDSbm4bL1TxGQ==" - }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, - "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", - "requires": { - "ms": "2.1.2" - } - }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.0.1.tgz", - "integrity": "sha512-jOin8xRZ/UytQeBpSXFqIzqU7Fi5TqgPNLlUsSB8kjJ76+FiGBfImF8KJu++c6J4jOldfJUtt0YmkRj2ZpSHTQ==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": 
"https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-auth-library": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.3.tgz", - "integrity": "sha512-m9mwvY3GWbr7ZYEbl61isWmk+fvTmOt0YNUfPOUY2VH8K5pZlAIWJjxEi0PqR3OjMretyiQLI6GURMrPSwHQ2g==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.1.0.tgz", - "integrity": "sha512-4d8N6Lk8TEAHl9vVoRVMh9BNOKWVgl2DdNtr3428O75r3QFrF/a5MMu851VmK0AA8+iSvbwRv69k5XnMLURGhg==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0", - "mime": "^2.2.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "requires": { - "yocto-queue": "^0.1.0" - } - }, - "parse-duration": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.4.4.tgz", - "integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg==" - }, - "pretty-ms": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", - "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", - "requires": { - "parse-ms": "^2.1.0" - } - }, - "protobufjs": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", - 
"integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } - }, - "require-in-the-middle": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.0.3.tgz", - "integrity": "sha512-p/ICV8uMlqC4tjOYabLMxAWCIKa0YUQgZZ6KDM0xgXJNgdGQ1WmL2A07TwmrZw+wi6ITUFKzH5v3n+ENEyXVkA==", - "requires": { - "debug": "^4.1.1", - "module-details-from-path": "^1.0.3", - "resolve": "^1.12.0" - } - }, - "semver": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", - "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==" - }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, - "uuid": { - "version": "8.3.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.1.tgz", - "integrity": "sha512-FOmRr+FmWEIG8uhZv6C2bTgEVXsHk08kE7mPlrBbEe+c3r9pjceVPgupIfNIhc4yx55H69OXANrUaSuu9eInKg==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } } }, "@overleaf/o-error": { @@ -734,7 +995,7 @@ "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" + "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" }, "@types/eslint-visitor-keys": { "version": "1.0.0", @@ -766,6 +1027,11 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.15.tgz", "integrity": "sha512-daFGV9GSs6USfPgxceDA8nlSe48XrVCJfDeYm7eokxq/ye7iuOH87hKXgMtEAVLFapkczbZsx868PMDT1Y0a6A==" }, + "@types/semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" + }, "@typescript-eslint/experimental-utils": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", @@ -819,6 +1085,12 @@ } } }, + "@ungap/promise-all-settled": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", + "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", + "dev": true + }, "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -887,6 +1159,12 @@ "uri-js": "^4.2.2" } }, + "ansi-colors": { + "version": "4.1.1", + 
"resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, "ansi-escapes": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", @@ -919,6 +1197,16 @@ "color-convert": "^1.9.0" } }, + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, "aproba": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", @@ -1112,10 +1400,16 @@ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", "requires": { "file-uri-to-path": "1.0.0" } @@ -1182,10 +1476,19 @@ "concat-map": "0.0.1" } }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, "bson": { @@ -1204,9 +1507,9 @@ "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" }, "builtin-modules": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", - "integrity": "sha1-qtl8FRMet2tltQ7yCOdYTNdqdIQ=" + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz", + "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==" }, "bunyan": { "version": "0.22.3", @@ -1269,12 +1572,6 @@ "type-detect": "^1.0.0" } }, - "chai-spies": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/chai-spies/-/chai-spies-0.7.1.tgz", - "integrity": "sha1-ND2Z9RJEIS6LF+ZLk5lv97LCqbE=", - "dev": true - }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -1292,6 +1589,22 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "chokidar": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", + "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": 
"~3.0.2", + "fsevents": "~2.3.1", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.5.0" + } + }, "chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", @@ -1333,10 +1646,11 @@ "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" }, - "coffeescript": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", - "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + "coffee-script": { + "version": "1.12.7", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz", + "integrity": "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==", + "dev": true }, "color-convert": { "version": "1.9.3", @@ -1415,7 +1729,7 @@ "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" + "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, "contains-path": { "version": "0.1.0", @@ -1475,7 +1789,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" + "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, "damerau-levenshtein": { "version": "1.0.6", @@ -1543,9 +1857,9 @@ } }, "delay": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", - "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", + "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==" }, "delayed-stream": { "version": "1.0.0", @@ -1578,9 +1892,9 @@ "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" }, "diff": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", "dev": true }, "diff-match-patch": { @@ -1747,6 +2061,12 @@ "is-symbol": "^1.0.2" } }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -2480,7 +2800,16 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + 
"to-regex-range": "^5.0.1" + } }, "finalhandler": { "version": "1.1.2", @@ -2510,6 +2839,12 @@ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, + "flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, "flat-cache": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", @@ -2600,6 +2935,13 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -2890,9 +3232,9 @@ "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" }, "he": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "dev": true }, "hex2dec": { @@ -3245,6 +3587,15 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", @@ -3277,11 +3628,23 @@ "is-extglob": "^2.1.1" } }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, "is-regex": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", @@ -3467,12 +3830,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" + "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, 
"lodash.defaults": { "version": "4.2.0", @@ -3493,7 +3856,7 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" + "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" }, "lodash.memoize": { "version": "4.1.2", @@ -3518,6 +3881,66 @@ "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, + "log-symbols": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", + "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", + "dev": true, + "requires": { + "chalk": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, "logger-sharelatex": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", @@ -3768,7 +4191,8 @@ "minimist": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", + "optional": true }, "minipass": { "version": "2.9.0", @@ -3803,43 +4227,128 @@ } }, "mocha": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.3.2.tgz", + "integrity": "sha512-UdmISwr/5w+uXLPKspgoV7/RXZwKRTiTjJ2/AC5ZiEztIoOYdfKb19+9jNmEInzx5pBsCyJQzarAxqIGBNYJhg==", "dev": true, "requires": { + "@ungap/promise-all-settled": "1.1.2", + "ansi-colors": "4.1.1", 
"browser-stdout": "1.3.1", - "commander": "2.15.1", - "debug": "3.1.0", - "diff": "3.5.0", - "escape-string-regexp": "1.0.5", - "glob": "7.1.2", + "chokidar": "3.5.1", + "debug": "4.3.1", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.6", "growl": "1.10.5", - "he": "1.1.1", + "he": "1.2.0", + "js-yaml": "4.0.0", + "log-symbols": "4.0.0", "minimatch": "3.0.4", - "mkdirp": "0.5.1", - "supports-color": "5.4.0" + "ms": "2.1.3", + "nanoid": "3.1.20", + "serialize-javascript": "5.0.1", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "which": "2.0.2", + "wide-align": "1.1.3", + "workerpool": "6.1.0", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" }, "dependencies": { - "commander": { - "version": "2.15.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=", + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "dev": true, "requires": { - "ms": "2.0.0" + "ms": "2.1.2" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" } }, "glob": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -3850,19 +4359,140 @@ "path-is-absolute": "^1.0.0" } }, - "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "js-yaml": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz", + "integrity": "sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q==", "dev": true, "requires": { - "minimist": "0.0.8" + "argparse": "^2.0.1" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" } }, "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "string-width": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "y18n": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", + "integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==", + "dev": true + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", "dev": true } } @@ -3929,6 +4559,12 @@ "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, + "nanoid": { + "version": "3.1.20", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", + "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==", + "dev": true + }, "natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -3942,9 +4578,9 @@ "optional": true }, "needle": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/needle/-/needle-2.5.2.tgz", - "integrity": 
"sha512-LbRIwS9BfkPvNwNHlsA41Q29kL2L/6VaOJ0qisM5lLWsTV3nP15abO5ITL6L81zqFhzjRKDAYjpcBcwM0AVvLQ==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/needle/-/needle-2.6.0.tgz", + "integrity": "sha512-KKYdza4heMsEfSWD7VPUIz3zX2XDwOyX2d+geb4vrERZMT5RMU6ujjaD+I5Yr54uZxQ2w6XRTAhHBbSCyovZBg==", "requires": { "debug": "^3.2.6", "iconv-lite": "^0.4.4", @@ -3960,9 +4596,9 @@ } }, "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" } } }, @@ -4091,6 +4727,12 @@ } } }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, "npm-bundled": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", @@ -4303,10 +4945,15 @@ "callsites": "^3.0.0" } }, + "parse-duration": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-1.0.0.tgz", + "integrity": "sha512-X4kUkCTHU1N/kEbwK9FpUJ0UZQa90VzeczfS704frR30gljxDG0pSziws06XlK+CGRSo/1wtG1mFIdBFQTMQNw==" + }, "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" + "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" }, "parseurl": { "version": "1.3.3", @@ -4351,6 +4998,17 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, + "picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true + }, + "pify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" + }, "pprof": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz", @@ -4369,9 +5027,14 @@ }, "dependencies": { "@types/node": { - "version": "13.13.33", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.33.tgz", - "integrity": "sha512-1B3GM1yuYsFyEvBb+ljBqWBOylsWDYioZ5wpu8AhXdIhq20neXS7eaSC8GkwHE0yQYGiOIV43lMsgRYTgKZefQ==" + "version": "13.13.48", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", + "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" + }, + "delay": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/delay/-/delay-4.4.1.tgz", + "integrity": "sha512-aL3AhqtfhOlT/3ai6sWXeqwnw63ATNpnUiN4HL7x9q+My5QtHlO3OIkasmug9LKzpheLdmUKGRKnYXYAS7FQkQ==" }, "p-limit": { "version": "3.1.0", @@ -4381,11 +5044,6 @@ "yocto-queue": "^0.1.0" } }, - "pify": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", - "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" - }, 
"protobufjs": { "version": "6.10.2", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", @@ -5066,6 +5724,14 @@ } } }, + "pretty-ms": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", + "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", + "requires": { + "parse-ms": "^2.1.0" + } + }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -5184,6 +5850,15 @@ "integrity": "sha512-pVzZdDpWwWqEVVLshWUHjNwuVP7SfcmPraYuqocJp1yo2U1R7P+5QAfDhdItkuoGqIBnBYrtPp7rEPqDn9HlZA==", "dev": true }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, "range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -5263,10 +5938,19 @@ "util-deprecate": "^1.0.1" } }, + "readdirp": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", + "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, "redis-commands": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==" + "integrity": "sha1-gNLiBpj+aI8icSf/nlFkp90X54U=" }, "redis-errors": { "version": "1.2.0", @@ -5370,6 +6054,31 @@ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", "dev": true }, + "require-in-the-middle": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz", + "integrity": "sha512-M2rLKVupQfJ5lf9OvqFGIT+9iVLnTmjgbOmpil12hiSQNn5zJTKGPoIisETNjfK+09vP3rpm1zJajmErpr2sEQ==", + "requires": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.12.0" + }, + "dependencies": { + "debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", @@ -5483,7 +6192,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", + "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", "optional": true }, "safer-buffer": { @@ -5492,21 +6201,13 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sandboxed-module": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz", - "integrity": 
"sha1-bL3sghOAx31FdcjIeDi5ET5kulA=", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.4.tgz", + "integrity": "sha512-AwEPOdO8mg/wJjr876yCHP2DHqVN0MaggEXhp6IIf3bcI5cYoQl9QrrCHSrvToHjvdEiS5x4TVZRgjD2bEmNTA==", "dev": true, "requires": { "require-like": "0.1.2", - "stack-trace": "0.0.6" - }, - "dependencies": { - "stack-trace": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", - "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=", - "dev": true - } + "stack-trace": "0.0.9" } }, "saslprep": { @@ -5555,6 +6256,15 @@ } } }, + "serialize-javascript": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz", + "integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, "serve-static": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", @@ -5687,7 +6397,7 @@ "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "source-map-support": { "version": "0.5.19", @@ -5742,7 +6452,7 @@ "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", "requires": { "through": "2" } @@ -5777,7 +6487,7 @@ "standard-as-callback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==" + "integrity": "sha1-7YuyVkjhWDF1m2Ajvbh+a2CzgSY=" }, "statuses": { "version": "1.5.0", @@ -5896,7 +6606,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" }, "supports-color": { "version": "5.4.0", @@ -6028,12 +6738,21 @@ "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" + "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", + "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", "requires": { "to-space-case": "^1.0.0" } @@ -6041,7 +6760,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": 
"sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", + "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", "requires": { "to-no-case": "^1.0.0" } @@ -6322,6 +7041,12 @@ "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true }, + "workerpool": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz", + "integrity": "sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==", + "dev": true + }, "wrap-ansi": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", @@ -6412,6 +7137,32 @@ "decamelize": "^1.2.0" } }, + "yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "requires": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "dependencies": { + "camelcase": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "dev": true + }, + "decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true + } + } + }, "yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 8ea71edddb..fbe12047a6 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -18,7 +18,7 @@ "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write" }, "dependencies": { - "@overleaf/metrics": "^3.4.1", + "@overleaf/metrics": "^3.5.1", "@overleaf/o-error": "^3.1.0", "@overleaf/redis-wrapper": "^2.0.0", "async": "^2.5.0", @@ -36,8 +36,8 @@ "devDependencies": { "babel-eslint": "^10.1.0", "chai": "^3.5.0", - "chai-spies": "^0.7.1", "cluster-key-slot": "^1.0.5", + "coffee-script": "^1.12.7", "eslint": "^6.8.0", "eslint-config-prettier": "^6.10.0", "eslint-config-standard": "^14.1.0", @@ -53,10 +53,10 @@ "eslint-plugin-promise": "^4.2.1", "eslint-plugin-react": "^7.19.0", "eslint-plugin-standard": "^4.0.1", - "mocha": "^5.0.1", + "mocha": "^8.3.2", "prettier": "^2.0.0", "prettier-eslint-cli": "^5.0.0", - "sandboxed-module": "~0.2.0", + "sandboxed-module": "^2.0.4", "sinon": "^9.0.2", "timekeeper": "^2.0.0" } diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index f460a2d3ff..adf045645e 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -12,9 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const async = require('async') const Settings = require('settings-sharelatex') const rclient_history = 
require('@overleaf/redis-wrapper').createClient( diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index 42c3c8af6a..3bc2c793e1 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -1,6 +1,4 @@ const sinon = require('sinon') -const chai = require('chai') -chai.should() const Settings = require('settings-sharelatex') const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient( Settings.redis.project_history diff --git a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js index fbb8055aae..4051d4f5a4 100644 --- a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js @@ -11,9 +11,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() - const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js index f050ea22e1..b07ffae0f5 100644 --- a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() const async = require('async') const MockTrackChangesApi = require('./helpers/MockTrackChangesApi') diff --git a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js index c860fce849..4f4abc2730 100644 --- a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() const async = require('async') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js index 109f89d434..5eac9fa2f3 100644 --- a/services/document-updater/test/acceptance/js/FlushingDocsTests.js +++ b/services/document-updater/test/acceptance/js/FlushingDocsTests.js @@ -14,9 +14,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const async = require('async') const MockWebApi = require('./helpers/MockWebApi') diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js index a0b9de5773..50dc35059c 100644 --- 
a/services/document-updater/test/acceptance/js/GettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js @@ -12,9 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const MockWebApi = require('./helpers/MockWebApi') const DocUpdaterClient = require('./helpers/DocUpdaterClient') diff --git a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js index 72a6824562..b32ccb0837 100644 --- a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js +++ b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js @@ -12,9 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const MockWebApi = require('./helpers/MockWebApi') const DocUpdaterClient = require('./helpers/DocUpdaterClient') diff --git a/services/document-updater/test/acceptance/js/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js index b765e58b7c..7034436440 100644 --- a/services/document-updater/test/acceptance/js/RangesTests.js +++ b/services/document-updater/test/acceptance/js/RangesTests.js @@ -12,9 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -chai.should() -const { expect } = chai +const { expect } = require('chai') const async = require('async') const { db, ObjectId } = require('../../../app/js/mongodb') diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index d47931868c..7d2307c526 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -1,6 +1,4 @@ const sinon = require('sinon') -const chai = require('chai') -chai.should() const { expect } = require('chai') const Settings = require('settings-sharelatex') const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient( diff --git a/services/document-updater/test/setup.js b/services/document-updater/test/setup.js new file mode 100644 index 0000000000..0fb9848427 --- /dev/null +++ b/services/document-updater/test/setup.js @@ -0,0 +1,37 @@ +const chai = require('chai') +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') + +// Chai configuration +chai.should() + +// Global stubs +const sandbox = sinon.createSandbox() +const stubs = { + logger: { + debug: sandbox.stub(), + log: sandbox.stub(), + warn: sandbox.stub(), + err: sandbox.stub(), + error: sandbox.stub() + } +} + +// SandboxedModule configuration +SandboxedModule.configure({ + requires: { + 'logger-sharelatex': stubs.logger + }, + globals: { Buffer, JSON, Math, console, process } +}) + +// Mocha hooks +exports.mochaHooks = { + beforeEach() { + this.logger = stubs.logger + }, + + afterEach() { + sandbox.reset() + } +} diff --git a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js index f208c17bd6..d498d6b45c 100644 --- 
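The new test/setup.js relies on Mocha 8's root hook plugin mechanism: because the module exports mochaHooks, its beforeEach/afterEach hooks run around every test in the suite once the file is loaded through Mocha's --require option. A minimal sketch of the runner configuration this assumes, e.g. a .mocharc.json (the actual Mocha invocation is not shown in this patch):

    {
      "require": "test/setup.js"
    }

With logger-sharelatex stubbed globally through SandboxedModule.configure, the per-file chai.should() calls and logger stubs become redundant, which is why the unit-test hunks that follow delete them.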
a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js +++ b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js @@ -11,9 +11,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai +const { expect } = require('chai') const modulePath = '../../../../app/js/DiffCodec.js' const SandboxedModule = require('sandboxed-module') diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js index 5610c4abc1..81ef37f4be 100644 --- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/DispatchManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors.js') @@ -25,11 +23,6 @@ describe('DispatchManager', function () { this.DispatchManager = SandboxedModule.require(modulePath, { requires: { './UpdateManager': (this.UpdateManager = {}), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - error: sinon.stub(), - warn: sinon.stub() - }), 'settings-sharelatex': (this.settings = { redis: { documentupdater: {} diff --git a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js index 295a643cee..8ca42df757 100644 --- a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js +++ b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js @@ -13,8 +13,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/DocumentManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -33,11 +31,6 @@ describe('DocumentManager', function () { flushDocChangesAsync: sinon.stub(), flushProjectChangesAsync: sinon.stub() }), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - warn: sinon.stub() - }), - './DocOpsManager': (this.DocOpsManager = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { Timer = class Timer { @@ -220,7 +213,6 @@ describe('DocumentManager', function () { .stub() .callsArgWith(2, null, null, null, null) this.PersistenceManager.setDoc = sinon.stub().yields() - this.DocOpsManager.flushDocOpsToMongo = sinon.stub().callsArgWith(2) return this.DocumentManager.flushDocIfLoaded( this.project_id, this.doc_id, @@ -236,7 +228,6 @@ describe('DocumentManager', function () { it('should not write anything to the persistence layer', function () { this.PersistenceManager.setDoc.called.should.equal(false) - return this.DocOpsManager.flushDocOpsToMongo.called.should.equal(false) }) it('should call the callback without error', function () { diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js index 
263f1cd094..df3261b0f4 100644 --- a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js @@ -11,7 +11,6 @@ */ const SandboxedModule = require('sandboxed-module') const sinon = require('sinon') -require('chai').should() const modulePath = require('path').join( __dirname, '../../../../app/js/HistoryManager' @@ -33,11 +32,6 @@ describe('HistoryManager', function () { } } }), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - error: sinon.stub(), - debug: sinon.stub() - }), './DocumentManager': (this.DocumentManager = {}), './HistoryRedisManager': (this.HistoryRedisManager = {}), './RedisManager': (this.RedisManager = {}), diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js index 1b266685d1..f9b719991a 100644 --- a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/HistoryRedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -41,8 +39,7 @@ describe('HistoryRedisManager', function () { } }) } - }, - 'logger-sharelatex': { log() {} } + } } }) this.doc_id = 'doc-id-123' diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 07e9d93c9a..64477eb944 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -12,7 +12,6 @@ describe('HttpController', function () { flushProjectChangesAsync: sinon.stub() }), './ProjectManager': (this.ProjectManager = {}), - 'logger-sharelatex': (this.logger = { log: sinon.stub() }), './ProjectFlusher': { flushAllProjects() {} }, './DeleteQueueManager': (this.DeleteQueueManager = {}), './Metrics': (this.Metrics = {}), diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js index 6b3c3b539e..4f700cc144 100644 --- a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js @@ -25,7 +25,6 @@ describe('LockManager - checking the lock', function () { const existsStub = sinon.stub() const mocks = { - 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient() { return { diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index a04db7614f..3d6cf7a5ba 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -27,10 +27,6 @@ describe('LockManager - releasing the lock', function () { eval: sinon.stub() } const mocks = { - 'logger-sharelatex': { - log() {}, - error() {} - }, '@overleaf/redis-wrapper': { createClient: () => this.client }, diff --git 
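The unit-test hunks in this stretch all make the same move: the `'logger-sharelatex'` entry, previously stubbed inline in each SandboxedModule `requires` map, is simply deleted. A condensed before/after, modelled on the shape of the LockManager tests (code as it would sit inside a `beforeEach`):

```js
// Before: every suite carried its own logger double.
this.LockManager = SandboxedModule.require(modulePath, {
  requires: {
    'logger-sharelatex': { log() {} },
    '@overleaf/redis-wrapper': { createClient: () => ({ auth() {} }) }
  }
})

// After: only the dependencies the suite actually controls remain; the
// logger stub is supplied globally by SandboxedModule.configure() in
// test/setup.js.
this.LockManager = SandboxedModule.require(modulePath, {
  requires: {
    '@overleaf/redis-wrapper': { createClient: () => ({ auth() {} }) }
  }
})
```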
a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js index d56a244510..0b938c3753 100644 --- a/services/document-updater/test/unit/js/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js @@ -15,8 +15,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/LockManager.js' const SandboxedModule = require('sandboxed-module') @@ -25,7 +23,6 @@ describe('LockManager - getting the lock', function () { let Profiler this.LockManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient: () => { return { auth() {} } diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js index 02c279dd11..fb49e94aa1 100644 --- a/services/document-updater/test/unit/js/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/LockManager.js' const SandboxedModule = require('sandboxed-module') @@ -22,7 +20,6 @@ describe('LockManager - trying the lock', function () { let Profiler this.LockManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': { log() {} }, '@overleaf/redis-wrapper': { createClient: () => { return { diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js index 1013752dee..4015ef2662 100644 --- a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -11,8 +11,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/PersistenceManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -38,11 +36,6 @@ describe('PersistenceManager', function () { })()), inc: sinon.stub() }), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - err: sinon.stub(), - error: sinon.stub() - }), './Errors': Errors } }) diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 8b62bd83f3..1ff3d53ded 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectHistoryRedisManager.js' const SandboxedModule = 
require('sandboxed-module') const tk = require('timekeeper') @@ -47,13 +45,7 @@ describe('ProjectHistoryRedisManager', function () { '@overleaf/redis-wrapper': { createClient: () => this.rclient }, - 'logger-sharelatex': { - log() {} - }, './Metrics': (this.metrics = { summary: sinon.stub() }) - }, - globals: { - JSON: (this.JSON = JSON) } } )) @@ -136,7 +128,7 @@ describe('ProjectHistoryRedisManager', function () { return this.ProjectHistoryRedisManager.queueOps .calledWithExactly( this.project_id, - this.JSON.stringify(update), + JSON.stringify(update), this.callback ) .should.equal(true) @@ -184,7 +176,7 @@ describe('ProjectHistoryRedisManager', function () { return this.ProjectHistoryRedisManager.queueOps .calledWithExactly( this.project_id, - this.JSON.stringify(update), + JSON.stringify(update), this.callback ) .should.equal(true) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js index 9589d42054..d8342c0cff 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js @@ -13,8 +13,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') @@ -26,10 +24,6 @@ describe('ProjectManager - flushAndDeleteProject', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - error: sinon.stub() - }), './HistoryManager': (this.HistoryManager = { flushProjectChanges: sinon.stub().callsArg(2) }), diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js index c0bb668f49..70ae03e861 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js @@ -15,8 +15,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') @@ -28,10 +26,6 @@ describe('ProjectManager - flushProject', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - error: sinon.stub() - }), './HistoryManager': (this.HistoryManager = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { diff --git a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js index db9f31e4ad..467a190168 100644 --- a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js @@ -11,8 +11,6 @@ * Full docs: 
https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ProjectManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors.js') @@ -25,10 +23,6 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), - 'logger-sharelatex': (this.logger = { - log: sinon.stub(), - error: sinon.stub() - }), './HistoryManager': (this.HistoryManager = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index aa3db813a0..896517679c 100644 --- a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -22,17 +22,11 @@ describe('ProjectManager', function () { } this.Metrics.Timer.prototype.done = sinon.stub() - this.logger = { - log: sinon.stub(), - error: sinon.stub() - } - this.ProjectManager = SandboxedModule.require(modulePath, { requires: { './RedisManager': this.RedisManager, './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager, './DocumentManager': this.DocumentManager, - 'logger-sharelatex': this.logger, './HistoryManager': this.HistoryManager, './Metrics': this.Metrics } diff --git a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js index a336125a1c..c857153888 100644 --- a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js +++ b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js @@ -13,23 +13,13 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai +const { expect } = require('chai') const modulePath = '../../../../app/js/RangesManager.js' const SandboxedModule = require('sandboxed-module') describe('RangesManager', function () { beforeEach(function () { - this.RangesManager = SandboxedModule.require(modulePath, { - requires: { - 'logger-sharelatex': (this.logger = { - error: sinon.stub(), - log: sinon.stub(), - warn: sinon.stub() - }) - } - }) + this.RangesManager = SandboxedModule.require(modulePath) this.doc_id = 'doc-id-123' this.project_id = 'project-id-123' @@ -368,11 +358,6 @@ describe('RangesManager', function () { beforeEach(function () { this.RangesManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': (this.logger = { - error: sinon.stub(), - log: sinon.stub(), - warn: sinon.stub() - }), './RangesTracker': (this.RangesTracker = SandboxedModule.require( '../../../../app/js/RangesTracker.js' )) diff --git a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js index e84d557501..8fef08051f 100644 --- a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js +++ b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js @@ -11,9 +11,7 @@ * Full docs: 
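Another double these suites keep rebuilding inline is `./Metrics`: a `Timer` class whose instances share a stubbed `done()`, plus stubbed counters (`updateProjectTests` asserts through `this.Metrics.Timer.prototype.done`). A self-contained sketch of that recurring shape; `buildMetricsStub` is a name invented here for illustration, not a helper that exists in the repo:

```js
const sinon = require('sinon')

// Recurring shape of the ./Metrics test double used by these suites.
function buildMetricsStub() {
  class Timer {}
  // One shared stub on the prototype, so a suite can assert that any
  // timer started during the test was eventually stopped:
  //   this.Metrics.Timer.prototype.done.called.should.equal(true)
  Timer.prototype.done = sinon.stub()
  return { Timer, inc: sinon.stub() }
}

module.exports = buildMetricsStub
```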
https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai +const { expect } = require('chai') const modulePath = '../../../../app/js/RateLimitManager.js' const SandboxedModule = require('sandboxed-module') @@ -22,7 +20,6 @@ describe('RateLimitManager', function () { let Timer this.RateLimitManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': (this.logger = { log: sinon.stub() }), 'settings-sharelatex': (this.settings = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js index c5e4647df4..83cd5f99ce 100644 --- a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -11,8 +11,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/RealTimeRedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -45,7 +43,6 @@ describe('RealTimeRedisManager', function () { } } }, - 'logger-sharelatex': { log() {} }, crypto: (this.crypto = { randomBytes: sinon .stub() diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index 739aa88ab8..d14d0c23de 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/RedisManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') @@ -28,11 +26,6 @@ describe('RedisManager', function () { tk.freeze(new Date()) this.RedisManager = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': (this.logger = { - error: sinon.stub(), - log: sinon.stub(), - warn: sinon.stub() - }), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), 'settings-sharelatex': (this.settings = { documentupdater: { logHashErrors: { write: true, read: true } }, @@ -122,9 +115,6 @@ describe('RedisManager', function () { }) }), './Errors': Errors - }, - globals: { - JSON: (this.JSON = JSON) } }) @@ -924,8 +914,9 @@ describe('RedisManager', function () { this.RedisManager.getDocVersion .withArgs(this.doc_id) .yields(null, this.version - this.ops.length) - this._stringify = JSON.stringify - this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]' + this.stringifyStub = sinon + .stub(JSON, 'stringify') + .callsFake(() => '["bad bytes! 
\u0000 <- here"]') return this.RedisManager.updateDocument( this.project_id, this.doc_id, @@ -939,7 +930,7 @@ describe('RedisManager', function () { }) afterEach(function () { - return (this.JSON.stringify = this._stringify) + this.stringifyStub.restore() }) it('should log an error', function () { @@ -1127,8 +1118,9 @@ describe('RedisManager', function () { describe('with null bytes in the serialized doc lines', function () { beforeEach(function () { - this._stringify = JSON.stringify - this.JSON.stringify = () => '["bad bytes! \u0000 <- here"]' + this.stringifyStub = sinon + .stub(JSON, 'stringify') + .callsFake(() => '["bad bytes! \u0000 <- here"]') return this.RedisManager.putDocInMemory( this.project_id, this.doc_id, @@ -1142,7 +1134,7 @@ describe('RedisManager', function () { }) afterEach(function () { - return (this.JSON.stringify = this._stringify) + this.stringifyStub.restore() }) it('should log an error', function () { diff --git a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js index 8ea99aee5c..a5e3a8599a 100644 --- a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js +++ b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js @@ -14,7 +14,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const text = require('../../../../app/js/sharejs/types/text') -require('chai').should() const RangesTracker = require('../../../../app/js/RangesTracker') describe('ShareJS text type', function () { diff --git a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js index ddf98775d8..1b4e4422a6 100644 --- a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js +++ b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js @@ -11,9 +11,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai +const { expect } = require('chai') const modulePath = '../../../../app/js/ShareJsDB.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../../app/js/Errors') diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js index 2ab5ba617e..d6e9700bff 100644 --- a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js +++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js @@ -10,8 +10,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/ShareJsUpdateManager.js' const SandboxedModule = require('sandboxed-module') const crypto = require('crypto') @@ -35,7 +33,6 @@ describe('ShareJsUpdateManager', function () { return (this.rclient = { auth() {} }) } }, - 'logger-sharelatex': (this.logger = { log: sinon.stub() }), './RealTimeRedisManager': (this.RealTimeRedisManager = {}), './Metrics': (this.metrics = { inc: sinon.stub() }) }, diff --git a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js 
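The `RedisManagerTests` hunks above retire the old approach of injecting `JSON` through SandboxedModule's `globals` option (and manually swapping `JSON.stringify` back by hand) in favour of a plain sinon stub on the real global, restored in `afterEach`. Distilled from the diff:

```js
const sinon = require('sinon')

describe('with null bytes in the serialized doc lines', function () {
  beforeEach(function () {
    // Force the serializer to emit a null byte so the error path runs.
    this.stringifyStub = sinon
      .stub(JSON, 'stringify')
      .callsFake(() => '["bad bytes! \u0000 <- here"]')
  })

  afterEach(function () {
    // Restoring is essential: a stub left on the global JSON object
    // would poison every test that runs afterwards.
    this.stringifyStub.restore()
  })
})
```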
b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js index 4e39089490..cac51b9f5f 100644 --- a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js +++ b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js @@ -12,8 +12,6 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() const modulePath = '../../../../app/js/UpdateManager.js' const SandboxedModule = require('sandboxed-module') @@ -31,7 +29,6 @@ describe('UpdateManager', function () { './RealTimeRedisManager': (this.RealTimeRedisManager = {}), './ShareJsUpdateManager': (this.ShareJsUpdateManager = {}), './HistoryManager': (this.HistoryManager = {}), - 'logger-sharelatex': (this.logger = { log: sinon.stub() }), './Metrics': (this.Metrics = { Timer: (Timer = (function () { Timer = class Timer { From acbbd88d8a9300e9e7089a88163f4a6095f92fec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Apr 2021 10:40:56 +0000 Subject: [PATCH 730/769] Bump y18n from 4.0.0 to 4.0.1 Bumps [y18n](https://github.com/yargs/y18n) from 4.0.0 to 4.0.1. - [Release notes](https://github.com/yargs/y18n/releases) - [Changelog](https://github.com/yargs/y18n/blob/master/CHANGELOG.md) - [Commits](https://github.com/yargs/y18n/commits) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index a1551e3502..c347199789 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -4468,12 +4468,6 @@ "strip-ansi": "^6.0.0" } }, - "y18n": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", - "integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==", - "dev": true - }, "yargs": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", @@ -7099,9 +7093,9 @@ } }, "y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.6.tgz", + "integrity": "sha512-PlVX4Y0lDTN6E2V4ES2tEdyvXkeKzxa8c/vo0pxPr/TqbztddTP0yn7zZylIyiAuxerqj0Q5GhpJ1YJCP8LaZQ==", "dev": true }, "yallist": { @@ -7125,6 +7119,14 @@ "which-module": "^2.0.0", "y18n": "^4.0.0", "yargs-parser": "^13.1.2" + }, + "dependencies": { + "y18n": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", + "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", + "dev": true + } } }, "yargs-parser": { From 1e4e4b5ec07f354a22156e2508b17ef94c0850bd Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 1 Apr 2021 11:04:07 +0100 Subject: [PATCH 731/769] Update logger-sharelatex to 2.2.0 --- services/document-updater/package-lock.json | 622 +++++++++----------- services/document-updater/package.json | 2 +- 2 files changed, 288 insertions(+), 336 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index c347199789..be21275730 
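The y18n shuffle in the dependabot patch above is npm's dedupe at work rather than a simple version edit: the standalone top-level 5.0.5 entry disappears, the top-level dev-only copy is re-resolved to 5.0.6, and the older yargs entry (the one requiring `yargs-parser ^13.1.2`, whose `y18n: ^4.0.0` range cannot take 5.x) gains its own nested, patched copy. The resulting lockfile shape, abridged (integrity hashes, resolved URLs, and unrelated fields elided):

```json
{
  "y18n": {
    "version": "5.0.6",
    "dev": true
  },
  "yargs": {
    "requires": { "y18n": "^4.0.0" },
    "dependencies": {
      "y18n": { "version": "4.0.1", "dev": true }
    }
  }
}
```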
100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -155,19 +155,19 @@ } }, "@google-cloud/common": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz", - "integrity": "sha512-zWFjBS35eI9leAHhjfeOYlK5Plcuj/77EzstnrJIZbKgF/nkqjcQuGiMCpzCwOfPyUbz8ZaEOYgbHa759AKbjg==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", + "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", "requires": { - "@google-cloud/projectify": "^1.0.0", - "@google-cloud/promisify": "^1.0.0", - "arrify": "^2.0.0", - "duplexify": "^3.6.0", + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", "ent": "^2.2.0", "extend": "^3.0.2", - "google-auth-library": "^5.5.0", - "retry-request": "^4.0.0", - "teeny-request": "^6.0.0" + "google-auth-library": "^7.0.2", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" } }, "@google-cloud/debug-agent": { @@ -357,44 +357,54 @@ } }, "@google-cloud/logging": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", - "integrity": "sha512-xTW1V4MKpYC0mjSugyuiyUoZ9g6A42IhrrO3z7Tt3SmAb2IRj2Gf4RLoguKKncs340ooZFXrrVN/++t2Aj5zgg==", + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-9.1.1.tgz", + "integrity": "sha512-h3rK1nfSarPbcSRA17/qVs2amvFql6drpqvi6y3+u9eATNCIZCWmagWjuAMWcmxO/IOZxvfsh9NF7BtVyYQQ1Q==", "requires": { - "@google-cloud/common": "^2.2.2", - "@google-cloud/paginator": "^2.0.0", - "@google-cloud/projectify": "^1.0.0", - "@google-cloud/promisify": "^1.0.0", - "@opencensus/propagation-stackdriver": "0.0.20", - "arrify": "^2.0.0", - "dot-prop": "^5.1.0", + "@google-cloud/common": "^3.4.1", + "@google-cloud/paginator": "^3.0.0", + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "@opencensus/propagation-stackdriver": "0.0.22", + "arrify": "^2.0.1", + "dot-prop": "^6.0.0", "eventid": "^1.0.0", "extend": "^3.0.2", - "gcp-metadata": "^3.1.0", - "google-auth-library": "^5.2.2", - "google-gax": "^1.11.0", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^7.0.0", + "google-gax": "^2.9.2", "is": "^3.3.0", "on-finished": "^2.3.0", - "pumpify": "^2.0.0", - "snakecase-keys": "^3.0.0", - "stream-events": "^1.0.4", - "through2": "^3.0.0", - "type-fest": "^0.12.0" + "pumpify": "^2.0.1", + "snakecase-keys": "^3.1.2", + "stream-events": "^1.0.5", + "through2": "^4.0.0", + "type-fest": "^0.21.0" + }, + "dependencies": { + "through2": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", + "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", + "requires": { + "readable-stream": "3" + } + } } }, "@google-cloud/logging-bunyan": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz", - "integrity": "sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.2.tgz", + "integrity": "sha512-7BmXGZLYsnDs5UT9qvb0/rA0i2BbD3AyKqwXl/hP0pDGboCg0GE8viVmwzmY8f/cUzRZHAxOgV0bTENeTd6KEA==", "requires": { - "@google-cloud/logging": "^7.0.0", - "google-auth-library": 
"^5.0.0" + "@google-cloud/logging": "^9.0.0", + "google-auth-library": "^7.0.0" } }, "@google-cloud/paginator": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.3.tgz", - "integrity": "sha512-kp/pkb2p/p0d8/SKUu4mOq8+HGwF8NPzHWkj+VKrIPQPyMRw8deZtrO/OcSiy9C/7bpfU5Txah5ltUNfPkgEXg==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.5.tgz", + "integrity": "sha512-N4Uk4BT1YuskfRhKXBs0n9Lg2YTROZc6IMpkO/8DIHODtm5s3xY8K5vVBo23v/2XulY3azwITQlYWgT4GdLsUw==", "requires": { "arrify": "^2.0.0", "extend": "^3.0.2" @@ -596,14 +606,14 @@ } }, "@google-cloud/projectify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", - "integrity": "sha512-ZdzQUN02eRsmTKfBj9FDL0KNDIFNjBn/d6tHQmA/+FImH5DO6ZV8E7FzxMgAUiVAUq41RFAkb25p1oHOZ8psfg==" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" }, "@google-cloud/promisify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", - "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" }, "@google-cloud/trace-agent": { "version": "5.1.3", @@ -801,42 +811,84 @@ } }, "@grpc/grpc-js": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", - "integrity": "sha512-Hm+xOiqAhcpT9RYM8lc15dbQD7aQurM7ZU8ulmulepiPlN7iwBXXwP3vSBUimoFoApRqz7pSIisXU8pZaCB4og==", + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.2.12.tgz", + "integrity": "sha512-+gPCklP1eqIgrNPyzddYQdt9+GvZqPlLpIjIo+TveE+gbtp74VV1A2ju8ExeO8ma8f7MbpaGZx/KJPYVWL9eDw==", "requires": { + "@types/node": ">=12.12.47", + "google-auth-library": "^6.1.1", "semver": "^6.2.0" + }, + "dependencies": { + "google-auth-library": { + "version": "6.1.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz", + "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + } } }, "@grpc/proto-loader": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.5.tgz", - "integrity": "sha512-WwN9jVNdHRQoOBo9FDH7qU+mgfjPc8GygPYms3M+y3fbQLfnCe/Kv/E01t7JRgnrsOHH8euvSbed3mIalXhwqQ==", + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.6.tgz", + "integrity": "sha512-DT14xgw3PSzPxwS13auTEwxhMMOoz33DPUKNtmYK/QYbBSpLXJy78FGGs5yVoxVobEqPm4iW9MOIoz0A3bLTRQ==", "requires": { "lodash.camelcase": "^4.3.0", "protobufjs": "^6.8.6" } }, "@opencensus/core": { - "version": "0.0.20", - "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.20.tgz", - "integrity": 
"sha512-vqOuTd2yuMpKohp8TNNGUAPjWEGjlnGfB9Rh5e3DKqeyR94YgierNs4LbMqxKtsnwB8Dm2yoEtRuUgoe5vD9DA==", + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", + "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", "requires": { "continuation-local-storage": "^3.2.1", "log-driver": "^1.2.7", - "semver": "^6.0.0", + "semver": "^7.0.0", "shimmer": "^1.2.0", - "uuid": "^3.2.1" + "uuid": "^8.0.0" + }, + "dependencies": { + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + } } }, "@opencensus/propagation-stackdriver": { - "version": "0.0.20", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.20.tgz", - "integrity": "sha512-P8yuHSLtce+yb+2EZjtTVqG7DQ48laC+IuOWi3X9q78s1Gni5F9+hmbmyP6Nb61jb5BEvXQX1s2rtRI6bayUWA==", + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", + "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", "requires": { - "@opencensus/core": "^0.0.20", + "@opencensus/core": "^0.0.22", "hex2dec": "^1.0.1", - "uuid": "^3.2.1" + "uuid": "^8.0.0" + }, + "dependencies": { + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + } } }, "@overleaf/metrics": { @@ -995,7 +1047,7 @@ "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" + "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" }, "@types/eslint-visitor-keys": { "version": "1.0.0", @@ -1003,14 +1055,6 @@ "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", "dev": true }, - "@types/fs-extra": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.1.tgz", - "integrity": "sha512-TcUlBem321DFQzBNuz8p0CLLKp0VvF/XH9E4KHNmgwyp4E3AfgI5cjiIVZWlbfThBop2qxFIh4+LeY6hVWWZ2w==", - "requires": { - "@types/node": "*" - } - }, "@types/json-schema": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", @@ -1023,9 +1067,9 @@ "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==" }, "@types/node": { - "version": "10.17.15", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.15.tgz", - "integrity": "sha512-daFGV9GSs6USfPgxceDA8nlSe48XrVCJfDeYm7eokxq/ye7iuOH87hKXgMtEAVLFapkczbZsx868PMDT1Y0a6A==" + "version": "14.14.37", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.37.tgz", + "integrity": "sha512-XYmBiy+ohOR4Lh5jE379fV2IU+6Jn4g5qASinhitfyO71b/sCo6MKsMLF5tc7Zf2CE8hViVQyYSobJNke8OvUw==" }, "@types/semver": { "version": "7.3.4", @@ 
-1396,9 +1440,9 @@ } }, "bignumber.js": { - "version": "7.2.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", - "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" }, "binary-extensions": { "version": "2.2.0", @@ -1409,7 +1453,7 @@ "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", "requires": { "file-uri-to-path": "1.0.0" } @@ -1488,7 +1532,7 @@ "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", "dev": true }, "bson": { @@ -1589,6 +1633,11 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=" + }, "chokidar": { "version": "3.5.1", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", @@ -1729,7 +1778,7 @@ "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" + "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" }, "contains-path": { "version": "0.1.0", @@ -1786,10 +1835,15 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, + "crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=" + }, "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" + "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, "damerau-levenshtein": { "version": "1.0.6", @@ -1917,9 +1971,9 @@ } }, "dot-prop": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", - "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", + "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", "requires": { "is-obj": "^2.0.0" } @@ -1934,43 +1988,14 @@ } }, "duplexify": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", - "integrity": "sha1-Kk31MX9sz9kfhtb9JdjYoQO4gwk=", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", "requires": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", + "end-of-stream": 
"^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", "stream-shift": "^1.0.0" - }, - "dependencies": { - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - } } }, "ecc-jsbn": { @@ -2800,7 +2825,7 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" }, "fill-range": { "version": "7.0.1", @@ -3003,9 +3028,9 @@ } }, "gaxios": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz", - "integrity": "sha512-US8UMj8C5pRnao3Zykc4AAVr+cffoNKRTg9Rsf2GiuZCW69vgJj38VK2PzlPuQU73FZ/nTk9/Av6/JGcE1N9vA==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", + "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -3015,12 +3040,12 @@ } }, "gcp-metadata": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.5.0.tgz", - "integrity": "sha512-ZQf+DLZ5aKcRpLzYUyBS3yo3N0JSa82lNDO8rj3nMSlovLcz2riKFBsYgDzeXcv75oo5eqB2lx+B14UvPoCRnA==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", "requires": { - "gaxios": "^2.1.0", - "json-bigint": "^0.3.0" + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" } }, "get-caller-file": { @@ -3072,76 +3097,52 @@ "dev": true }, "google-auth-library": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", - "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", + "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", "fast-text-encoding": "^1.0.0", - "gaxios": "^2.1.0", - "gcp-metadata": "^3.4.0", - "gtoken": "^4.1.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", "jws": "^4.0.0", - "lru-cache": "^5.0.0" + "lru-cache": "^6.0.0" } }, "google-gax": { - "version": "1.15.3", - "resolved": 
"https://registry.npmjs.org/google-gax/-/google-gax-1.15.3.tgz", - "integrity": "sha512-3JKJCRumNm3x2EksUTw4P1Rad43FTpqrtW9jzpf3xSMYXx+ogaqTM1vGo7VixHB4xkAyATXVIa3OcNSh8H9zsQ==", + "version": "2.11.2", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.11.2.tgz", + "integrity": "sha512-PNqXv7Oi5XBMgoMWVxLZHUidfMv7cPHrDSDXqLyEd6kY6pqFnVKC8jt2T1df4JPSc2+VLPdeo6L7X9mbdQG8Xw==", "requires": { - "@grpc/grpc-js": "~1.0.3", + "@grpc/grpc-js": "~1.2.0", "@grpc/proto-loader": "^0.5.1", - "@types/fs-extra": "^8.0.1", "@types/long": "^4.0.0", "abort-controller": "^3.0.0", - "duplexify": "^3.6.0", - "google-auth-library": "^5.0.0", + "duplexify": "^4.0.0", + "fast-text-encoding": "^1.0.3", + "google-auth-library": "^7.0.2", "is-stream-ended": "^0.1.4", - "lodash.at": "^4.6.0", - "lodash.has": "^4.5.2", - "node-fetch": "^2.6.0", - "protobufjs": "^6.8.9", - "retry-request": "^4.0.0", - "semver": "^6.0.0", - "walkdir": "^0.4.0" + "node-fetch": "^2.6.1", + "protobufjs": "^6.10.2", + "retry-request": "^4.0.0" }, "dependencies": { - "@types/node": { - "version": "13.13.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.25.tgz", - "integrity": "sha512-6ZMK4xRcF2XrPdKmPYQxZkdHKV18xKgUFVvhIgw2iwaaO6weleLPHLBGPZmLhjo+m1N+MZXRAoBEBCCVqgO2zQ==" - }, - "protobufjs": { - "version": "6.10.1", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz", - "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } + "fast-text-encoding": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz", + "integrity": "sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig==" } } }, "google-p12-pem": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-2.0.4.tgz", - "integrity": "sha512-S4blHBQWZRnEW44OcR7TL9WR+QCqByRvhNDZ/uuQfpxywfupikf/miba8js1jZi6ZOGv5slgSuoshCWh6EMDzg==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", "requires": { - "node-forge": "^0.9.0" + "node-forge": "^0.10.0" } }, "graceful-fs": { @@ -3157,21 +3158,13 @@ "dev": true }, "gtoken": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-4.1.4.tgz", - "integrity": "sha512-VxirzD0SWoFUo5p8RDP8Jt2AGyOmyYcT/pOUgDKJCK+iSw0TMqwrVfY37RXTNmoKwrzmDHSk0GMT9FsgVmnVSA==", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", "requires": { - "gaxios": "^2.1.0", - "google-p12-pem": "^2.0.0", - "jws": "^4.0.0", - "mime": "^2.2.0" - }, - "dependencies": { - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": 
"sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - } + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" } }, "har-schema": { @@ -3596,6 +3589,11 @@ "binary-extensions": "^2.0.0" } }, + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", @@ -3728,11 +3726,11 @@ "dev": true }, "json-bigint": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", - "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", "requires": { - "bignumber.js": "^7.0.0" + "bignumber.js": "^9.0.0" } }, "json-schema": { @@ -3827,15 +3825,10 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==" }, - "lodash.at": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" - }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, "lodash.defaults": { "version": "4.2.0", @@ -3853,11 +3846,6 @@ "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", "dev": true }, - "lodash.has": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" - }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -3942,65 +3930,33 @@ } }, "logger-sharelatex": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", - "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz", + "integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==", "requires": { - "@google-cloud/logging-bunyan": "^2.0.0", - "@overleaf/o-error": "^2.0.0", - "bunyan": "1.8.12", - "raven": "1.1.3", - "request": "2.88.0", - "yn": "^3.1.1" + "@google-cloud/logging-bunyan": "^3.0.0", + "@overleaf/o-error": "^3.0.0", + "bunyan": "^1.8.14", + "node-fetch": "^2.6.0", + "raven": "^2.6.4", + "yn": "^4.0.0" }, "dependencies": { - "@overleaf/o-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", - "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" - }, "bunyan": { - "version": "1.8.12", - "resolved": 
"https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", + "version": "1.8.15", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz", + "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==", "requires": { "dtrace-provider": "~0.8", - "moment": "^2.10.6", + "moment": "^2.19.3", "mv": "~2", "safe-json-stringify": "~1" } }, - "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" - }, - "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.0", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - } + "yn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", + "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" } } }, @@ -4077,18 +4033,20 @@ } }, "lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha1-HaJ+ZxAnGUdpXa9oSOhH8B2EuSA=", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "requires": { - "yallist": "^3.0.2" + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } } }, - "lsmod": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" - }, "make-plural": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", @@ -4112,6 +4070,16 @@ "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, + "md5": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", + "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", + "requires": { + "charenc": "0.0.2", + "crypt": "0.0.2", + "is-buffer": "~1.1.6" + } + }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -4643,9 +4611,9 @@ "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" }, "node-forge": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.2.tgz", - "integrity": 
"sha512-naKSScof4Wn+aoHU6HBsifh92Zeicm1GDQKd1vp3Y/kOi8ub0DozCa9KpvYNCXslFHYRmLNiqRopGdTGwNLpNw==" + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" }, "node-pre-gyp": { "version": "0.16.0", @@ -4947,7 +4915,7 @@ "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" + "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" }, "parseurl": { "version": "1.3.3", @@ -5757,9 +5725,9 @@ } }, "protobufjs": { - "version": "6.8.8", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", - "integrity": "sha1-yLTxKC/XqQ5vWxCe0RyEr4KQjnw=", + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", + "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -5771,9 +5739,16 @@ "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.0", - "@types/node": "^10.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", "long": "^4.0.0" + }, + "dependencies": { + "@types/node": { + "version": "13.13.48", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", + "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" + } } }, "proxy-addr": { @@ -5807,19 +5782,6 @@ "duplexify": "^4.1.1", "inherits": "^2.0.3", "pump": "^3.0.0" - }, - "dependencies": { - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - } } }, "punycode": { @@ -5859,15 +5821,15 @@ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raven": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz", + "integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==", "requires": { "cookie": "0.3.1", - "json-stringify-safe": "5.0.1", - "lsmod": "1.0.0", - "stack-trace": "0.0.9", - "uuid": "3.0.0" + "md5": "^2.2.1", + "stack-trace": "0.0.10", + "timed-out": "4.0.1", + "uuid": "3.3.2" }, "dependencies": { "cookie": { @@ -5875,10 +5837,15 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, + "stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" + }, "uuid": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": 
"sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -6186,7 +6153,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", + "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, "safer-buffer": { @@ -6380,18 +6347,18 @@ } }, "snakecase-keys": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.0.tgz", - "integrity": "sha512-WTJ0NhCH/37J+PU3fuz0x5b6TvtWQChTcKPOndWoUy0pteKOe0hrHMzSRsJOWSIP48EQkzUEsgQPmrG3W8pFNQ==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.1.tgz", + "integrity": "sha512-CjU5pyRfwOtaOITYv5C8DzpZ8XA/ieRsDpr93HI2r6e3YInC6moZpSQbmUtg8cTk58tq2x3jcG2gv+p1IZGmMA==", "requires": { - "map-obj": "^4.0.0", + "map-obj": "^4.1.0", "to-snake-case": "^1.0.0" } }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" }, "source-map-support": { "version": "0.5.19", @@ -6446,7 +6413,7 @@ "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", "requires": { "through": "2" } @@ -6476,7 +6443,8 @@ "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=", + "dev": true }, "standard-as-callback": { "version": "2.0.1", @@ -6646,21 +6614,21 @@ } }, "teeny-request": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.3.tgz", - "integrity": "sha512-TZG/dfd2r6yeji19es1cUIwAlVD8y+/svB1kAC2Y0bjEyysrfbO8EZvJBRwIE6WkwmUoB7uvWLwTIhJbMXZ1Dw==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", "requires": { "http-proxy-agent": "^4.0.0", "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.2.0", + "node-fetch": "^2.6.1", "stream-events": "^1.0.5", - "uuid": "^7.0.0" + "uuid": "^8.0.0" }, "dependencies": { "uuid": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", - "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==" + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" } } }, @@ -6708,6 +6676,11 @@ } } }, + "timed-out": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", + "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" + }, "timekeeper": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", @@ -6732,7 +6705,7 @@ "to-no-case": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" }, "to-regex-range": { "version": "5.0.1", @@ -6746,7 +6719,7 @@ "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", + "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", "requires": { "to-space-case": "^1.0.0" } @@ -6754,7 +6727,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", + "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", "requires": { "to-no-case": "^1.0.0" } @@ -6764,22 +6737,6 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, - "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", - "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" - } - } - }, "tslib": { "version": "1.11.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.2.tgz", @@ -6815,9 +6772,9 @@ "dev": true }, "type-fest": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz", - "integrity": "sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==" + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==" }, "type-is": { "version": "1.6.18", @@ -6972,11 +6929,6 @@ } } }, - "walkdir": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", - "integrity": "sha512-3eBwRyEln6E1MSzcxcVpQIhRG8Q1jLvEqRmCZqS3dsfXEDR/AhOF4d+jHg1qvDCpYaVRZjENPQyrVxAkQqxPgQ==" - }, "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fbe12047a6..32b4b90999 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -27,7 +27,7 @@ "diff-match-patch": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz", "express": "4.17.1", "lodash": "^4.17.19", - "logger-sharelatex": "^1.9.1", + "logger-sharelatex": "^2.2.0", "mongodb": "^3.6.0", "request": "^2.88.2", "requestretry": "^4.1.0", From 8ba799035b94228b110ca18051593db01acf8e6c Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 6 Apr 2021 15:29:09 +0100 Subject: [PATCH 732/769] Revert "Update logger-sharelatex to 2.2.0" --- services/document-updater/package-lock.json | 622 +++++++++++--------- services/document-updater/package.json | 2 +- 2 files changed, 336 insertions(+), 288 
deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index be21275730..c347199789 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -155,19 +155,19 @@ } }, "@google-cloud/common": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", - "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz", + "integrity": "sha512-zWFjBS35eI9leAHhjfeOYlK5Plcuj/77EzstnrJIZbKgF/nkqjcQuGiMCpzCwOfPyUbz8ZaEOYgbHa759AKbjg==", "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", + "@google-cloud/projectify": "^1.0.0", + "@google-cloud/promisify": "^1.0.0", + "arrify": "^2.0.0", + "duplexify": "^3.6.0", "ent": "^2.2.0", "extend": "^3.0.2", - "google-auth-library": "^7.0.2", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" + "google-auth-library": "^5.5.0", + "retry-request": "^4.0.0", + "teeny-request": "^6.0.0" } }, "@google-cloud/debug-agent": { @@ -357,54 +357,44 @@ } }, "@google-cloud/logging": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-9.1.1.tgz", - "integrity": "sha512-h3rK1nfSarPbcSRA17/qVs2amvFql6drpqvi6y3+u9eATNCIZCWmagWjuAMWcmxO/IOZxvfsh9NF7BtVyYQQ1Q==", + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", + "integrity": "sha512-xTW1V4MKpYC0mjSugyuiyUoZ9g6A42IhrrO3z7Tt3SmAb2IRj2Gf4RLoguKKncs340ooZFXrrVN/++t2Aj5zgg==", "requires": { - "@google-cloud/common": "^3.4.1", - "@google-cloud/paginator": "^3.0.0", - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "@opencensus/propagation-stackdriver": "0.0.22", - "arrify": "^2.0.1", - "dot-prop": "^6.0.0", + "@google-cloud/common": "^2.2.2", + "@google-cloud/paginator": "^2.0.0", + "@google-cloud/projectify": "^1.0.0", + "@google-cloud/promisify": "^1.0.0", + "@opencensus/propagation-stackdriver": "0.0.20", + "arrify": "^2.0.0", + "dot-prop": "^5.1.0", "eventid": "^1.0.0", "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "google-auth-library": "^7.0.0", - "google-gax": "^2.9.2", + "gcp-metadata": "^3.1.0", + "google-auth-library": "^5.2.2", + "google-gax": "^1.11.0", "is": "^3.3.0", "on-finished": "^2.3.0", - "pumpify": "^2.0.1", - "snakecase-keys": "^3.1.2", - "stream-events": "^1.0.5", - "through2": "^4.0.0", - "type-fest": "^0.21.0" - }, - "dependencies": { - "through2": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", - "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", - "requires": { - "readable-stream": "3" - } - } + "pumpify": "^2.0.0", + "snakecase-keys": "^3.0.0", + "stream-events": "^1.0.4", + "through2": "^3.0.0", + "type-fest": "^0.12.0" } }, "@google-cloud/logging-bunyan": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.2.tgz", - "integrity": "sha512-7BmXGZLYsnDs5UT9qvb0/rA0i2BbD3AyKqwXl/hP0pDGboCg0GE8viVmwzmY8f/cUzRZHAxOgV0bTENeTd6KEA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz", + "integrity": 
"sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==", "requires": { - "@google-cloud/logging": "^9.0.0", - "google-auth-library": "^7.0.0" + "@google-cloud/logging": "^7.0.0", + "google-auth-library": "^5.0.0" } }, "@google-cloud/paginator": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.5.tgz", - "integrity": "sha512-N4Uk4BT1YuskfRhKXBs0n9Lg2YTROZc6IMpkO/8DIHODtm5s3xY8K5vVBo23v/2XulY3azwITQlYWgT4GdLsUw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.3.tgz", + "integrity": "sha512-kp/pkb2p/p0d8/SKUu4mOq8+HGwF8NPzHWkj+VKrIPQPyMRw8deZtrO/OcSiy9C/7bpfU5Txah5ltUNfPkgEXg==", "requires": { "arrify": "^2.0.0", "extend": "^3.0.2" @@ -606,14 +596,14 @@ } }, "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", + "integrity": "sha512-ZdzQUN02eRsmTKfBj9FDL0KNDIFNjBn/d6tHQmA/+FImH5DO6ZV8E7FzxMgAUiVAUq41RFAkb25p1oHOZ8psfg==" }, "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", + "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, "@google-cloud/trace-agent": { "version": "5.1.3", @@ -811,84 +801,42 @@ } }, "@grpc/grpc-js": { - "version": "1.2.12", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.2.12.tgz", - "integrity": "sha512-+gPCklP1eqIgrNPyzddYQdt9+GvZqPlLpIjIo+TveE+gbtp74VV1A2ju8ExeO8ma8f7MbpaGZx/KJPYVWL9eDw==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", + "integrity": "sha512-Hm+xOiqAhcpT9RYM8lc15dbQD7aQurM7ZU8ulmulepiPlN7iwBXXwP3vSBUimoFoApRqz7pSIisXU8pZaCB4og==", "requires": { - "@types/node": ">=12.12.47", - "google-auth-library": "^6.1.1", "semver": "^6.2.0" - }, - "dependencies": { - "google-auth-library": { - "version": "6.1.6", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz", - "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - } } }, "@grpc/proto-loader": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.6.tgz", - "integrity": "sha512-DT14xgw3PSzPxwS13auTEwxhMMOoz33DPUKNtmYK/QYbBSpLXJy78FGGs5yVoxVobEqPm4iW9MOIoz0A3bLTRQ==", + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.5.tgz", + "integrity": "sha512-WwN9jVNdHRQoOBo9FDH7qU+mgfjPc8GygPYms3M+y3fbQLfnCe/Kv/E01t7JRgnrsOHH8euvSbed3mIalXhwqQ==", "requires": { "lodash.camelcase": "^4.3.0", "protobufjs": "^6.8.6" } }, "@opencensus/core": { - "version": "0.0.22", - 
"resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", - "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.20.tgz", + "integrity": "sha512-vqOuTd2yuMpKohp8TNNGUAPjWEGjlnGfB9Rh5e3DKqeyR94YgierNs4LbMqxKtsnwB8Dm2yoEtRuUgoe5vD9DA==", "requires": { "continuation-local-storage": "^3.2.1", "log-driver": "^1.2.7", - "semver": "^7.0.0", + "semver": "^6.0.0", "shimmer": "^1.2.0", - "uuid": "^8.0.0" - }, - "dependencies": { - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - } + "uuid": "^3.2.1" } }, "@opencensus/propagation-stackdriver": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", - "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.20.tgz", + "integrity": "sha512-P8yuHSLtce+yb+2EZjtTVqG7DQ48laC+IuOWi3X9q78s1Gni5F9+hmbmyP6Nb61jb5BEvXQX1s2rtRI6bayUWA==", "requires": { - "@opencensus/core": "^0.0.22", + "@opencensus/core": "^0.0.20", "hex2dec": "^1.0.1", - "uuid": "^8.0.0" - }, - "dependencies": { - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - } + "uuid": "^3.2.1" } }, "@overleaf/metrics": { @@ -1047,7 +995,7 @@ "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" + "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" }, "@types/eslint-visitor-keys": { "version": "1.0.0", @@ -1055,6 +1003,14 @@ "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", "dev": true }, + "@types/fs-extra": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.1.tgz", + "integrity": "sha512-TcUlBem321DFQzBNuz8p0CLLKp0VvF/XH9E4KHNmgwyp4E3AfgI5cjiIVZWlbfThBop2qxFIh4+LeY6hVWWZ2w==", + "requires": { + "@types/node": "*" + } + }, "@types/json-schema": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", @@ -1067,9 +1023,9 @@ "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==" }, "@types/node": { - "version": "14.14.37", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.37.tgz", - "integrity": "sha512-XYmBiy+ohOR4Lh5jE379fV2IU+6Jn4g5qASinhitfyO71b/sCo6MKsMLF5tc7Zf2CE8hViVQyYSobJNke8OvUw==" + "version": "10.17.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.15.tgz", + "integrity": 
"sha512-daFGV9GSs6USfPgxceDA8nlSe48XrVCJfDeYm7eokxq/ye7iuOH87hKXgMtEAVLFapkczbZsx868PMDT1Y0a6A==" }, "@types/semver": { "version": "7.3.4", @@ -1440,9 +1396,9 @@ } }, "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", + "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" }, "binary-extensions": { "version": "2.2.0", @@ -1453,7 +1409,7 @@ "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", "requires": { "file-uri-to-path": "1.0.0" } @@ -1532,7 +1488,7 @@ "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, "bson": { @@ -1633,11 +1589,6 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, - "charenc": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", - "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=" - }, "chokidar": { "version": "3.5.1", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", @@ -1778,7 +1729,7 @@ "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" + "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, "contains-path": { "version": "0.1.0", @@ -1835,15 +1786,10 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, - "crypt": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", - "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=" - }, "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" + "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, "damerau-levenshtein": { "version": "1.0.6", @@ -1971,9 +1917,9 @@ } }, "dot-prop": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", - "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", "requires": { "is-obj": "^2.0.0" } @@ -1988,14 +1934,43 @@ } }, "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", + "integrity": 
"sha1-Kk31MX9sz9kfhtb9JdjYoQO4gwk=", "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", + "end-of-stream": "^1.0.0", + "inherits": "^2.0.1", + "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" + }, + "dependencies": { + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "ecc-jsbn": { @@ -2825,7 +2800,7 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" }, "fill-range": { "version": "7.0.1", @@ -3028,9 +3003,9 @@ } }, "gaxios": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", - "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz", + "integrity": "sha512-US8UMj8C5pRnao3Zykc4AAVr+cffoNKRTg9Rsf2GiuZCW69vgJj38VK2PzlPuQU73FZ/nTk9/Av6/JGcE1N9vA==", "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -3040,12 +3015,12 @@ } }, "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.5.0.tgz", + "integrity": "sha512-ZQf+DLZ5aKcRpLzYUyBS3yo3N0JSa82lNDO8rj3nMSlovLcz2riKFBsYgDzeXcv75oo5eqB2lx+B14UvPoCRnA==", "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" + "gaxios": "^2.1.0", + "json-bigint": "^0.3.0" } }, "get-caller-file": { @@ -3097,52 +3072,76 @@ "dev": true }, "google-auth-library": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", - "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": 
"^4.1.0", "jws": "^4.0.0", - "lru-cache": "^6.0.0" + "lru-cache": "^5.0.0" } }, "google-gax": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.11.2.tgz", - "integrity": "sha512-PNqXv7Oi5XBMgoMWVxLZHUidfMv7cPHrDSDXqLyEd6kY6pqFnVKC8jt2T1df4JPSc2+VLPdeo6L7X9mbdQG8Xw==", + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.15.3.tgz", + "integrity": "sha512-3JKJCRumNm3x2EksUTw4P1Rad43FTpqrtW9jzpf3xSMYXx+ogaqTM1vGo7VixHB4xkAyATXVIa3OcNSh8H9zsQ==", "requires": { - "@grpc/grpc-js": "~1.2.0", + "@grpc/grpc-js": "~1.0.3", "@grpc/proto-loader": "^0.5.1", + "@types/fs-extra": "^8.0.1", "@types/long": "^4.0.0", "abort-controller": "^3.0.0", - "duplexify": "^4.0.0", - "fast-text-encoding": "^1.0.3", - "google-auth-library": "^7.0.2", + "duplexify": "^3.6.0", + "google-auth-library": "^5.0.0", "is-stream-ended": "^0.1.4", - "node-fetch": "^2.6.1", - "protobufjs": "^6.10.2", - "retry-request": "^4.0.0" + "lodash.at": "^4.6.0", + "lodash.has": "^4.5.2", + "node-fetch": "^2.6.0", + "protobufjs": "^6.8.9", + "retry-request": "^4.0.0", + "semver": "^6.0.0", + "walkdir": "^0.4.0" }, "dependencies": { - "fast-text-encoding": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz", - "integrity": "sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig==" + "@types/node": { + "version": "13.13.25", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.25.tgz", + "integrity": "sha512-6ZMK4xRcF2XrPdKmPYQxZkdHKV18xKgUFVvhIgw2iwaaO6weleLPHLBGPZmLhjo+m1N+MZXRAoBEBCCVqgO2zQ==" + }, + "protobufjs": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz", + "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + } } } }, "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-2.0.4.tgz", + "integrity": "sha512-S4blHBQWZRnEW44OcR7TL9WR+QCqByRvhNDZ/uuQfpxywfupikf/miba8js1jZi6ZOGv5slgSuoshCWh6EMDzg==", "requires": { - "node-forge": "^0.10.0" + "node-forge": "^0.9.0" } }, "graceful-fs": { @@ -3158,13 +3157,21 @@ "dev": true }, "gtoken": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", - "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-4.1.4.tgz", + "integrity": "sha512-VxirzD0SWoFUo5p8RDP8Jt2AGyOmyYcT/pOUgDKJCK+iSw0TMqwrVfY37RXTNmoKwrzmDHSk0GMT9FsgVmnVSA==", "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0" + "gaxios": "^2.1.0", + "google-p12-pem": "^2.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + }, + 
"dependencies": { + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" + } } }, "har-schema": { @@ -3589,11 +3596,6 @@ "binary-extensions": "^2.0.0" } }, - "is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - }, "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", @@ -3726,11 +3728,11 @@ "dev": true }, "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", + "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", "requires": { - "bignumber.js": "^9.0.0" + "bignumber.js": "^7.0.0" } }, "json-schema": { @@ -3825,10 +3827,15 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==" }, + "lodash.at": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", + "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" + }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, "lodash.defaults": { "version": "4.2.0", @@ -3846,6 +3853,11 @@ "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", "dev": true }, + "lodash.has": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", + "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" + }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -3930,33 +3942,65 @@ } }, "logger-sharelatex": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz", - "integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", + "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", "requires": { - "@google-cloud/logging-bunyan": "^3.0.0", - "@overleaf/o-error": "^3.0.0", - "bunyan": "^1.8.14", - "node-fetch": "^2.6.0", - "raven": "^2.6.4", - "yn": "^4.0.0" + "@google-cloud/logging-bunyan": "^2.0.0", + "@overleaf/o-error": "^2.0.0", + "bunyan": "1.8.12", + "raven": "1.1.3", + "request": "2.88.0", + "yn": "^3.1.1" }, "dependencies": { + "@overleaf/o-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", + "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" + }, "bunyan": { - "version": "1.8.15", - 
"resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz", - "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==", + "version": "1.8.12", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", + "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", "requires": { "dtrace-provider": "~0.8", - "moment": "^2.19.3", + "moment": "^2.10.6", "mv": "~2", "safe-json-stringify": "~1" } }, - "yn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", - "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + }, + "request": { + "version": "2.88.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.0", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.4.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + } } } }, @@ -4033,20 +4077,18 @@ } }, "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha1-HaJ+ZxAnGUdpXa9oSOhH8B2EuSA=", "requires": { - "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } + "yallist": "^3.0.2" } }, + "lsmod": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", + "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" + }, "make-plural": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", @@ -4070,16 +4112,6 @@ "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, - "md5": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", - "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", - "requires": { - "charenc": "0.0.2", - "crypt": "0.0.2", - "is-buffer": "~1.1.6" - } - }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -4611,9 +4643,9 @@ "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" }, "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": 
"sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.2.tgz", + "integrity": "sha512-naKSScof4Wn+aoHU6HBsifh92Zeicm1GDQKd1vp3Y/kOi8ub0DozCa9KpvYNCXslFHYRmLNiqRopGdTGwNLpNw==" }, "node-pre-gyp": { "version": "0.16.0", @@ -4915,7 +4947,7 @@ "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" + "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" }, "parseurl": { "version": "1.3.3", @@ -5725,9 +5757,9 @@ } }, "protobufjs": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", - "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", + "version": "6.8.8", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", + "integrity": "sha1-yLTxKC/XqQ5vWxCe0RyEr4KQjnw=", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -5739,16 +5771,9 @@ "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", + "@types/long": "^4.0.0", + "@types/node": "^10.1.0", "long": "^4.0.0" - }, - "dependencies": { - "@types/node": { - "version": "13.13.48", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", - "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" - } } }, "proxy-addr": { @@ -5782,6 +5807,19 @@ "duplexify": "^4.1.1", "inherits": "^2.0.3", "pump": "^3.0.0" + }, + "dependencies": { + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + } } }, "punycode": { @@ -5821,15 +5859,15 @@ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raven": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz", - "integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", + "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", "requires": { "cookie": "0.3.1", - "md5": "^2.2.1", - "stack-trace": "0.0.10", - "timed-out": "4.0.1", - "uuid": "3.3.2" + "json-stringify-safe": "5.0.1", + "lsmod": "1.0.0", + "stack-trace": "0.0.9", + "uuid": "3.0.0" }, "dependencies": { "cookie": { @@ -5837,15 +5875,10 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, - "stack-trace": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" - }, "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", + "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" } } }, @@ -6153,7 +6186,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", + "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", "optional": true }, "safer-buffer": { @@ -6347,18 +6380,18 @@ } }, "snakecase-keys": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.1.tgz", - "integrity": "sha512-CjU5pyRfwOtaOITYv5C8DzpZ8XA/ieRsDpr93HI2r6e3YInC6moZpSQbmUtg8cTk58tq2x3jcG2gv+p1IZGmMA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.0.tgz", + "integrity": "sha512-WTJ0NhCH/37J+PU3fuz0x5b6TvtWQChTcKPOndWoUy0pteKOe0hrHMzSRsJOWSIP48EQkzUEsgQPmrG3W8pFNQ==", "requires": { - "map-obj": "^4.1.0", + "map-obj": "^4.0.0", "to-snake-case": "^1.0.0" } }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "source-map-support": { "version": "0.5.19", @@ -6413,7 +6446,7 @@ "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", "requires": { "through": "2" } @@ -6443,8 +6476,7 @@ "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=", - "dev": true + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" }, "standard-as-callback": { "version": "2.0.1", @@ -6614,21 +6646,21 @@ } }, "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.3.tgz", + "integrity": "sha512-TZG/dfd2r6yeji19es1cUIwAlVD8y+/svB1kAC2Y0bjEyysrfbO8EZvJBRwIE6WkwmUoB7uvWLwTIhJbMXZ1Dw==", "requires": { "http-proxy-agent": "^4.0.0", "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", + "node-fetch": "^2.2.0", "stream-events": "^1.0.5", - "uuid": "^8.0.0" + "uuid": "^7.0.0" }, "dependencies": { "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", + "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==" } } }, @@ -6676,11 +6708,6 @@ } } }, - "timed-out": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", - "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" - }, "timekeeper": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", @@ -6705,7 +6732,7 @@ "to-no-case": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" + "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" }, "to-regex-range": { "version": "5.0.1", @@ -6719,7 +6746,7 @@ "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", + "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", "requires": { "to-space-case": "^1.0.0" } @@ -6727,7 +6754,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", + "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", "requires": { "to-no-case": "^1.0.0" } @@ -6737,6 +6764,22 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, + "tough-cookie": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "requires": { + "psl": "^1.1.24", + "punycode": "^1.4.1" + }, + "dependencies": { + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + } + } + }, "tslib": { "version": "1.11.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.2.tgz", @@ -6772,9 +6815,9 @@ "dev": true }, "type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==" + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz", + "integrity": "sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==" }, "type-is": { "version": "1.6.18", @@ -6929,6 +6972,11 @@ } } }, + "walkdir": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", + "integrity": "sha512-3eBwRyEln6E1MSzcxcVpQIhRG8Q1jLvEqRmCZqS3dsfXEDR/AhOF4d+jHg1qvDCpYaVRZjENPQyrVxAkQqxPgQ==" + }, "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 32b4b90999..fbe12047a6 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -27,7 +27,7 @@ "diff-match-patch": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz", "express": "4.17.1", "lodash": "^4.17.19", - "logger-sharelatex": "^2.2.0", + "logger-sharelatex": "^1.9.1", "mongodb": "^3.6.0", "request": "^2.88.2", "requestretry": "^4.1.0", From 50b24043b7e108c7dfe86fcd73b0554e318c5f6a Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 28 Mar 2021 13:30:51 +0200 Subject: [PATCH 733/769] [perf] use MGET for fetching multiple keys in one operation - getDoc: from 13 down to 2 operations --- .../document-updater/app/js/RedisManager.js | 25 ++--- .../unit/js/RedisManager/RedisManagerTests.js | 96 
From 50b24043b7e108c7dfe86fcd73b0554e318c5f6a Mon Sep 17 00:00:00 2001
From: Jakob Ackermann
Date: Sun, 28 Mar 2021 13:30:51 +0200
Subject: [PATCH 733/769] [perf] use MGET for fetching multiple keys in one operation

- getDoc: from 13 down to 2 operations
---
 .../document-updater/app/js/RedisManager.js   | 25 ++---
 .../unit/js/RedisManager/RedisManagerTests.js | 96 +++++--------------
 2 files changed, 37 insertions(+), 84 deletions(-)

diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js
index 104a720c95..35f62c8222 100644
--- a/services/document-updater/app/js/RedisManager.js
+++ b/services/document-updater/app/js/RedisManager.js
@@ -190,18 +190,19 @@ module.exports = RedisManager = {
       ) {}
     }
     const timer = new metrics.Timer('redis.get-doc')
-    const multi = rclient.multi()
-    multi.get(keys.docLines({ doc_id }))
-    multi.get(keys.docVersion({ doc_id }))
-    multi.get(keys.docHash({ doc_id }))
-    multi.get(keys.projectKey({ doc_id }))
-    multi.get(keys.ranges({ doc_id }))
-    multi.get(keys.pathname({ doc_id }))
-    multi.get(keys.projectHistoryId({ doc_id }))
-    multi.get(keys.unflushedTime({ doc_id }))
-    multi.get(keys.lastUpdatedAt({ doc_id }))
-    multi.get(keys.lastUpdatedBy({ doc_id }))
-    return multi.exec(function (error, ...rest) {
+    const collectKeys = [
+      keys.docLines({ doc_id }),
+      keys.docVersion({ doc_id }),
+      keys.docHash({ doc_id }),
+      keys.projectKey({ doc_id }),
+      keys.ranges({ doc_id }),
+      keys.pathname({ doc_id }),
+      keys.projectHistoryId({ doc_id }),
+      keys.unflushedTime({ doc_id }),
+      keys.lastUpdatedAt({ doc_id }),
+      keys.lastUpdatedBy({ doc_id })
+    ]
+    rclient.mget(...collectKeys, (error, ...rest) => {
       let [
         docLines,
         version,
diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js
index d14d0c23de..1937ddfb86 100644
--- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js
+++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js
@@ -141,10 +141,9 @@ describe('RedisManager', function () {
       this.json_ranges = JSON.stringify(this.ranges)
       this.unflushed_time = 12345
       this.pathname = '/a/b/c.tex'
-      this.multi.get = sinon.stub()
-      this.multi.exec = sinon
+      this.rclient.mget = sinon
         .stub()
-        .callsArgWith(0, null, [
+        .yields(null, [
           this.jsonlines,
           this.version,
           this.hash,
@@ -166,57 +165,20 @@
         )
       })

-      it('should get the lines from redis', function () {
-        return this.multi.get
-          .calledWith(`doclines:${this.doc_id}`)
-          .should.equal(true)
-      })
-
-      it('should get the version from', function () {
-        return this.multi.get
-          .calledWith(`DocVersion:${this.doc_id}`)
-          .should.equal(true)
-      })
-
-      it('should get the hash', function () {
-        return this.multi.get
-          .calledWith(`DocHash:${this.doc_id}`)
-          .should.equal(true)
-      })
-
-      it('should get the ranges', function () {
-        return this.multi.get
-          .calledWith(`Ranges:${this.doc_id}`)
-          .should.equal(true)
-      })
-
-      it('should get the unflushed time', function () {
-        return this.multi.get
-          .calledWith(`UnflushedTime:${this.doc_id}`)
-          .should.equal(true)
-      })
-
-      it('should get the pathname', function () {
-        return this.multi.get
-          .calledWith(`Pathname:${this.doc_id}`)
-          .should.equal(true)
-      })
-
-      it('should get the projectHistoryId as an integer', function () {
-        return this.multi.get
-          .calledWith(`ProjectHistoryId:${this.doc_id}`)
-          .should.equal(true)
-      })
-
-      it('should get lastUpdatedAt', function () {
-        return this.multi.get
-          .calledWith(`lastUpdatedAt:${this.doc_id}`)
-          .should.equal(true)
-      })
-
-      it('should get lastUpdatedBy', function () {
-        return this.multi.get
-          .calledWith(`lastUpdatedBy:${this.doc_id}`)
+      it('should get all the details in one call to redis', function () {
+        this.rclient.mget
+          .calledWith(
+            `doclines:${this.doc_id}`,
+            `DocVersion:${this.doc_id}`,
+            `DocHash:${this.doc_id}`,
+            `ProjectId:${this.doc_id}`,
+            `Ranges:${this.doc_id}`,
+            `Pathname:${this.doc_id}`,
+            `ProjectHistoryId:${this.doc_id}`,
+            `UnflushedTime:${this.doc_id}`,
+            `lastUpdatedAt:${this.doc_id}`,
+            `lastUpdatedBy:${this.doc_id}`
+          )
           .should.equal(true)
       })
@@ -249,9 +211,9 @@
     describe('when the document is not present', function () {
       beforeEach(function () {
-        this.multi.exec = sinon
+        this.rclient.mget = sinon
           .stub()
-          .callsArgWith(0, null, [
+          .yields(null, [
             null,
             null,
             null,
@@ -322,9 +284,9 @@
     describe('with a corrupted document', function () {
       beforeEach(function () {
         this.badHash = 'INVALID-HASH-VALUE'
-        this.multi.exec = sinon
+        this.rclient.mget = sinon
           .stub()
-          .callsArgWith(0, null, [
+          .yields(null, [
             this.jsonlines,
             this.version,
             this.badHash,
@@ -351,19 +313,9 @@
     describe('with a slow request to redis', function () {
       beforeEach(function () {
-        this.multi.exec = sinon
-          .stub()
-          .callsArgWith(0, null, [
-            this.jsonlines,
-            this.version,
-            this.badHash,
-            this.project_id,
-            this.json_ranges,
-            this.pathname,
-            this.unflushed_time
-          ])
         this.clock = sinon.useFakeTimers()
-        this.multi.exec = (cb) => {
+        this.rclient.mget = (...args) => {
+          const cb = args.pop()
           this.clock.tick(6000)
           return cb(null, [
             this.jsonlines,
@@ -396,9 +348,9 @@
     return describe('getDoc with an invalid project id', function () {
       beforeEach(function () {
         this.another_project_id = 'project-id-456'
-        this.multi.exec = sinon
+        this.rclient.mget = sinon
          .stub()
-          .callsArgWith(0, null, [
+          .yields(null, [
           this.jsonlines,
           this.version,
           this.hash,
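[An illustrative aside on the patch above, not drawn from the patch itself: MGET fetches the values of all the listed keys in a single Redis command, returning them in key order with null for any key that is not set, where the old MULTI block issued a separate GET per key plus the MULTI/EXEC bookkeeping. A minimal standalone sketch of the same access pattern, assuming an ioredis-style client; the getDocFields helper and its three-key subset are made up for the example, though the key names match the ones exercised in the tests above:

    // Sketch only: assumes an ioredis-compatible client exposing mget().
    const Redis = require('ioredis')
    const rclient = new Redis()

    async function getDocFields(docId) {
      // One command, one round trip: values come back in the same order
      // as the keys, and a missing key yields null rather than an error.
      const [lines, version, hash] = await rclient.mget(
        `doclines:${docId}`,
        `DocVersion:${docId}`,
        `DocHash:${docId}`
      )
      return { lines, version, hash }
    }

MGET only covers plain string reads, so mixed reads and writes still need MULTI or a pipeline, which is presumably why only this all-GET block was collapsed.]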
From 83bbdfa9d13726d01273e3f6f4ac315b561d15ae Mon Sep 17 00:00:00 2001
From: Shane Kilkelly
Date: Tue, 13 Apr 2021 10:58:05 +0100
Subject: [PATCH 734/769] upgrade logger to 2.2.0

---
 services/document-updater/package-lock.json | 605 +++++++++----------
 services/document-updater/package.json      |   2 +-
 2 files changed, 276 insertions(+), 331 deletions(-)

diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json
index c347199789..4228277b85 100644
--- a/services/document-updater/package-lock.json
+++ b/services/document-updater/package-lock.json
@@ -155,19 +155,19 @@
       }
     },
     "@google-cloud/common": {
-      "version": "2.4.0",
-      "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz",
-      "integrity": "sha512-zWFjBS35eI9leAHhjfeOYlK5Plcuj/77EzstnrJIZbKgF/nkqjcQuGiMCpzCwOfPyUbz8ZaEOYgbHa759AKbjg==",
+      "version": "3.6.0",
+      "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz",
+      "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==",
       "requires": {
-        "@google-cloud/projectify": "^1.0.0",
-        "@google-cloud/promisify": "^1.0.0",
-        "arrify": "^2.0.0",
-        "duplexify": "^3.6.0",
+        "@google-cloud/projectify": "^2.0.0",
+        "@google-cloud/promisify": "^2.0.0",
+        "arrify": "^2.0.1",
+        "duplexify": "^4.1.1",
         "ent": "^2.2.0",
         "extend": "^3.0.2",
-        "google-auth-library": "^5.5.0",
-        "retry-request": "^4.0.0",
-        "teeny-request": "^6.0.0"
+        "google-auth-library": "^7.0.2",
+        "retry-request": "^4.1.1",
+        "teeny-request": "^7.0.0"
      }
    },
    "@google-cloud/debug-agent": {
@@ -357,44 +357,52 @@
      }
    },
    "@google-cloud/logging": {
-      "version": "7.3.0",
-      "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz",
-      "integrity":
"sha512-xTW1V4MKpYC0mjSugyuiyUoZ9g6A42IhrrO3z7Tt3SmAb2IRj2Gf4RLoguKKncs340ooZFXrrVN/++t2Aj5zgg==", + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-9.2.0.tgz", + "integrity": "sha512-eQRDKPq9Pq0pbDeo2/OaVrPRX+TDqaxZ7JagDAJx20dqxVwBtaA1rBUcCEXCAFrMZ2cUYhj3sDVuzqNwSObF2Q==", "requires": { - "@google-cloud/common": "^2.2.2", - "@google-cloud/paginator": "^2.0.0", - "@google-cloud/projectify": "^1.0.0", - "@google-cloud/promisify": "^1.0.0", - "@opencensus/propagation-stackdriver": "0.0.20", - "arrify": "^2.0.0", - "dot-prop": "^5.1.0", + "@google-cloud/common": "^3.4.1", + "@google-cloud/paginator": "^3.0.0", + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "@opencensus/propagation-stackdriver": "0.0.22", + "arrify": "^2.0.1", + "dot-prop": "^6.0.0", "eventid": "^1.0.0", "extend": "^3.0.2", - "gcp-metadata": "^3.1.0", - "google-auth-library": "^5.2.2", - "google-gax": "^1.11.0", - "is": "^3.3.0", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^7.0.0", + "google-gax": "^2.9.2", "on-finished": "^2.3.0", - "pumpify": "^2.0.0", - "snakecase-keys": "^3.0.0", - "stream-events": "^1.0.4", - "through2": "^3.0.0", - "type-fest": "^0.12.0" + "pumpify": "^2.0.1", + "snakecase-keys": "^3.1.2", + "stream-events": "^1.0.5", + "through2": "^4.0.0" + }, + "dependencies": { + "through2": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", + "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", + "requires": { + "readable-stream": "3" + } + } } }, "@google-cloud/logging-bunyan": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz", - "integrity": "sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.2.tgz", + "integrity": "sha512-7BmXGZLYsnDs5UT9qvb0/rA0i2BbD3AyKqwXl/hP0pDGboCg0GE8viVmwzmY8f/cUzRZHAxOgV0bTENeTd6KEA==", "requires": { - "@google-cloud/logging": "^7.0.0", - "google-auth-library": "^5.0.0" + "@google-cloud/logging": "^9.0.0", + "google-auth-library": "^7.0.0" } }, "@google-cloud/paginator": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.3.tgz", - "integrity": "sha512-kp/pkb2p/p0d8/SKUu4mOq8+HGwF8NPzHWkj+VKrIPQPyMRw8deZtrO/OcSiy9C/7bpfU5Txah5ltUNfPkgEXg==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.5.tgz", + "integrity": "sha512-N4Uk4BT1YuskfRhKXBs0n9Lg2YTROZc6IMpkO/8DIHODtm5s3xY8K5vVBo23v/2XulY3azwITQlYWgT4GdLsUw==", "requires": { "arrify": "^2.0.0", "extend": "^3.0.2" @@ -596,14 +604,14 @@ } }, "@google-cloud/projectify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", - "integrity": "sha512-ZdzQUN02eRsmTKfBj9FDL0KNDIFNjBn/d6tHQmA/+FImH5DO6ZV8E7FzxMgAUiVAUq41RFAkb25p1oHOZ8psfg==" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" }, "@google-cloud/promisify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", - "integrity": 
"sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" }, "@google-cloud/trace-agent": { "version": "5.1.3", @@ -801,42 +809,84 @@ } }, "@grpc/grpc-js": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", - "integrity": "sha512-Hm+xOiqAhcpT9RYM8lc15dbQD7aQurM7ZU8ulmulepiPlN7iwBXXwP3vSBUimoFoApRqz7pSIisXU8pZaCB4og==", + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.2.12.tgz", + "integrity": "sha512-+gPCklP1eqIgrNPyzddYQdt9+GvZqPlLpIjIo+TveE+gbtp74VV1A2ju8ExeO8ma8f7MbpaGZx/KJPYVWL9eDw==", "requires": { + "@types/node": ">=12.12.47", + "google-auth-library": "^6.1.1", "semver": "^6.2.0" + }, + "dependencies": { + "google-auth-library": { + "version": "6.1.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz", + "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + } } }, "@grpc/proto-loader": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.5.tgz", - "integrity": "sha512-WwN9jVNdHRQoOBo9FDH7qU+mgfjPc8GygPYms3M+y3fbQLfnCe/Kv/E01t7JRgnrsOHH8euvSbed3mIalXhwqQ==", + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.6.tgz", + "integrity": "sha512-DT14xgw3PSzPxwS13auTEwxhMMOoz33DPUKNtmYK/QYbBSpLXJy78FGGs5yVoxVobEqPm4iW9MOIoz0A3bLTRQ==", "requires": { "lodash.camelcase": "^4.3.0", "protobufjs": "^6.8.6" } }, "@opencensus/core": { - "version": "0.0.20", - "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.20.tgz", - "integrity": "sha512-vqOuTd2yuMpKohp8TNNGUAPjWEGjlnGfB9Rh5e3DKqeyR94YgierNs4LbMqxKtsnwB8Dm2yoEtRuUgoe5vD9DA==", + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", + "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", "requires": { "continuation-local-storage": "^3.2.1", "log-driver": "^1.2.7", - "semver": "^6.0.0", + "semver": "^7.0.0", "shimmer": "^1.2.0", - "uuid": "^3.2.1" + "uuid": "^8.0.0" + }, + "dependencies": { + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + } } }, "@opencensus/propagation-stackdriver": { - "version": "0.0.20", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.20.tgz", - "integrity": "sha512-P8yuHSLtce+yb+2EZjtTVqG7DQ48laC+IuOWi3X9q78s1Gni5F9+hmbmyP6Nb61jb5BEvXQX1s2rtRI6bayUWA==", + "version": "0.0.22", + "resolved": 
"https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", + "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", "requires": { - "@opencensus/core": "^0.0.20", + "@opencensus/core": "^0.0.22", "hex2dec": "^1.0.1", - "uuid": "^3.2.1" + "uuid": "^8.0.0" + }, + "dependencies": { + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + } } }, "@overleaf/metrics": { @@ -1003,14 +1053,6 @@ "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", "dev": true }, - "@types/fs-extra": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.1.tgz", - "integrity": "sha512-TcUlBem321DFQzBNuz8p0CLLKp0VvF/XH9E4KHNmgwyp4E3AfgI5cjiIVZWlbfThBop2qxFIh4+LeY6hVWWZ2w==", - "requires": { - "@types/node": "*" - } - }, "@types/json-schema": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", @@ -1023,9 +1065,9 @@ "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==" }, "@types/node": { - "version": "10.17.15", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.15.tgz", - "integrity": "sha512-daFGV9GSs6USfPgxceDA8nlSe48XrVCJfDeYm7eokxq/ye7iuOH87hKXgMtEAVLFapkczbZsx868PMDT1Y0a6A==" + "version": "14.14.37", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.37.tgz", + "integrity": "sha512-XYmBiy+ohOR4Lh5jE379fV2IU+6Jn4g5qASinhitfyO71b/sCo6MKsMLF5tc7Zf2CE8hViVQyYSobJNke8OvUw==" }, "@types/semver": { "version": "7.3.4", @@ -1396,9 +1438,9 @@ } }, "bignumber.js": { - "version": "7.2.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", - "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" }, "binary-extensions": { "version": "2.2.0", @@ -1589,6 +1631,11 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=" + }, "chokidar": { "version": "3.5.1", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", @@ -1786,10 +1833,15 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, + "crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=" + }, "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" + "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, "damerau-levenshtein": { "version": "1.0.6", @@ -1917,9 +1969,9 @@ } }, "dot-prop": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", - "integrity": 
"sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", + "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", "requires": { "is-obj": "^2.0.0" } @@ -1934,43 +1986,14 @@ } }, "duplexify": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", - "integrity": "sha1-Kk31MX9sz9kfhtb9JdjYoQO4gwk=", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", "requires": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", "stream-shift": "^1.0.0" - }, - "dependencies": { - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - } } }, "ecc-jsbn": { @@ -3003,9 +3026,9 @@ } }, "gaxios": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz", - "integrity": "sha512-US8UMj8C5pRnao3Zykc4AAVr+cffoNKRTg9Rsf2GiuZCW69vgJj38VK2PzlPuQU73FZ/nTk9/Av6/JGcE1N9vA==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", + "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -3015,12 +3038,12 @@ } }, "gcp-metadata": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.5.0.tgz", - "integrity": "sha512-ZQf+DLZ5aKcRpLzYUyBS3yo3N0JSa82lNDO8rj3nMSlovLcz2riKFBsYgDzeXcv75oo5eqB2lx+B14UvPoCRnA==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", "requires": { - "gaxios": "^2.1.0", - "json-bigint": "^0.3.0" + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" } }, "get-caller-file": { @@ -3072,76 +3095,52 @@ "dev": true }, "google-auth-library": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", - "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.4.tgz", + 
"integrity": "sha512-o8irYyeijEiecTXeoEe8UKNEzV1X+uhR4b2oNdapDMZixypp0J+eHimGOyx5Joa3UAeokGngdtDLXtq9vDqG2Q==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", "fast-text-encoding": "^1.0.0", - "gaxios": "^2.1.0", - "gcp-metadata": "^3.4.0", - "gtoken": "^4.1.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", "jws": "^4.0.0", - "lru-cache": "^5.0.0" + "lru-cache": "^6.0.0" } }, "google-gax": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.15.3.tgz", - "integrity": "sha512-3JKJCRumNm3x2EksUTw4P1Rad43FTpqrtW9jzpf3xSMYXx+ogaqTM1vGo7VixHB4xkAyATXVIa3OcNSh8H9zsQ==", + "version": "2.11.2", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.11.2.tgz", + "integrity": "sha512-PNqXv7Oi5XBMgoMWVxLZHUidfMv7cPHrDSDXqLyEd6kY6pqFnVKC8jt2T1df4JPSc2+VLPdeo6L7X9mbdQG8Xw==", "requires": { - "@grpc/grpc-js": "~1.0.3", + "@grpc/grpc-js": "~1.2.0", "@grpc/proto-loader": "^0.5.1", - "@types/fs-extra": "^8.0.1", "@types/long": "^4.0.0", "abort-controller": "^3.0.0", - "duplexify": "^3.6.0", - "google-auth-library": "^5.0.0", + "duplexify": "^4.0.0", + "fast-text-encoding": "^1.0.3", + "google-auth-library": "^7.0.2", "is-stream-ended": "^0.1.4", - "lodash.at": "^4.6.0", - "lodash.has": "^4.5.2", - "node-fetch": "^2.6.0", - "protobufjs": "^6.8.9", - "retry-request": "^4.0.0", - "semver": "^6.0.0", - "walkdir": "^0.4.0" + "node-fetch": "^2.6.1", + "protobufjs": "^6.10.2", + "retry-request": "^4.0.0" }, "dependencies": { - "@types/node": { - "version": "13.13.25", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.25.tgz", - "integrity": "sha512-6ZMK4xRcF2XrPdKmPYQxZkdHKV18xKgUFVvhIgw2iwaaO6weleLPHLBGPZmLhjo+m1N+MZXRAoBEBCCVqgO2zQ==" - }, - "protobufjs": { - "version": "6.10.1", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz", - "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } + "fast-text-encoding": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz", + "integrity": "sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig==" } } }, "google-p12-pem": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-2.0.4.tgz", - "integrity": "sha512-S4blHBQWZRnEW44OcR7TL9WR+QCqByRvhNDZ/uuQfpxywfupikf/miba8js1jZi6ZOGv5slgSuoshCWh6EMDzg==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", "requires": { - "node-forge": "^0.9.0" + "node-forge": "^0.10.0" } }, "graceful-fs": { @@ -3157,21 +3156,13 @@ "dev": true }, "gtoken": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-4.1.4.tgz", - "integrity": "sha512-VxirzD0SWoFUo5p8RDP8Jt2AGyOmyYcT/pOUgDKJCK+iSw0TMqwrVfY37RXTNmoKwrzmDHSk0GMT9FsgVmnVSA==", + "version": "5.2.1", + 
"resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", "requires": { - "gaxios": "^2.1.0", - "google-p12-pem": "^2.0.0", - "jws": "^4.0.0", - "mime": "^2.2.0" - }, - "dependencies": { - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - } + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" } }, "har-schema": { @@ -3596,6 +3587,11 @@ "binary-extensions": "^2.0.0" } }, + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", @@ -3728,11 +3724,11 @@ "dev": true }, "json-bigint": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", - "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", "requires": { - "bignumber.js": "^7.0.0" + "bignumber.js": "^9.0.0" } }, "json-schema": { @@ -3827,15 +3823,10 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==" }, - "lodash.at": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" - }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, "lodash.defaults": { "version": "4.2.0", @@ -3853,11 +3844,6 @@ "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", "dev": true }, - "lodash.has": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" - }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -3942,65 +3928,33 @@ } }, "logger-sharelatex": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", - "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz", + "integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==", "requires": { - "@google-cloud/logging-bunyan": "^2.0.0", - "@overleaf/o-error": "^2.0.0", - "bunyan": "1.8.12", - "raven": "1.1.3", - "request": "2.88.0", - "yn": "^3.1.1" + "@google-cloud/logging-bunyan": "^3.0.0", + "@overleaf/o-error": "^3.0.0", + "bunyan": "^1.8.14", + 
"node-fetch": "^2.6.0", + "raven": "^2.6.4", + "yn": "^4.0.0" }, "dependencies": { - "@overleaf/o-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", - "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" - }, "bunyan": { - "version": "1.8.12", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", + "version": "1.8.15", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz", + "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==", "requires": { "dtrace-provider": "~0.8", - "moment": "^2.10.6", + "moment": "^2.19.3", "mv": "~2", "safe-json-stringify": "~1" } }, - "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" - }, - "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.0", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - } + "yn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", + "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" } } }, @@ -4077,18 +4031,20 @@ } }, "lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha1-HaJ+ZxAnGUdpXa9oSOhH8B2EuSA=", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "requires": { - "yallist": "^3.0.2" + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } } }, - "lsmod": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" - }, "make-plural": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", @@ -4112,6 +4068,16 @@ "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, + "md5": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", + "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", + "requires": { + "charenc": "0.0.2", + "crypt": "0.0.2", + "is-buffer": "~1.1.6" + } + }, "media-typer": { 
"version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -4643,9 +4609,9 @@ "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" }, "node-forge": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.2.tgz", - "integrity": "sha512-naKSScof4Wn+aoHU6HBsifh92Zeicm1GDQKd1vp3Y/kOi8ub0DozCa9KpvYNCXslFHYRmLNiqRopGdTGwNLpNw==" + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" }, "node-pre-gyp": { "version": "0.16.0", @@ -5757,9 +5723,9 @@ } }, "protobufjs": { - "version": "6.8.8", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", - "integrity": "sha1-yLTxKC/XqQ5vWxCe0RyEr4KQjnw=", + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", + "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -5771,9 +5737,16 @@ "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.0", - "@types/node": "^10.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", "long": "^4.0.0" + }, + "dependencies": { + "@types/node": { + "version": "13.13.48", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", + "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" + } } }, "proxy-addr": { @@ -5807,19 +5780,6 @@ "duplexify": "^4.1.1", "inherits": "^2.0.3", "pump": "^3.0.0" - }, - "dependencies": { - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - } } }, "punycode": { @@ -5859,15 +5819,15 @@ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raven": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz", + "integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==", "requires": { "cookie": "0.3.1", - "json-stringify-safe": "5.0.1", - "lsmod": "1.0.0", - "stack-trace": "0.0.9", - "uuid": "3.0.0" + "md5": "^2.2.1", + "stack-trace": "0.0.10", + "timed-out": "4.0.1", + "uuid": "3.3.2" }, "dependencies": { "cookie": { @@ -5875,10 +5835,15 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, + "stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" + }, "uuid": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": 
"sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -6186,7 +6151,7 @@ "safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", + "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, "safer-buffer": { @@ -6380,11 +6345,11 @@ } }, "snakecase-keys": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.0.tgz", - "integrity": "sha512-WTJ0NhCH/37J+PU3fuz0x5b6TvtWQChTcKPOndWoUy0pteKOe0hrHMzSRsJOWSIP48EQkzUEsgQPmrG3W8pFNQ==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.1.tgz", + "integrity": "sha512-CjU5pyRfwOtaOITYv5C8DzpZ8XA/ieRsDpr93HI2r6e3YInC6moZpSQbmUtg8cTk58tq2x3jcG2gv+p1IZGmMA==", "requires": { - "map-obj": "^4.0.0", + "map-obj": "^4.1.0", "to-snake-case": "^1.0.0" } }, @@ -6476,7 +6441,8 @@ "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=", + "dev": true }, "standard-as-callback": { "version": "2.0.1", @@ -6646,21 +6612,21 @@ } }, "teeny-request": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.3.tgz", - "integrity": "sha512-TZG/dfd2r6yeji19es1cUIwAlVD8y+/svB1kAC2Y0bjEyysrfbO8EZvJBRwIE6WkwmUoB7uvWLwTIhJbMXZ1Dw==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", "requires": { "http-proxy-agent": "^4.0.0", "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.2.0", + "node-fetch": "^2.6.1", "stream-events": "^1.0.5", - "uuid": "^7.0.0" + "uuid": "^8.0.0" }, "dependencies": { "uuid": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", - "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==" + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" } } }, @@ -6708,6 +6674,11 @@ } } }, + "timed-out": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", + "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" + }, "timekeeper": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", @@ -6732,7 +6703,7 @@ "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" }, "to-regex-range": { "version": "5.0.1", @@ -6746,7 +6717,7 @@ "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", + "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", "requires": { "to-space-case": "^1.0.0" } @@ -6754,7 +6725,7 @@ "to-space-case": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", + "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", "requires": { "to-no-case": "^1.0.0" } @@ -6764,22 +6735,6 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, - "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", - "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" - } - } - }, "tslib": { "version": "1.11.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.2.tgz", @@ -6814,11 +6769,6 @@ "integrity": "sha512-f9Uv6ezcpvCQjJU0Zqbg+65qdcszv3qUQsZfjdRbWiZ7AMenrX1u0lNk9EoWWX6e1F+NULyg27mtdeZ5WhpljA==", "dev": true }, - "type-fest": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz", - "integrity": "sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==" - }, "type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -6972,11 +6922,6 @@ } } }, - "walkdir": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", - "integrity": "sha512-3eBwRyEln6E1MSzcxcVpQIhRG8Q1jLvEqRmCZqS3dsfXEDR/AhOF4d+jHg1qvDCpYaVRZjENPQyrVxAkQqxPgQ==" - }, "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index fbe12047a6..32b4b90999 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -27,7 +27,7 @@ "diff-match-patch": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz", "express": "4.17.1", "lodash": "^4.17.19", - "logger-sharelatex": "^1.9.1", + "logger-sharelatex": "^2.2.0", "mongodb": "^3.6.0", "request": "^2.88.2", "requestretry": "^4.1.0", From 178440395f2131ffedb9ba26b6aa399cc366c25c Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 28 Mar 2021 13:18:21 +0200 Subject: [PATCH 735/769] [perf] switch write sequence for doc contents and doc tracking Doc contents are added only after the tracking has been setup. All read paths on the tracking have been checked to gracefully handle the case of existing doc_id but missing doc contents. 
- getDoc: -1 operation REF: 0a2b47c660c60b95e360d8f3b3e30b862ceb6d79 --- .../document-updater/app/js/RedisManager.js | 90 ++++++------------- .../unit/js/RedisManager/RedisManagerTests.js | 78 ---------------- 2 files changed, 27 insertions(+), 141 deletions(-) diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js index 35f62c8222..4c0d144529 100644 --- a/services/document-updater/app/js/RedisManager.js +++ b/services/document-updater/app/js/RedisManager.js @@ -84,28 +84,23 @@ module.exports = RedisManager = { logger.error({ err: error, doc_id, project_id }, error.message) return callback(error) } - const multi = rclient.multi() - multi.set(keys.docLines({ doc_id }), docLines) - multi.set(keys.projectKey({ doc_id }), project_id) - multi.set(keys.docVersion({ doc_id }), version) - multi.set(keys.docHash({ doc_id }), docHash) - if (ranges != null) { - multi.set(keys.ranges({ doc_id }), ranges) - } else { - multi.del(keys.ranges({ doc_id })) - } - multi.set(keys.pathname({ doc_id }), pathname) - multi.set(keys.projectHistoryId({ doc_id }), projectHistoryId) - return multi.exec(function (error, result) { - if (error != null) { - return callback(error) + // update docsInProject set before writing doc contents + rclient.sadd(keys.docsInProject({ project_id }), doc_id, (error) => { + if (error) return callback(error) + + const multi = rclient.multi() + multi.set(keys.docLines({ doc_id }), docLines) + multi.set(keys.projectKey({ doc_id }), project_id) + multi.set(keys.docVersion({ doc_id }), version) + multi.set(keys.docHash({ doc_id }), docHash) + if (ranges != null) { + multi.set(keys.ranges({ doc_id }), ranges) + } else { + multi.del(keys.ranges({ doc_id })) } - // update docsInProject set - return rclient.sadd( - keys.docsInProject({ project_id }), - doc_id, - callback - ) + multi.set(keys.pathname({ doc_id }), pathname) + multi.set(keys.projectHistoryId({ doc_id }), projectHistoryId) + multi.exec(callback) }) }) }, @@ -269,48 +264,17 @@ module.exports = RedisManager = { projectHistoryId = parseInt(projectHistoryId) } - // doc is not in redis, bail out - if (docLines == null) { - return callback( - null, - docLines, - version, - ranges, - pathname, - projectHistoryId, - unflushedTime, - lastUpdatedAt, - lastUpdatedBy - ) - } - - // doc should be in project set, check if missing (workaround for missing docs from putDoc) - return rclient.sadd(keys.docsInProject({ project_id }), doc_id, function ( - error, - result - ) { - if (error != null) { - return callback(error) - } - if (result !== 0) { - // doc should already be in set - logger.error( - { project_id, doc_id, doc_project_id }, - 'doc missing from docsInProject set' - ) - } - return callback( - null, - docLines, - version, - ranges, - pathname, - projectHistoryId, - unflushedTime, - lastUpdatedAt, - lastUpdatedBy - ) - }) + callback( + null, + docLines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy + ) }) }, diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index 1937ddfb86..d8f21844cd 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -182,12 +182,6 @@ describe('RedisManager', function () { .should.equal(true) }) - it('should check if the document is in the DocsIn set', function () { - return 
this.rclient.sadd - .calledWith(`DocsIn:${this.project_id}`) - .should.equal(true) - }) - it('should return the document', function () { return this.callback .calledWithExactly( @@ -209,78 +203,6 @@ describe('RedisManager', function () { }) }) - describe('when the document is not present', function () { - beforeEach(function () { - this.rclient.mget = sinon - .stub() - .yields(null, [ - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ]) - this.rclient.sadd = sinon.stub().yields() - return this.RedisManager.getDoc( - this.project_id, - this.doc_id, - this.callback - ) - }) - - it('should not check if the document is in the DocsIn set', function () { - return this.rclient.sadd - .calledWith(`DocsIn:${this.project_id}`) - .should.equal(false) - }) - - it('should return an empty result', function () { - return this.callback - .calledWithExactly(null, null, 0, {}, null, null, null, null, null) - .should.equal(true) - }) - - return it('should not log any errors', function () { - return this.logger.error.calledWith().should.equal(false) - }) - }) - - describe('when the document is missing from the DocsIn set', function () { - beforeEach(function () { - this.rclient.sadd = sinon.stub().yields(null, 1) - return this.RedisManager.getDoc( - this.project_id, - this.doc_id, - this.callback - ) - }) - - it('should log an error', function () { - return this.logger.error.calledWith().should.equal(true) - }) - - return it('should return the document', function () { - return this.callback - .calledWithExactly( - null, - this.lines, - this.version, - this.ranges, - this.pathname, - this.projectHistoryId, - this.unflushed_time, - this.lastUpdatedAt, - this.lastUpdatedBy - ) - .should.equal(true) - }) - }) - describe('with a corrupted document', function () { beforeEach(function () { this.badHash = 'INVALID-HASH-VALUE' From 6e551f9b343a04b4793e90d067b7d059e3033ce0 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 28 Mar 2021 13:30:51 +0200 Subject: [PATCH 736/769] [perf] use MGET/MSET/DEL for manipulating multiple keys in one operation In some cases we can get rid of MULTI/EXEC operations too. 
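The saving comes from the command count: a MULTI/EXEC block still issues
one command per key touched, whereas MGET/MSET/DEL cover many keys with a
single command. A minimal sketch of the rewrite pattern (ioredis-style
client, illustrative key names):

    // before: one SET per key, wrapped in MULTI/EXEC
    const multi = rclient.multi()
    multi.set('DocVersion:{doc-id}', version)
    multi.set('DocHash:{doc-id}', docHash)
    multi.exec(callback)

    // after: a single MSET covering both keys
    rclient.mset(
      { 'DocVersion:{doc-id}': version, 'DocHash:{doc-id}': docHash },
      callback
    )

The resulting Redis operation counts per call: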
- putDocInMemory: from 10 down to 2 operations - removeDocFromMemory: from 14+4 down to 4+4 operations - updateDoc: from 13 down to 8 operations --- .../document-updater/app/js/RedisManager.js | 73 +++--- .../unit/js/RedisManager/RedisManagerTests.js | 235 ++++++------------ 2 files changed, 111 insertions(+), 197 deletions(-) diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js index 4c0d144529..73d85f60d5 100644 --- a/services/document-updater/app/js/RedisManager.js +++ b/services/document-updater/app/js/RedisManager.js @@ -88,19 +88,18 @@ module.exports = RedisManager = { rclient.sadd(keys.docsInProject({ project_id }), doc_id, (error) => { if (error) return callback(error) - const multi = rclient.multi() - multi.set(keys.docLines({ doc_id }), docLines) - multi.set(keys.projectKey({ doc_id }), project_id) - multi.set(keys.docVersion({ doc_id }), version) - multi.set(keys.docHash({ doc_id }), docHash) - if (ranges != null) { - multi.set(keys.ranges({ doc_id }), ranges) - } else { - multi.del(keys.ranges({ doc_id })) - } - multi.set(keys.pathname({ doc_id }), pathname) - multi.set(keys.projectHistoryId({ doc_id }), projectHistoryId) - multi.exec(callback) + rclient.mset( + { + [keys.docLines({ doc_id })]: docLines, + [keys.projectKey({ doc_id })]: project_id, + [keys.docVersion({ doc_id })]: version, + [keys.docHash({ doc_id })]: docHash, + [keys.ranges({ doc_id })]: ranges, + [keys.pathname({ doc_id })]: pathname, + [keys.projectHistoryId({ doc_id })]: projectHistoryId + }, + callback + ) }) }) }, @@ -119,17 +118,19 @@ module.exports = RedisManager = { let multi = rclient.multi() multi.strlen(keys.docLines({ doc_id })) - multi.del(keys.docLines({ doc_id })) - multi.del(keys.projectKey({ doc_id })) - multi.del(keys.docVersion({ doc_id })) - multi.del(keys.docHash({ doc_id })) - multi.del(keys.ranges({ doc_id })) - multi.del(keys.pathname({ doc_id })) - multi.del(keys.projectHistoryId({ doc_id })) - multi.del(keys.projectHistoryType({ doc_id })) - multi.del(keys.unflushedTime({ doc_id })) - multi.del(keys.lastUpdatedAt({ doc_id })) - multi.del(keys.lastUpdatedBy({ doc_id })) + multi.del( + keys.docLines({ doc_id }), + keys.projectKey({ doc_id }), + keys.docVersion({ doc_id }), + keys.docHash({ doc_id }), + keys.ranges({ doc_id }), + keys.pathname({ doc_id }), + keys.projectHistoryId({ doc_id }), + keys.projectHistoryType({ doc_id }), + keys.unflushedTime({ doc_id }), + keys.lastUpdatedAt({ doc_id }), + keys.lastUpdatedBy({ doc_id }) + ) return multi.exec(function (error, response) { if (error != null) { return callback(error) @@ -483,19 +484,19 @@ module.exports = RedisManager = { return callback(error) } const multi = rclient.multi() - multi.set(keys.docLines({ doc_id }), newDocLines) // index 0 - multi.set(keys.docVersion({ doc_id }), newVersion) // index 1 - multi.set(keys.docHash({ doc_id }), newHash) // index 2 + multi.mset({ + [keys.docLines({ doc_id })]: newDocLines, + [keys.docVersion({ doc_id })]: newVersion, + [keys.docHash({ doc_id })]: newHash, + [keys.ranges({ doc_id })]: ranges, + [keys.lastUpdatedAt({ doc_id })]: Date.now(), + [keys.lastUpdatedBy({ doc_id })]: updateMeta && updateMeta.user_id + }) multi.ltrim( keys.docOps({ doc_id }), -RedisManager.DOC_OPS_MAX_LENGTH, -1 ) // index 3 - if (ranges != null) { - multi.set(keys.ranges({ doc_id }), ranges) // index 4 - } else { - multi.del(keys.ranges({ doc_id })) // also index 4 - } // push the ops last so we can get the lengths at fixed index position 7 if 
(jsonOps.length > 0) { multi.rpush(keys.docOps({ doc_id }), ...Array.from(jsonOps)) // index 5 @@ -519,12 +520,6 @@ module.exports = RedisManager = { // hasn't been modified before (the content in mongo has been // valid up to this point). Otherwise leave it alone ("NX" flag). multi.set(keys.unflushedTime({ doc_id }), Date.now(), 'NX') - multi.set(keys.lastUpdatedAt({ doc_id }), Date.now()) // index 8 - if (updateMeta != null ? updateMeta.user_id : undefined) { - multi.set(keys.lastUpdatedBy({ doc_id }), updateMeta.user_id) // index 9 - } else { - multi.del(keys.lastUpdatedBy({ doc_id })) // index 9 - } } return multi.exec(function (error, result) { let docUpdateCount @@ -536,7 +531,7 @@ module.exports = RedisManager = { docUpdateCount = undefined // only using project history, don't bother with track-changes } else { // project is using old track-changes history service - docUpdateCount = result[7] // length of uncompressedHistoryOps queue (index 7) + docUpdateCount = result[4] } if ( diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index d8f21844cd..29329e8411 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -153,7 +153,6 @@ describe('RedisManager', function () { this.projectHistoryId.toString(), this.unflushed_time ]) - return (this.rclient.sadd = sinon.stub().yields(null, 0)) }) describe('successfully', function () { @@ -469,6 +468,7 @@ describe('RedisManager', function () { this.project_update_list_length = sinon.stub() this.RedisManager.getDocVersion = sinon.stub() + this.multi.mset = sinon.stub() this.multi.set = sinon.stub() this.multi.rpush = sinon.stub() this.multi.expire = sinon.stub() @@ -477,9 +477,6 @@ describe('RedisManager', function () { this.multi.exec = sinon .stub() .callsArgWith(0, null, [ - this.hash, - null, - null, null, null, null, @@ -524,27 +521,16 @@ describe('RedisManager', function () { .should.equal(true) }) - it('should set the doclines', function () { - return this.multi.set - .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) - .should.equal(true) - }) - - it('should set the version', function () { - return this.multi.set - .calledWith(`DocVersion:${this.doc_id}`, this.version) - .should.equal(true) - }) - - it('should set the hash', function () { - return this.multi.set - .calledWith(`DocHash:${this.doc_id}`, this.hash) - .should.equal(true) - }) - - it('should set the ranges', function () { - return this.multi.set - .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) + it('should set most details in a single MSET call', function () { + this.multi.mset + .calledWith({ + [`doclines:${this.doc_id}`]: JSON.stringify(this.lines), + [`DocVersion:${this.doc_id}`]: this.version, + [`DocHash:${this.doc_id}`]: this.hash, + [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges), + [`lastUpdatedAt:${this.doc_id}`]: Date.now(), + [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id' + }) .should.equal(true) }) @@ -554,18 +540,6 @@ describe('RedisManager', function () { .should.equal(true) }) - it('should set the last updated time', function () { - return this.multi.set - .calledWith(`lastUpdatedAt:${this.doc_id}`, Date.now()) - .should.equal(true) - }) - - it('should set the last updater', function () { - return this.multi.set - .calledWith(`lastUpdatedBy:${this.doc_id}`, 'last-author-fake-id') - 
.should.equal(true) - }) - it('should push the doc op into the doc ops list', function () { return this.multi.rpush .calledWith( @@ -747,8 +721,15 @@ describe('RedisManager', function () { }) return it('should still set the doclines', function () { - return this.multi.set - .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) + this.multi.mset + .calledWith({ + [`doclines:${this.doc_id}`]: JSON.stringify(this.lines), + [`DocVersion:${this.doc_id}`]: this.version, + [`DocHash:${this.doc_id}`]: this.hash, + [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges), + [`lastUpdatedAt:${this.doc_id}`]: Date.now(), + [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id' + }) .should.equal(true) }) }) @@ -770,15 +751,16 @@ describe('RedisManager', function () { ) }) - it('should not set the ranges', function () { - return this.multi.set - .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) - .should.equal(false) - }) - - return it('should delete the ranges key', function () { - return this.multi.del - .calledWith(`Ranges:${this.doc_id}`) + it('should set empty ranges', function () { + this.multi.mset + .calledWith({ + [`doclines:${this.doc_id}`]: JSON.stringify(this.lines), + [`DocVersion:${this.doc_id}`]: this.version, + [`DocHash:${this.doc_id}`]: this.hash, + [`Ranges:${this.doc_id}`]: null, + [`lastUpdatedAt:${this.doc_id}`]: Date.now(), + [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id' + }) .should.equal(true) }) }) @@ -866,15 +848,16 @@ describe('RedisManager', function () { ) }) - it('should set the last updater to null', function () { - return this.multi.del - .calledWith(`lastUpdatedBy:${this.doc_id}`) - .should.equal(true) - }) - - return it('should still set the last updated time', function () { - return this.multi.set - .calledWith(`lastUpdatedAt:${this.doc_id}`, Date.now()) + it('should unset last updater', function () { + this.multi.mset + .calledWith({ + [`doclines:${this.doc_id}`]: JSON.stringify(this.lines), + [`DocVersion:${this.doc_id}`]: this.version, + [`DocHash:${this.doc_id}`]: this.hash, + [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges), + [`lastUpdatedAt:${this.doc_id}`]: Date.now(), + [`lastUpdatedBy:${this.doc_id}`]: undefined + }) .should.equal(true) }) }) @@ -882,16 +865,14 @@ describe('RedisManager', function () { describe('putDocInMemory', function () { beforeEach(function () { - this.multi.set = sinon.stub() + this.rclient.mset = sinon.stub().yields(null) this.rclient.sadd = sinon.stub().yields() - this.multi.del = sinon.stub() this.lines = ['one', 'two', 'three', 'これは'] this.version = 42 this.hash = crypto .createHash('sha1') .update(JSON.stringify(this.lines), 'utf8') .digest('hex') - this.multi.exec = sinon.stub().callsArgWith(0, null, [this.hash]) this.ranges = { comments: 'mock', entries: 'mock' } return (this.pathname = '/a/b/c.tex') }) @@ -910,45 +891,17 @@ describe('RedisManager', function () { ) }) - it('should set the lines', function () { - return this.multi.set - .calledWith(`doclines:${this.doc_id}`, JSON.stringify(this.lines)) - .should.equal(true) - }) - - it('should set the version', function () { - return this.multi.set - .calledWith(`DocVersion:${this.doc_id}`, this.version) - .should.equal(true) - }) - - it('should set the hash', function () { - return this.multi.set - .calledWith(`DocHash:${this.doc_id}`, this.hash) - .should.equal(true) - }) - - it('should set the ranges', function () { - return this.multi.set - .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges)) - .should.equal(true) - 
})
-
-    it('should set the project_id for the doc', function () {
-      return this.multi.set
-        .calledWith(`ProjectId:${this.doc_id}`, this.project_id)
-        .should.equal(true)
-    })
-
-    it('should set the pathname for the doc', function () {
-      return this.multi.set
-        .calledWith(`Pathname:${this.doc_id}`, this.pathname)
-        .should.equal(true)
-    })
-
-    it('should set the projectHistoryId for the doc', function () {
-      return this.multi.set
-        .calledWith(`ProjectHistoryId:${this.doc_id}`, this.projectHistoryId)
+    it('should set all the details in a single MSET call', function () {
+      this.rclient.mset
+        .calledWith({
+          [`doclines:${this.doc_id}`]: JSON.stringify(this.lines),
+          [`ProjectId:${this.doc_id}`]: this.project_id,
+          [`DocVersion:${this.doc_id}`]: this.version,
+          [`DocHash:${this.doc_id}`]: this.hash,
+          [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
+          [`Pathname:${this.doc_id}`]: this.pathname,
+          [`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId
+        })
         .should.equal(true)
     })
@@ -977,17 +930,19 @@
         )
       })
 
-      it('should delete the ranges key', function () {
-        return this.multi.del
-          .calledWith(`Ranges:${this.doc_id}`)
+      it('should unset ranges', function () {
+        this.rclient.mset
+          .calledWith({
+            [`doclines:${this.doc_id}`]: JSON.stringify(this.lines),
+            [`ProjectId:${this.doc_id}`]: this.project_id,
+            [`DocVersion:${this.doc_id}`]: this.version,
+            [`DocHash:${this.doc_id}`]: this.hash,
+            [`Ranges:${this.doc_id}`]: null,
+            [`Pathname:${this.doc_id}`]: this.pathname,
+            [`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId
+          })
           .should.equal(true)
        })
-
-      return it('should not set the ranges', function () {
-        return this.multi.set
-          .calledWith(`Ranges:${this.doc_id}`, JSON.stringify(this.ranges))
-          .should.equal(false)
-      })
    })
 
    describe('with null bytes in the serialized doc lines', function () {
@@ -1070,33 +1025,21 @@
          .should.equal(true)
      })
 
-    it('should delete the lines', function () {
+    it('should delete the details in a single call', function () {
       return this.multi.del
-      .calledWith(`doclines:${this.doc_id}`)
-        .should.equal(true)
-    })
-
-    it('should delete the version', function () {
-      return this.multi.del
-        .calledWith(`DocVersion:${this.doc_id}`)
-        .should.equal(true)
-    })
-
-    it('should delete the hash', function () {
-      return this.multi.del
-        .calledWith(`DocHash:${this.doc_id}`)
-        .should.equal(true)
-    })
-
-    it('should delete the unflushed time', function () {
-      return this.multi.del
-        .calledWith(`UnflushedTime:${this.doc_id}`)
-        .should.equal(true)
-    })
-
-    it('should delete the project_id for the doc', function () {
-      return this.multi.del
-        .calledWith(`ProjectId:${this.doc_id}`)
+      .calledWith(
+        `doclines:${this.doc_id}`,
+        `ProjectId:${this.doc_id}`,
+        `DocVersion:${this.doc_id}`,
+        `DocHash:${this.doc_id}`,
+        `Ranges:${this.doc_id}`,
+        `Pathname:${this.doc_id}`,
+        `ProjectHistoryId:${this.doc_id}`,
+        `ProjectHistoryType:${this.doc_id}`,
+        `UnflushedTime:${this.doc_id}`,
+        `lastUpdatedAt:${this.doc_id}`,
+        `lastUpdatedBy:${this.doc_id}`
+      )
.should.equal(true) - }) - - it('should delete lastUpdatedAt', function () { - return this.multi.del - .calledWith(`lastUpdatedAt:${this.doc_id}`) - .should.equal(true) - }) - - return it('should delete lastUpdatedBy', function () { - return this.multi.del - .calledWith(`lastUpdatedBy:${this.doc_id}`) - .should.equal(true) - }) }) describe('clearProjectState', function () { From 34fc349646c631005cb19e750ce730b60efb2197 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 28 Mar 2021 19:31:46 +0200 Subject: [PATCH 737/769] [benchmarks] add benchmark for multi vs mget/mset --- .../benchmarks/multi_vs_mget_mset.rb | 188 ++++++++++++++++++ 1 file changed, 188 insertions(+) create mode 100644 services/document-updater/benchmarks/multi_vs_mget_mset.rb diff --git a/services/document-updater/benchmarks/multi_vs_mget_mset.rb b/services/document-updater/benchmarks/multi_vs_mget_mset.rb new file mode 100644 index 0000000000..ea953cda14 --- /dev/null +++ b/services/document-updater/benchmarks/multi_vs_mget_mset.rb @@ -0,0 +1,188 @@ +require "benchmark" +require "redis" + +N = (ARGV.first || 1).to_i +DOC_ID = (ARGV.last || "606072b20bb4d3109fb5b122") + +@r = Redis.new + + +def get + @r.get("doclines:{#{DOC_ID}}") + @r.get("DocVersion:{#{DOC_ID}}") + @r.get("DocHash:{#{DOC_ID}}") + @r.get("ProjectId:{#{DOC_ID}}") + @r.get("Ranges:{#{DOC_ID}}") + @r.get("Pathname:{#{DOC_ID}}") + @r.get("ProjectHistoryId:{#{DOC_ID}}") + @r.get("UnflushedTime:{#{DOC_ID}}") + @r.get("lastUpdatedAt:{#{DOC_ID}}") + @r.get("lastUpdatedBy:{#{DOC_ID}}") +end + +def mget + @r.mget( + "doclines:{#{DOC_ID}}", + "DocVersion:{#{DOC_ID}}", + "DocHash:{#{DOC_ID}}", + "ProjectId:{#{DOC_ID}}", + "Ranges:{#{DOC_ID}}", + "Pathname:{#{DOC_ID}}", + "ProjectHistoryId:{#{DOC_ID}}", + "UnflushedTime:{#{DOC_ID}}", + "lastUpdatedAt:{#{DOC_ID}}", + "lastUpdatedBy:{#{DOC_ID}}", + ) +end + +def set + @r.set("doclines:{#{DOC_ID}}", "[\"@book{adams1995hitchhiker,\",\" title={The Hitchhiker's Guide to the Galaxy},\",\" author={Adams, D.},\",\" isbn={9781417642595},\",\" url={http://books.google.com/books?id=W-xMPgAACAAJ},\",\" year={1995},\",\" publisher={San Val}\",\"}\",\"\"]") + @r.set("DocVersion:{#{DOC_ID}}", "0") + @r.set("DocHash:{#{DOC_ID}}", "0075bb0629c6c13d0d68918443648bbfe7d98869") + @r.set("ProjectId:{#{DOC_ID}}", "606072b20bb4d3109fb5b11e") + @r.set("Ranges:{#{DOC_ID}}", "") + @r.set("Pathname:{#{DOC_ID}}", "/references.bib") + @r.set("ProjectHistoryId:{#{DOC_ID}}", "") + @r.set("UnflushedTime:{#{DOC_ID}}", "") + @r.set("lastUpdatedAt:{#{DOC_ID}}", "") + @r.set("lastUpdatedBy:{#{DOC_ID}}", "") +end + +def mset + @r.mset( + "doclines:{#{DOC_ID}}", "[\"@book{adams1995hitchhiker,\",\" title={The Hitchhiker's Guide to the Galaxy},\",\" author={Adams, D.},\",\" isbn={9781417642595},\",\" url={http://books.google.com/books?id=W-xMPgAACAAJ},\",\" year={1995},\",\" publisher={San Val}\",\"}\",\"\"]", + "DocVersion:{#{DOC_ID}}", "0", + "DocHash:{#{DOC_ID}}", "0075bb0629c6c13d0d68918443648bbfe7d98869", + "ProjectId:{#{DOC_ID}}", "606072b20bb4d3109fb5b11e", + "Ranges:{#{DOC_ID}}", "", + "Pathname:{#{DOC_ID}}", "/references.bib", + "ProjectHistoryId:{#{DOC_ID}}", "", + "UnflushedTime:{#{DOC_ID}}", "", + "lastUpdatedAt:{#{DOC_ID}}", "", + "lastUpdatedBy:{#{DOC_ID}}", "", + ) +end + + +def benchmark_multi_get(benchmark, i) + benchmark.report("#{i}: multi get") do + N.times do + @r.multi do + get + end + end + end +end + +def benchmark_mget(benchmark, i) + benchmark.report("#{i}: mget") do + N.times do + mget + end + end +end + +def 
benchmark_multi_set(benchmark, i) + benchmark.report("#{i}: multi set") do + N.times do + @r.multi do + set + end + end + end +end + +def benchmark_mset(benchmark, i) + benchmark.report("#{i}: mset") do + N.times do + mset + end + end +end + + +# init +set + +Benchmark.bmbm do |benchmark| + 3.times do |i| + benchmark_multi_get(benchmark, i) + benchmark_mget(benchmark, i) + benchmark_multi_set(benchmark, i) + benchmark_mset(benchmark, i) + end +end + + + +=begin +# Results + +I could not max out the redis-server process with this benchmark. +The ruby process hit 100% of a modern i7 CPU thread and the redis-server process + barely hit 50% of a CPU thread. + +Based on the timings below, mget is about 3 times faster and mset about 4 times + faster than multiple get/set commands in a multi. +=end + +=begin +$ redis-server --version +Redis server v=5.0.7 sha=00000000:0 malloc=jemalloc-5.2.1 bits=64 build=636cde3b5c7a3923 +$ ruby multi_vs_mget_mset.rb 100000 +Rehearsal ------------------------------------------------ +0: multi get 12.132423 4.246689 16.379112 ( 16.420069) +0: mget 4.499457 0.947556 5.447013 ( 6.274883) +0: multi set 12.685936 4.495241 17.181177 ( 17.225984) +0: mset 2.543401 0.913448 3.456849 ( 4.554799) +1: multi get 13.397207 4.581881 17.979088 ( 18.027755) +1: mget 4.551287 1.160531 5.711818 ( 6.579168) +1: multi set 13.018957 4.927175 17.946132 ( 17.987502) +1: mset 2.561096 1.048416 3.609512 ( 4.780087) +2: multi get 13.224422 5.014475 18.238897 ( 18.284152) +2: mget 4.664434 1.051083 5.715517 ( 6.592088) +2: multi set 12.972284 4.600422 17.572706 ( 17.613185) +2: mset 2.621344 0.984123 3.605467 ( 4.766855) +------------------------------------- total: 132.843288sec + + user system total real +0: multi get 13.341552 4.900892 18.242444 ( 18.289912) +0: mget 5.056534 0.960954 6.017488 ( 6.971189) +0: multi set 12.989880 4.823793 17.813673 ( 17.858393) +0: mset 2.543434 1.025352 3.568786 ( 4.723040) +1: multi get 13.059379 4.674345 17.733724 ( 17.777859) +1: mget 4.698754 0.915637 5.614391 ( 6.489614) +1: multi set 12.608293 4.729163 17.337456 ( 17.372993) +1: mset 2.645290 0.940584 3.585874 ( 4.744134) +2: multi get 13.678224 4.732373 18.410597 ( 18.457525) +2: mget 4.716749 1.072064 5.788813 ( 6.697683) +2: multi set 13.058710 4.889801 17.948511 ( 17.988742) +2: mset 2.311854 0.989166 3.301020 ( 4.346467) +=end + +=begin +# multi get/set run at about O(65'000) operations per second +$ redis-cli info | grep 'instantaneous_ops_per_sec' +instantaneous_ops_per_sec:65557 + +# mget runs at about O(15'000) operations per second +$ redis-cli info | grep 'instantaneous_ops_per_sec' +instantaneous_ops_per_sec:14580 + +# mset runs at about O(20'000) operations per second +$ redis-cli info | grep 'instantaneous_ops_per_sec' +instantaneous_ops_per_sec:20792 + +These numbers are pretty reasonable: +multi: 100'000 * 12 ops / 18s = 66'666 ops/s +mget : 100'000 * 1 ops / 7s = 14'285 ops/s +mset : 100'000 * 1 ops / 5s = 20'000 ops/s + + + +Bonus: Running three benchmarks in parallel on different keys. 
+multi get: O(125'000) ops/s and 80% CPU load of redis-server +multi set: O(130'000) ops/s and 90% CPU load of redis-server +mget : O( 30'000) ops/s and 70% CPU load of redis-server +mset : O( 40'000) ops/s and 90% CPU load of redis-server +=end From b1a14460543d621efb27eba42d0007c04046c507 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 29 Apr 2021 10:29:34 +0100 Subject: [PATCH 738/769] remove the limit of 300 maxSockets --- services/document-updater/config/settings.defaults.js | 3 --- 1 file changed, 3 deletions(-) diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index bac86ff55f..f9c5f54921 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -1,6 +1,3 @@ -const http = require('http') -http.globalAgent.maxSockets = 300 - module.exports = { internal: { documentupdater: { From 01112e10ab4a9a7c4f441e083147a0b4085e43db Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 29 Apr 2021 11:45:07 +0100 Subject: [PATCH 739/769] upgrade ioredis via @overleaf/redis-wrapper also update dependencies --- services/document-updater/package-lock.json | 128 ++++++++++---------- services/document-updater/package.json | 12 +- 2 files changed, 70 insertions(+), 70 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 4228277b85..00a0a69f21 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -904,16 +904,26 @@ } }, "@overleaf/o-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.1.0.tgz", - "integrity": "sha512-TWJ80ozJ1LeugGTJyGQSPEuTkZ9LqZD7/ndLE6azKa03SU/mKV/FINcfk8atpVil8iv1hHQwzYZc35klplpMpQ==" + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.3.1.tgz", + "integrity": "sha512-1FRBYZO0lbJ0U+FRGZVS8ou6RhEw3e2B86WW/NbtBw554g0h5iC8ESf+juIfPMU/WDf/JDIFbg3eB/LnP2RSow==", + "requires": { + "core-js": "^3.8.3" + }, + "dependencies": { + "core-js": { + "version": "3.11.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.11.1.tgz", + "integrity": "sha512-k93Isqg7e4txZWMGNYwevZL9MiogLk8pd1PtwrmFmi8IBq4GXqUaVW/a33Llt6amSI36uSjd0GWwc9pTT9ALlQ==" + } + } }, "@overleaf/redis-wrapper": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@overleaf/redis-wrapper/-/redis-wrapper-2.0.0.tgz", - "integrity": "sha512-lREuhDPNgmKyOmL1g6onfRzDLWOG/POsE4Vd7ZzLnKDYt9SbOIujtx3CxI2qtQAKBYHf/hfyrbtyX3Ib2yTvYA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@overleaf/redis-wrapper/-/redis-wrapper-2.0.1.tgz", + "integrity": "sha512-1TwCbEKJFz2yUhmwy2hQzy04NBhnseT371X2AU2szkNJ8Ip1C1HwJt1UAK/7Nh+hY7kFfH7Qpk+bZUF9f/rUMQ==", "requires": { - "ioredis": "~4.17.3" + "ioredis": "~4.27.1" } }, "@protobufjs/aspromise": { @@ -1534,9 +1544,9 @@ "dev": true }, "bson": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.5.tgz", - "integrity": "sha512-kDuEzldR21lHciPQAIulLs1LZlCXdLziXI6Mb/TDkwXhb//UORJNPXgcRs2CuO4H0DcMkpfT3/ySsP3unoZjBg==" + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz", + "integrity": "sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg==" }, "buffer-equal-constant-time": { "version": "1.0.1", @@ -1924,9 +1934,9 @@ "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" }, "denque": { - "version": "1.4.1", - 
"resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", - "integrity": "sha1-Z0T/dkHBSMP4ppwwflEjXB9KN88=" + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.0.tgz", + "integrity": "sha512-CYiCSgIF1p6EUByQPlGkKnP1M9g0ZV3qMIrqMqZqdwazygIA/YP2vrbcyl1h/WppKJTdl1F85cXIle+394iDAQ==" }, "depd": { "version": "1.1.2", @@ -3532,25 +3542,26 @@ } }, "ioredis": { - "version": "4.17.3", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.17.3.tgz", - "integrity": "sha512-iRvq4BOYzNFkDnSyhx7cmJNOi1x/HWYe+A4VXHBu4qpwJaGT1Mp+D2bVGJntH9K/Z/GeOM/Nprb8gB3bmitz1Q==", + "version": "4.27.1", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.27.1.tgz", + "integrity": "sha512-PaFNFeBbOcEYHXAdrJuy7uesJcyvzStTM1aYMchTuky+VgKqDbXhnTJHaDsjAwcTwPx8Asatx+l2DW8zZ2xlsQ==", "requires": { "cluster-key-slot": "^1.1.0", - "debug": "^4.1.1", + "debug": "^4.3.1", "denque": "^1.1.0", "lodash.defaults": "^4.2.0", "lodash.flatten": "^4.4.0", - "redis-commands": "1.5.0", + "p-map": "^2.1.0", + "redis-commands": "1.7.0", "redis-errors": "^1.2.0", "redis-parser": "^3.0.0", - "standard-as-callback": "^2.0.1" + "standard-as-callback": "^2.1.0" }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -3819,9 +3830,9 @@ } }, "lodash": { - "version": "4.17.19", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", - "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==" + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "lodash.camelcase": { "version": "4.3.0", @@ -4469,14 +4480,14 @@ "optional": true }, "mongodb": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.6.0.tgz", - "integrity": "sha512-/XWWub1mHZVoqEsUppE0GV7u9kanLvHxho6EvBxQbShXTKYF9trhZC2NzbulRGeG7xMJHD8IOWRcdKx5LPjAjQ==", + "version": "3.6.6", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.6.6.tgz", + "integrity": "sha512-WlirMiuV1UPbej5JeCMqE93JRfZ/ZzqE7nJTwP85XzjAF4rRSeq2bGCb1cjfoHLOF06+HxADaPGqT0g3SbVT1w==", "requires": { - "bl": "^2.2.0", + "bl": "^2.2.1", "bson": "^1.1.4", "denque": "^1.4.1", - "require_optional": "^1.0.1", + "optional-require": "^1.0.2", "safe-buffer": "^5.1.2", "saslprep": "^1.0.0" } @@ -4839,6 +4850,11 @@ "mimic-fn": "^2.1.0" } }, + "optional-require": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.0.3.tgz", + "integrity": "sha512-RV2Zp2MY2aeYK5G+B/Sps8lW5NHAzE5QClbFP15j+PWmP+T9PxlJXBOOLoSAdgwFvS4t0aMR4vpedMkbHfh0nA==" + }, "optionator": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", @@ -4890,6 +4906,11 @@ "p-limit": "^2.0.0" } }, + "p-map": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", + "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==" + }, "p-try": { "version": "2.2.0", "resolved": 
"https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", @@ -5907,9 +5928,9 @@ } }, "redis-commands": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha1-gNLiBpj+aI8icSf/nlFkp90X54U=" + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz", + "integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ==" }, "redis-errors": { "version": "1.2.0", @@ -5998,12 +6019,12 @@ } }, "requestretry": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-4.1.0.tgz", - "integrity": "sha512-q3IT2vz5vkcMT6xgwB/BWzsmnu7N/27l9fW86U48gt9Mwrce5rSEyFvpAW7Il1/B78/NBUlYBvcCY1RzWUWy7w==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-4.1.2.tgz", + "integrity": "sha512-N1WAp+8eOy8NfsVBChcSxNCKvPY1azOpliQ4Sby4WDe0HFEhdKywlNZeROMBQ+BI3Jpc0eNOT1KVFGREawtahA==", "requires": { "extend": "^3.0.2", - "lodash": "^4.17.10", + "lodash": "^4.17.15", "when": "^3.7.7" } }, @@ -6056,22 +6077,6 @@ "integrity": "sha1-eZlTn8ngR6N5KPoZb44VY9q9Nt4=", "dev": true }, - "require_optional": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", - "integrity": "sha1-TPNaQkf2TKPfjC7yCMxJSxyo/C4=", - "requires": { - "resolve-from": "^2.0.0", - "semver": "^5.1.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } - } - }, "resolve": { "version": "1.15.1", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", @@ -6080,11 +6085,6 @@ "path-parse": "^1.0.6" } }, - "resolve-from": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", - "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" - }, "restore-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", @@ -6246,9 +6246,9 @@ "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" }, "settings-sharelatex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", - "integrity": "sha1-Tv4vUpPbjxwVlnEEx5BfqHD/mS0=", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.3.0.tgz", + "integrity": "sha512-AWSeCiY1eHi+z6DF4bmTyC3tusOc6EF5zDkC28aAOhEAbgVEIuY034kx1X7a4j9HE0iVs0sVOlZrQMWJBVk3oQ==", "requires": { "coffee-script": "1.6.0" }, @@ -6445,9 +6445,9 @@ "dev": true }, "standard-as-callback": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha1-7YuyVkjhWDF1m2Ajvbh+a2CzgSY=" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", + "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==" }, "statuses": { "version": "1.5.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 32b4b90999..d26f8978c3 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -19,19 +19,19 @@ }, "dependencies": { 
"@overleaf/metrics": "^3.5.1", - "@overleaf/o-error": "^3.1.0", - "@overleaf/redis-wrapper": "^2.0.0", + "@overleaf/o-error": "^3.3.1", + "@overleaf/redis-wrapper": "^2.0.1", "async": "^2.5.0", "body-parser": "^1.19.0", "bunyan": "~0.22.1", "diff-match-patch": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz", "express": "4.17.1", - "lodash": "^4.17.19", + "lodash": "^4.17.21", "logger-sharelatex": "^2.2.0", - "mongodb": "^3.6.0", + "mongodb": "^3.6.6", "request": "^2.88.2", - "requestretry": "^4.1.0", - "settings-sharelatex": "^1.1.0" + "requestretry": "^4.1.2", + "settings-sharelatex": "^1.3.0" }, "devDependencies": { "babel-eslint": "^10.1.0", From 33fad08c47427bd1a91f684f72bf9f12debf35fa Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 29 Apr 2021 15:30:48 +0100 Subject: [PATCH 740/769] [misc] add linting for missing explicit dependencies and fix any errors --- services/document-updater/.eslintrc | 13 +++++++++++-- services/document-updater/buildscript.txt | 2 +- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/services/document-updater/.eslintrc b/services/document-updater/.eslintrc index 76dad1561d..321353f971 100644 --- a/services/document-updater/.eslintrc +++ b/services/document-updater/.eslintrc @@ -22,7 +22,10 @@ "rules": { // Swap the no-unused-expressions rule with a more chai-friendly one "no-unused-expressions": 0, - "chai-friendly/no-unused-expressions": "error" + "chai-friendly/no-unused-expressions": "error", + + // Do not allow importing of implicit dependencies. + "import/no-extraneous-dependencies": "error" }, "overrides": [ { @@ -57,7 +60,13 @@ "files": ["app/**/*.js", "app.js", "index.js"], "rules": { // don't allow console.log in backend code - "no-console": "error" + "no-console": "error", + + // Do not allow importing of implicit dependencies. + "import/no-extraneous-dependencies": ["error", { + // Do not allow importing of devDependencies. 
+ "devDependencies": false + }] } } ] diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index 2d2f00495e..b7b22bd21d 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -5,4 +5,4 @@ document-updater --env-pass-through= --node-version=12.21.0 --public-repo=True ---script-version=3.7.0 +--script-version=3.8.0 From a4ae0ea12f720ca2477292f4677075b50a5a4fb1 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 6 May 2021 09:39:52 +0100 Subject: [PATCH 741/769] [ShareJsUpdateManager] double check doc size before flushing --- .../app/js/ShareJsUpdateManager.js | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/services/document-updater/app/js/ShareJsUpdateManager.js b/services/document-updater/app/js/ShareJsUpdateManager.js index 607ae2d9fa..8ae91df32c 100644 --- a/services/document-updater/app/js/ShareJsUpdateManager.js +++ b/services/document-updater/app/js/ShareJsUpdateManager.js @@ -87,6 +87,20 @@ module.exports = ShareJsUpdateManager = { if (error != null) { return callback(error) } + const docSizeAfter = data.snapshot.length + if (docSizeAfter > Settings.max_doc_length) { + const docSizeBefore = lines.join('\n').length + const err = new Error( + 'blocking persistence of ShareJs update: doc size exceeds limits' + ) + logger.error( + { project_id, doc_id, err, docSizeBefore, docSizeAfter }, + err.message + ) + metrics.inc('sharejs.other-error') + const publicError = 'Update takes doc over max doc size' + return callback(publicError) + } // only check hash when present and no other updates have been applied if (update.hash != null && incomingUpdateVersion === version) { const ourHash = ShareJsUpdateManager._computeHash(data.snapshot) From c707d0b345da315e0b537c3d95ac4fd5b6ac4daa Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 6 May 2021 17:19:23 +0100 Subject: [PATCH 742/769] [RedisManager] block inserting of too large docs into redis --- services/document-updater/app/js/RedisManager.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js index 73d85f60d5..d81c7151b3 100644 --- a/services/document-updater/app/js/RedisManager.js +++ b/services/document-updater/app/js/RedisManager.js @@ -72,6 +72,13 @@ module.exports = RedisManager = { logger.error({ err: error, doc_id, docLines }, error.message) return callback(error) } + // Do a cheap size check on the serialized blob. 
+ if (docLines.length > Settings.max_doc_length) { + const docSize = docLines.length + const err = new Error('blocking doc insert into redis: doc is too large') + logger.error({ project_id, doc_id, err, docSize }, err.message) + return callback(err) + } const docHash = RedisManager._computeHash(docLines) // record bytes sent to redis metrics.summary('redis.docLines', docLines.length, { status: 'set' }) From 757e1e98c519961ccd036af7d7ce7f1dd2478118 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 6 May 2021 17:30:50 +0100 Subject: [PATCH 743/769] [RedisManager] block updating of too large docs in redis --- services/document-updater/app/js/RedisManager.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js index d81c7151b3..418e3ec6d4 100644 --- a/services/document-updater/app/js/RedisManager.js +++ b/services/document-updater/app/js/RedisManager.js @@ -468,6 +468,13 @@ module.exports = RedisManager = { logger.error({ err: error, doc_id, newDocLines }, error.message) return callback(error) } + // Do a cheap size check on the serialized blob. + if (newDocLines.length > Settings.max_doc_length) { + const err = new Error('blocking doc update: doc is too large') + const docSize = newDocLines.length + logger.error({ project_id, doc_id, err, docSize }, err.message) + return callback(err) + } const newHash = RedisManager._computeHash(newDocLines) const opVersions = appliedOps.map((op) => (op != null ? op.v : undefined)) From 309ad818f69ffc71f46c8299d21a0db95d4a5538 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 6 May 2021 18:11:52 +0100 Subject: [PATCH 744/769] [SizeCheckTests] add acceptance tests for size limit checks --- .../test/acceptance/js/SizeCheckTests.js | 129 ++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 services/document-updater/test/acceptance/js/SizeCheckTests.js diff --git a/services/document-updater/test/acceptance/js/SizeCheckTests.js b/services/document-updater/test/acceptance/js/SizeCheckTests.js new file mode 100644 index 0000000000..288cc485e1 --- /dev/null +++ b/services/document-updater/test/acceptance/js/SizeCheckTests.js @@ -0,0 +1,129 @@ +const { expect } = require('chai') +const Settings = require('settings-sharelatex') + +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('SizeChecks', function () { + before(function (done) { + DocUpdaterApp.ensureRunning(done) + }) + beforeEach(function () { + this.version = 0 + this.update = { + doc: this.doc_id, + op: [ + { + i: 'insert some more lines that will bring it above the limit\n', + p: 42 + } + ], + v: this.version + } + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + }) + + describe('when a doc is above the doc size limit already', function () { + beforeEach(function () { + this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 + 1)] + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + v: this.version + }) + }) + + it('should error when fetching the doc', function (done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => { + if (error) return done(error) + expect(res.statusCode).to.equal(500) + done() + }) + }) + + describe('when trying to update', function () { + beforeEach(function (done) { + const update = { + doc: this.doc_id, + op: 
this.update.op, + v: this.version + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + (error) => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should still error when fetching the doc', function (done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => { + if (error) return done(error) + expect(res.statusCode).to.equal(500) + done() + }) + }) + }) + }) + + describe('when a doc is just below the doc size limit', function () { + beforeEach(function () { + this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 - 1)] + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + v: this.version + }) + }) + + it('should be able to fetch the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + expect(doc.lines).to.deep.equal(this.lines) + done() + } + ) + }) + + describe('when trying to update', function () { + beforeEach(function (done) { + const update = { + doc: this.doc_id, + op: this.update.op, + v: this.version + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + (error) => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should not update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + expect(doc.lines).to.deep.equal(this.lines) + done() + } + ) + }) + }) + }) +}) From da81a97b827d7b5e21355b71c70f0734b3d92f00 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 May 2021 17:18:40 +0000 Subject: [PATCH 745/769] Bump hosted-git-info from 2.8.8 to 2.8.9 Bumps [hosted-git-info](https://github.com/npm/hosted-git-info) from 2.8.8 to 2.8.9. 
- [Release notes](https://github.com/npm/hosted-git-info/releases) - [Changelog](https://github.com/npm/hosted-git-info/blob/v2.8.9/CHANGELOG.md) - [Commits](https://github.com/npm/hosted-git-info/compare/v2.8.8...v2.8.9) Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 00a0a69f21..67a88a7e6e 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -3244,9 +3244,9 @@ "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" }, "hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, "http-errors": { From edacebb53cbd5bd17c312d19eaa44430063478fe Mon Sep 17 00:00:00 2001 From: Alf Eaton Date: Wed, 19 May 2021 20:25:04 +0100 Subject: [PATCH 746/769] Remove unused parameter --- .../test/cluster_failover/coffee/test_blpop_failover.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee index e36f31f670..60a1ddaaa5 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee +++ b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee @@ -35,7 +35,7 @@ do sendPings = () -> sendPing () -> setTimeout sendPings, PING_DELAY -do listenInBackground = (cb = () ->) -> +do listenInBackground = () -> listenForPing (error, value) -> console.error "[RECEIVING ERROR]", error.message if error setTimeout listenInBackground From e1e0d26b1fc0b85184805a9d6947d7f416a1e043 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 May 2021 20:25:06 +0100 Subject: [PATCH 747/769] decaffeinate: Rename expire_docops.coffee and 2 other files from .coffee to .js --- .../document-updater/{expire_docops.coffee => expire_docops.js} | 0 .../coffee/{test_blpop_failover.coffee => test_blpop_failover.js} | 0 .../{test_pubsub_failover.coffee => test_pubsub_failover.js} | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/{expire_docops.coffee => expire_docops.js} (100%) rename services/document-updater/test/cluster_failover/coffee/{test_blpop_failover.coffee => test_blpop_failover.js} (100%) rename services/document-updater/test/cluster_failover/coffee/{test_pubsub_failover.coffee => test_pubsub_failover.js} (100%) diff --git a/services/document-updater/expire_docops.coffee b/services/document-updater/expire_docops.js similarity index 100% rename from services/document-updater/expire_docops.coffee rename to services/document-updater/expire_docops.js diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js similarity index 100% rename from services/document-updater/test/cluster_failover/coffee/test_blpop_failover.coffee rename to 
services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js diff --git a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js similarity index 100% rename from services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.coffee rename to services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js From 5ad0b6eea23be24fe69cc1155b60c792a718258f Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 May 2021 20:25:07 +0100 Subject: [PATCH 748/769] decaffeinate: Convert expire_docops.coffee and 2 other files to JS --- services/document-updater/expire_docops.js | 90 ++++++++++--------- .../coffee/test_blpop_failover.js | 80 ++++++++++------- .../coffee/test_pubsub_failover.js | 66 ++++++++------ 3 files changed, 135 insertions(+), 101 deletions(-) diff --git a/services/document-updater/expire_docops.js b/services/document-updater/expire_docops.js index ff25b6f842..59f498a181 100644 --- a/services/document-updater/expire_docops.js +++ b/services/document-updater/expire_docops.js @@ -1,44 +1,54 @@ -Settings = require "settings-sharelatex" -rclient = require("@overleaf/redis-wrapper").createClient(Settings.redis.documentupdater) -keys = Settings.redis.documentupdater.key_schema -async = require "async" -RedisManager = require "./app/js/RedisManager" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Settings = require("settings-sharelatex"); +const rclient = require("@overleaf/redis-wrapper").createClient(Settings.redis.documentupdater); +let keys = Settings.redis.documentupdater.key_schema; +const async = require("async"); +const RedisManager = require("./app/js/RedisManager"); -getKeysFromNode = (node, pattern, callback) -> - cursor = 0 # redis iterator - keySet = {} # use hash to avoid duplicate results - # scan over all keys looking for pattern - doIteration = (cb) -> - node.scan cursor, "MATCH", pattern, "COUNT", 1000, (error, reply) -> - return callback(error) if error? 
- [cursor, keys] = reply - console.log "SCAN", keys.length - for key in keys - keySet[key] = true - if cursor == '0' # note redis returns string result not numeric - return callback(null, Object.keys(keySet)) - else - doIteration() - doIteration() +const getKeysFromNode = function(node, pattern, callback) { + let cursor = 0; // redis iterator + const keySet = {}; // use hash to avoid duplicate results + // scan over all keys looking for pattern + var doIteration = cb => node.scan(cursor, "MATCH", pattern, "COUNT", 1000, function(error, reply) { + if (error != null) { return callback(error); } + [cursor, keys] = Array.from(reply); + console.log("SCAN", keys.length); + for (let key of Array.from(keys)) { + keySet[key] = true; + } + if (cursor === '0') { // note redis returns string result not numeric + return callback(null, Object.keys(keySet)); + } else { + return doIteration(); + } + }); + return doIteration(); +}; -getKeys = (pattern, callback) -> - nodes = rclient.nodes?('master') || [ rclient ]; - console.log "GOT NODES", nodes.length - doKeyLookupForNode = (node, cb) -> - getKeysFromNode node, pattern, cb - async.concatSeries nodes, doKeyLookupForNode, callback +const getKeys = function(pattern, callback) { + const nodes = (typeof rclient.nodes === 'function' ? rclient.nodes('master') : undefined) || [ rclient ]; + console.log("GOT NODES", nodes.length); + const doKeyLookupForNode = (node, cb) => getKeysFromNode(node, pattern, cb); + return async.concatSeries(nodes, doKeyLookupForNode, callback); +}; -TTL = 60 * 60 # 1 hour -expireDocOps = (callback) -> - getKeys keys.docOps(doc_id: "*"), (error, keys) -> - async.mapSeries keys, - (key, cb) -> - console.log "EXPIRE #{key} #{RedisManager.DOC_OPS_TTL}" - rclient.expire key, RedisManager.DOC_OPS_TTL, cb - callback +const TTL = 60 * 60; // 1 hour +const expireDocOps = callback => getKeys(keys.docOps({doc_id: "*"}), (error, keys) => async.mapSeries(keys, + function(key, cb) { + console.log(`EXPIRE ${key} ${RedisManager.DOC_OPS_TTL}`); + return rclient.expire(key, RedisManager.DOC_OPS_TTL, cb); + }, + callback)); -setTimeout () -> # Give redis a chance to connect - expireDocOps (error) -> - throw error if error? 
- process.exit() -, 1000 +setTimeout(() => // Give redis a chance to connect +expireDocOps(function(error) { + if (error != null) { throw error; } + return process.exit(); +}) +, 1000); diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js index 60a1ddaaa5..7c29240717 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js +++ b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js @@ -1,41 +1,53 @@ -redis = require "@overleaf/redis-wrapper" -rclient1 = redis.createClient(cluster: [{ - port: "7000" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let listenInBackground, sendPings; +const redis = require("@overleaf/redis-wrapper"); +const rclient1 = redis.createClient({cluster: [{ + port: "7000", host: "localhost" -}]) +}]}); -rclient2 = redis.createClient(cluster: [{ - port: "7000" +const rclient2 = redis.createClient({cluster: [{ + port: "7000", host: "localhost" -}]) +}]}); -counter = 0 -sendPing = (cb = () ->) -> - rclient1.rpush "test-blpop", counter, (error) -> - console.error "[SENDING ERROR]", error.message if error? - if !error? - counter += 1 - cb() +let counter = 0; +const sendPing = function(cb) { + if (cb == null) { cb = function() {}; } + return rclient1.rpush("test-blpop", counter, function(error) { + if (error != null) { console.error("[SENDING ERROR]", error.message); } + if ((error == null)) { + counter += 1; + } + return cb(); + }); +}; -previous = null -listenForPing = (cb) -> - rclient2.blpop "test-blpop", 200, (error, result) -> - return cb(error) if error? - [key, value] = result - value = parseInt(value, 10) - if value % 10 == 0 - console.log "." - if previous? and value != previous + 1 - error = new Error("Counter not in order. Got #{value}, expected #{previous + 1}") - previous = value - return cb(error, value) +let previous = null; +const listenForPing = cb => rclient2.blpop("test-blpop", 200, function(error, result) { + if (error != null) { return cb(error); } + let [key, value] = Array.from(result); + value = parseInt(value, 10); + if ((value % 10) === 0) { + console.log("."); + } + if ((previous != null) && (value !== (previous + 1))) { + error = new Error(`Counter not in order. 
Got ${value}, expected ${previous + 1}`); + } + previous = value; + return cb(error, value); +}); -PING_DELAY = 100 -do sendPings = () -> - sendPing () -> - setTimeout sendPings, PING_DELAY +const PING_DELAY = 100; +(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))(); -do listenInBackground = () -> - listenForPing (error, value) -> - console.error "[RECEIVING ERROR]", error.message if error - setTimeout listenInBackground +(listenInBackground = () => listenForPing(function(error, value) { + if (error) { console.error("[RECEIVING ERROR]", error.message); } + return setTimeout(listenInBackground); +}))(); diff --git a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js index eccf952504..295ffc5b7b 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js +++ b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js @@ -1,33 +1,45 @@ -redis = require "@overleaf/redis-wrapper" -rclient1 = redis.createClient(cluster: [{ - port: "7000" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let sendPings; +const redis = require("@overleaf/redis-wrapper"); +const rclient1 = redis.createClient({cluster: [{ + port: "7000", host: "localhost" -}]) +}]}); -rclient2 = redis.createClient(cluster: [{ - port: "7000" +const rclient2 = redis.createClient({cluster: [{ + port: "7000", host: "localhost" -}]) +}]}); -counter = 0 -sendPing = (cb = () ->) -> - rclient1.publish "test-pubsub", counter, (error) -> - console.error "[SENDING ERROR]", error.message if error? - if !error? - counter += 1 - cb() +let counter = 0; +const sendPing = function(cb) { + if (cb == null) { cb = function() {}; } + return rclient1.publish("test-pubsub", counter, function(error) { + if (error != null) { console.error("[SENDING ERROR]", error.message); } + if ((error == null)) { + counter += 1; + } + return cb(); + }); +}; -previous = null -rclient2.subscribe "test-pubsub" -rclient2.on "message", (channel, value) -> - value = parseInt(value, 10) - if value % 10 == 0 - console.log "." - if previous? and value != previous + 1 - console.error "[RECEIVING ERROR]", "Counter not in order. Got #{value}, expected #{previous + 1}" - previous = value +let previous = null; +rclient2.subscribe("test-pubsub"); +rclient2.on("message", function(channel, value) { + value = parseInt(value, 10); + if ((value % 10) === 0) { + console.log("."); + } + if ((previous != null) && (value !== (previous + 1))) { + console.error("[RECEIVING ERROR]", `Counter not in order. 
Got ${value}, expected ${previous + 1}`); + } + return previous = value; +}); -PING_DELAY = 100 -do sendPings = () -> - sendPing () -> - setTimeout sendPings, PING_DELAY +const PING_DELAY = 100; +(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))(); From 03cbad4e9f10cc7fbe89b5f620d4948c927a3657 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 May 2021 20:25:07 +0100 Subject: [PATCH 749/769] decaffeinate: Run post-processing cleanups on expire_docops.coffee and 2 other files --- services/document-updater/expire_docops.js | 8 +++++++- .../cluster_failover/coffee/test_blpop_failover.js | 11 ++++++++--- .../cluster_failover/coffee/test_pubsub_failover.js | 9 +++++++-- 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/services/document-updater/expire_docops.js b/services/document-updater/expire_docops.js index 59f498a181..811b0cc019 100644 --- a/services/document-updater/expire_docops.js +++ b/services/document-updater/expire_docops.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -19,7 +25,7 @@ const getKeysFromNode = function(node, pattern, callback) { if (error != null) { return callback(error); } [cursor, keys] = Array.from(reply); console.log("SCAN", keys.length); - for (let key of Array.from(keys)) { + for (const key of Array.from(keys)) { keySet[key] = true; } if (cursor === '0') { // note redis returns string result not numeric diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js index 7c29240717..a356e683d4 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js +++ b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -20,7 +25,7 @@ const rclient2 = redis.createClient({cluster: [{ let counter = 0; const sendPing = function(cb) { if (cb == null) { cb = function() {}; } - return rclient1.rpush("test-blpop", counter, function(error) { + return rclient1.rpush("test-blpop", counter, (error) => { if (error != null) { console.error("[SENDING ERROR]", error.message); } if ((error == null)) { counter += 1; @@ -30,7 +35,7 @@ const sendPing = function(cb) { }; let previous = null; -const listenForPing = cb => rclient2.blpop("test-blpop", 200, function(error, result) { +const listenForPing = cb => rclient2.blpop("test-blpop", 200, (error, result) => { if (error != null) { return cb(error); } let [key, value] = Array.from(result); value = parseInt(value, 10); @@ -47,7 +52,7 @@ const listenForPing = cb => rclient2.blpop("test-blpop", 200, function(error, re const PING_DELAY = 100; (sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))(); -(listenInBackground = () => listenForPing(function(error, value) { +(listenInBackground = () => listenForPing((error, value) => { if (error) { console.error("[RECEIVING ERROR]", error.message); } return setTimeout(listenInBackground); }))(); diff --git a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js index 295ffc5b7b..670c7afa3a 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js +++ b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -19,7 +24,7 @@ const rclient2 = redis.createClient({cluster: [{ let counter = 0; const sendPing = function(cb) { if (cb == null) { cb = function() {}; } - return rclient1.publish("test-pubsub", counter, function(error) { + return rclient1.publish("test-pubsub", counter, (error) => { if (error != null) { console.error("[SENDING ERROR]", error.message); } if ((error == null)) { counter += 1; @@ -30,7 +35,7 @@ const sendPing = function(cb) { let previous = null; rclient2.subscribe("test-pubsub"); -rclient2.on("message", function(channel, value) { +rclient2.on("message", (channel, value) => { value = parseInt(value, 10); if ((value % 10) === 0) { console.log("."); From de50e595574c552c6d13211244924151a935d096 Mon Sep 17 00:00:00 2001 From: Alf Eaton Date: Tue, 25 May 2021 14:27:45 +0100 Subject: [PATCH 750/769] Run format:fix --- services/document-updater/expire_docops.js | 104 ++++++++++-------- .../coffee/test_blpop_failover.js | 104 +++++++++++------- .../coffee/test_pubsub_failover.js | 85 ++++++++------ 3 files changed, 173 insertions(+), 120 deletions(-) diff --git a/services/document-updater/expire_docops.js b/services/document-updater/expire_docops.js index 811b0cc019..ffafbe7255 100644 --- a/services/document-updater/expire_docops.js +++ b/services/document-updater/expire_docops.js @@ -11,50 +11,68 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Settings = require("settings-sharelatex"); -const rclient = require("@overleaf/redis-wrapper").createClient(Settings.redis.documentupdater); -let keys = Settings.redis.documentupdater.key_schema; -const async = require("async"); -const RedisManager = require("./app/js/RedisManager"); +const Settings = require('settings-sharelatex') +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +let keys = Settings.redis.documentupdater.key_schema +const async = require('async') +const RedisManager = require('./app/js/RedisManager') -const getKeysFromNode = function(node, pattern, callback) { - let cursor = 0; // redis iterator - const keySet = {}; // use hash to avoid duplicate results - // scan over all keys looking for pattern - var doIteration = cb => node.scan(cursor, "MATCH", pattern, "COUNT", 1000, function(error, reply) { - if (error != null) { return callback(error); } - [cursor, keys] = Array.from(reply); - console.log("SCAN", keys.length); - for (const key of Array.from(keys)) { - keySet[key] = true; - } - if (cursor === '0') { // note redis returns string result not numeric - return callback(null, Object.keys(keySet)); - } else { - return doIteration(); - } - }); - return doIteration(); -}; +const getKeysFromNode = function (node, pattern, callback) { + let cursor = 0 // redis iterator + const keySet = {} // use hash to avoid duplicate results + // scan over all keys looking for pattern + var doIteration = (cb) => + node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, function (error, reply) { + if (error != null) { + return callback(error) + } + ;[cursor, keys] = Array.from(reply) + console.log('SCAN', keys.length) + for (const key of Array.from(keys)) { + keySet[key] = true + } + if (cursor === '0') { + // note redis returns string result not numeric + return callback(null, Object.keys(keySet)) + } else { + return doIteration() + } + }) + return doIteration() +} -const getKeys = 
function(pattern, callback) { - const nodes = (typeof rclient.nodes === 'function' ? rclient.nodes('master') : undefined) || [ rclient ]; - console.log("GOT NODES", nodes.length); - const doKeyLookupForNode = (node, cb) => getKeysFromNode(node, pattern, cb); - return async.concatSeries(nodes, doKeyLookupForNode, callback); -}; +const getKeys = function (pattern, callback) { + const nodes = (typeof rclient.nodes === 'function' + ? rclient.nodes('master') + : undefined) || [rclient] + console.log('GOT NODES', nodes.length) + const doKeyLookupForNode = (node, cb) => getKeysFromNode(node, pattern, cb) + return async.concatSeries(nodes, doKeyLookupForNode, callback) +} -const TTL = 60 * 60; // 1 hour -const expireDocOps = callback => getKeys(keys.docOps({doc_id: "*"}), (error, keys) => async.mapSeries(keys, - function(key, cb) { - console.log(`EXPIRE ${key} ${RedisManager.DOC_OPS_TTL}`); - return rclient.expire(key, RedisManager.DOC_OPS_TTL, cb); - }, - callback)); +const TTL = 60 * 60 // 1 hour +const expireDocOps = (callback) => + getKeys(keys.docOps({ doc_id: '*' }), (error, keys) => + async.mapSeries( + keys, + function (key, cb) { + console.log(`EXPIRE ${key} ${RedisManager.DOC_OPS_TTL}`) + return rclient.expire(key, RedisManager.DOC_OPS_TTL, cb) + }, + callback + ) + ) -setTimeout(() => // Give redis a chance to connect -expireDocOps(function(error) { - if (error != null) { throw error; } - return process.exit(); -}) -, 1000); +setTimeout( + () => + // Give redis a chance to connect + expireDocOps(function (error) { + if (error != null) { + throw error + } + return process.exit() + }), + 1000 +) diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js index a356e683d4..3d7cae99da 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js +++ b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js @@ -10,49 +10,69 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let listenInBackground, sendPings; -const redis = require("@overleaf/redis-wrapper"); -const rclient1 = redis.createClient({cluster: [{ - port: "7000", - host: "localhost" -}]}); - -const rclient2 = redis.createClient({cluster: [{ - port: "7000", - host: "localhost" -}]}); - -let counter = 0; -const sendPing = function(cb) { - if (cb == null) { cb = function() {}; } - return rclient1.rpush("test-blpop", counter, (error) => { - if (error != null) { console.error("[SENDING ERROR]", error.message); } - if ((error == null)) { - counter += 1; - } - return cb(); - }); -}; - -let previous = null; -const listenForPing = cb => rclient2.blpop("test-blpop", 200, (error, result) => { - if (error != null) { return cb(error); } - let [key, value] = Array.from(result); - value = parseInt(value, 10); - if ((value % 10) === 0) { - console.log("."); +let listenInBackground, sendPings +const redis = require('@overleaf/redis-wrapper') +const rclient1 = redis.createClient({ + cluster: [ + { + port: '7000', + host: 'localhost' } - if ((previous != null) && (value !== (previous + 1))) { - error = new Error(`Counter not in order. 
Got ${value}, expected ${previous + 1}`); + ] +}) + +const rclient2 = redis.createClient({ + cluster: [ + { + port: '7000', + host: 'localhost' } - previous = value; - return cb(error, value); -}); + ] +}) -const PING_DELAY = 100; -(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))(); +let counter = 0 +const sendPing = function (cb) { + if (cb == null) { + cb = function () {} + } + return rclient1.rpush('test-blpop', counter, (error) => { + if (error != null) { + console.error('[SENDING ERROR]', error.message) + } + if (error == null) { + counter += 1 + } + return cb() + }) +} -(listenInBackground = () => listenForPing((error, value) => { - if (error) { console.error("[RECEIVING ERROR]", error.message); } - return setTimeout(listenInBackground); -}))(); +let previous = null +const listenForPing = (cb) => + rclient2.blpop('test-blpop', 200, (error, result) => { + if (error != null) { + return cb(error) + } + let [key, value] = Array.from(result) + value = parseInt(value, 10) + if (value % 10 === 0) { + console.log('.') + } + if (previous != null && value !== previous + 1) { + error = new Error( + `Counter not in order. Got ${value}, expected ${previous + 1}` + ) + } + previous = value + return cb(error, value) + }) + +const PING_DELAY = 100 +;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))() + +;(listenInBackground = () => + listenForPing((error, value) => { + if (error) { + console.error('[RECEIVING ERROR]', error.message) + } + return setTimeout(listenInBackground) + }))() diff --git a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js index 670c7afa3a..627fb82a0d 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js +++ b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js @@ -9,42 +9,57 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let sendPings; -const redis = require("@overleaf/redis-wrapper"); -const rclient1 = redis.createClient({cluster: [{ - port: "7000", - host: "localhost" -}]}); +let sendPings +const redis = require('@overleaf/redis-wrapper') +const rclient1 = redis.createClient({ + cluster: [ + { + port: '7000', + host: 'localhost' + } + ] +}) -const rclient2 = redis.createClient({cluster: [{ - port: "7000", - host: "localhost" -}]}); +const rclient2 = redis.createClient({ + cluster: [ + { + port: '7000', + host: 'localhost' + } + ] +}) -let counter = 0; -const sendPing = function(cb) { - if (cb == null) { cb = function() {}; } - return rclient1.publish("test-pubsub", counter, (error) => { - if (error != null) { console.error("[SENDING ERROR]", error.message); } - if ((error == null)) { - counter += 1; - } - return cb(); - }); -}; +let counter = 0 +const sendPing = function (cb) { + if (cb == null) { + cb = function () {} + } + return rclient1.publish('test-pubsub', counter, (error) => { + if (error != null) { + console.error('[SENDING ERROR]', error.message) + } + if (error == null) { + counter += 1 + } + return cb() + }) +} -let previous = null; -rclient2.subscribe("test-pubsub"); -rclient2.on("message", (channel, value) => { - value = parseInt(value, 10); - if ((value % 10) === 0) { - console.log("."); - } - if ((previous != null) && (value !== (previous + 1))) { - console.error("[RECEIVING ERROR]", `Counter not in order. 
Got ${value}, expected ${previous + 1}`); - } - return previous = value; -}); +let previous = null +rclient2.subscribe('test-pubsub') +rclient2.on('message', (channel, value) => { + value = parseInt(value, 10) + if (value % 10 === 0) { + console.log('.') + } + if (previous != null && value !== previous + 1) { + console.error( + '[RECEIVING ERROR]', + `Counter not in order. Got ${value}, expected ${previous + 1}` + ) + } + return (previous = value) +}) -const PING_DELAY = 100; -(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))(); +const PING_DELAY = 100 +;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))() From f4db3d8f45a3356b319b873dcbe92b88aef51308 Mon Sep 17 00:00:00 2001 From: Alf Eaton Date: Tue, 25 May 2021 14:32:03 +0100 Subject: [PATCH 751/769] Manual tidying --- services/document-updater/expire_docops.js | 29 +++++-------------- .../coffee/test_blpop_failover.js | 16 ++-------- .../coffee/test_pubsub_failover.js | 13 +-------- 3 files changed, 11 insertions(+), 47 deletions(-) diff --git a/services/document-updater/expire_docops.js b/services/document-updater/expire_docops.js index ffafbe7255..3eba0d547c 100644 --- a/services/document-updater/expire_docops.js +++ b/services/document-updater/expire_docops.js @@ -1,16 +1,3 @@ -/* eslint-disable - handle-callback-err, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const Settings = require('settings-sharelatex') const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater @@ -23,17 +10,17 @@ const getKeysFromNode = function (node, pattern, callback) { let cursor = 0 // redis iterator const keySet = {} // use hash to avoid duplicate results // scan over all keys looking for pattern - var doIteration = (cb) => - node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, function (error, reply) { - if (error != null) { + const doIteration = () => + node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, function(error, reply) { + if (error) { return callback(error) } - ;[cursor, keys] = Array.from(reply) + ;[cursor, keys] = reply console.log('SCAN', keys.length) - for (const key of Array.from(keys)) { + for (const key of keys) { keySet[key] = true } - if (cursor === '0') { + if (cursor==='0') { // note redis returns string result not numeric return callback(null, Object.keys(keySet)) } else { @@ -52,8 +39,8 @@ const getKeys = function (pattern, callback) { return async.concatSeries(nodes, doKeyLookupForNode, callback) } -const TTL = 60 * 60 // 1 hour const expireDocOps = (callback) => + // eslint-disable-next-line handle-callback-err getKeys(keys.docOps({ doc_id: '*' }), (error, keys) => async.mapSeries( keys, @@ -69,7 +56,7 @@ setTimeout( () => // Give redis a chance to connect expireDocOps(function (error) { - if (error != null) { + if (error) { throw error } return process.exit() diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js index 3d7cae99da..6d4b3ee55d 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js +++ 
b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js @@ -1,15 +1,3 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ let listenInBackground, sendPings const redis = require('@overleaf/redis-wrapper') const rclient1 = redis.createClient({ @@ -52,7 +40,7 @@ const listenForPing = (cb) => if (error != null) { return cb(error) } - let [key, value] = Array.from(result) + let [, value] = Array.from(result) value = parseInt(value, 10) if (value % 10 === 0) { console.log('.') @@ -70,7 +58,7 @@ const PING_DELAY = 100 ;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))() ;(listenInBackground = () => - listenForPing((error, value) => { + listenForPing((error) => { if (error) { console.error('[RECEIVING ERROR]', error.message) } diff --git a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js index 627fb82a0d..3da52be287 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js +++ b/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js @@ -1,14 +1,3 @@ -/* eslint-disable - no-return-assign, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ let sendPings const redis = require('@overleaf/redis-wrapper') const rclient1 = redis.createClient({ @@ -35,7 +24,7 @@ const sendPing = function (cb) { cb = function () {} } return rclient1.publish('test-pubsub', counter, (error) => { - if (error != null) { + if (error) { console.error('[SENDING ERROR]', error.message) } if (error == null) { From 358786fccd78cd20fe4bd576e019ce0b98a53b2e Mon Sep 17 00:00:00 2001 From: Alf Eaton Date: Tue, 25 May 2021 15:45:42 +0100 Subject: [PATCH 752/769] Run format:fix --- services/document-updater/expire_docops.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/expire_docops.js b/services/document-updater/expire_docops.js index 3eba0d547c..398bf5229a 100644 --- a/services/document-updater/expire_docops.js +++ b/services/document-updater/expire_docops.js @@ -11,7 +11,7 @@ const getKeysFromNode = function (node, pattern, callback) { const keySet = {} // use hash to avoid duplicate results // scan over all keys looking for pattern const doIteration = () => - node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, function(error, reply) { + node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, function (error, reply) { if (error) { return callback(error) } @@ -20,7 +20,7 @@ const getKeysFromNode = function (node, pattern, callback) { for (const key of keys) { keySet[key] = true } - if (cursor==='0') { + if (cursor === '0') { // note redis returns string result not numeric return callback(null, Object.keys(keySet)) } else { From 24fd9449f23a4e06cc4564ed0520adfd771785d3 Mon Sep 17 00:00:00 2001 
From: Alf Eaton Date: Tue, 25 May 2021 15:52:19 +0100 Subject: [PATCH 753/769] Run format:fix --- .../test/cluster_failover/coffee/test_blpop_failover.js | 1 - 1 file changed, 1 deletion(-) diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js index 6d4b3ee55d..e3f52f6339 100644 --- a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js +++ b/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js @@ -56,7 +56,6 @@ const listenForPing = (cb) => const PING_DELAY = 100 ;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))() - ;(listenInBackground = () => listenForPing((error) => { if (error) { From 5961e85b1002a6616dfec16e68d3e3c39525148b Mon Sep 17 00:00:00 2001 From: Alf Eaton Date: Tue, 25 May 2021 16:56:57 +0100 Subject: [PATCH 754/769] Fix path in test script --- services/document-updater/test/stress/js/run.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/document-updater/test/stress/js/run.js b/services/document-updater/test/stress/js/run.js index 3ce482b19b..da78735a95 100644 --- a/services/document-updater/test/stress/js/run.js +++ b/services/document-updater/test/stress/js/run.js @@ -16,7 +16,7 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const DocUpdaterClient = require('../../acceptance/coffee/helpers/DocUpdaterClient') +const DocUpdaterClient = require('../../acceptance/js/helpers/DocUpdaterClient') // MockTrackChangesApi = require "../../acceptance/js/helpers/MockTrackChangesApi" // MockWebApi = require "../../acceptance/js/helpers/MockWebApi" const assert = require('assert') From 674ad4acf05c0e1d4870973e4cad939d592f3618 Mon Sep 17 00:00:00 2001 From: Alf Eaton Date: Tue, 25 May 2021 16:57:24 +0100 Subject: [PATCH 755/769] Rename folder from coffee to js --- .../test/cluster_failover/{coffee => js}/test_blpop_failover.js | 0 .../test/cluster_failover/{coffee => js}/test_pubsub_failover.js | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename services/document-updater/test/cluster_failover/{coffee => js}/test_blpop_failover.js (100%) rename services/document-updater/test/cluster_failover/{coffee => js}/test_pubsub_failover.js (100%) diff --git a/services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js b/services/document-updater/test/cluster_failover/js/test_blpop_failover.js similarity index 100% rename from services/document-updater/test/cluster_failover/coffee/test_blpop_failover.js rename to services/document-updater/test/cluster_failover/js/test_blpop_failover.js diff --git a/services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js b/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js similarity index 100% rename from services/document-updater/test/cluster_failover/coffee/test_pubsub_failover.js rename to services/document-updater/test/cluster_failover/js/test_pubsub_failover.js From 0090748379c875f31cff4bd3ba84dc971544fe5f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Jun 2021 14:36:06 +0000 Subject: [PATCH 756/769] Bump glob-parent from 5.1.1 to 5.1.2 Bumps [glob-parent](https://github.com/gulpjs/glob-parent) from 5.1.1 to 5.1.2. 
- [Release notes](https://github.com/gulpjs/glob-parent/releases) - [Changelog](https://github.com/gulpjs/glob-parent/blob/main/CHANGELOG.md) - [Commits](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) --- updated-dependencies: - dependency-name: glob-parent dependency-type: indirect ... Signed-off-by: dependabot[bot] --- services/document-updater/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 67a88a7e6e..1f3bc72c96 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -3090,9 +3090,9 @@ } }, "glob-parent": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", - "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "requires": { "is-glob": "^4.0.1" From 835dbe784b34235d9a098b6383a7c56cea712ec1 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 12 Jul 2021 17:35:45 +0100 Subject: [PATCH 757/769] [misc] install bunyan as production dependency ``` Error: Cannot find module 'bunyan' Require stack: - .../node_modules/@google-cloud/logging-bunyan/build/src/middleware/express.js - .../node_modules/@google-cloud/logging-bunyan/build/src/index.js - .../node_modules/logger-sharelatex/logging-manager.js - .../node_modules/logger-sharelatex/index.js - .../app.js ``` --- services/document-updater/package-lock.json | 24 ++++++++------------- services/document-updater/package.json | 2 +- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index 67a88a7e6e..bda3f21608 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1540,7 +1540,7 @@ "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=", "dev": true }, "bson": { @@ -1564,20 +1564,14 @@ "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==" }, "bunyan": { - "version": "0.22.3", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-0.22.3.tgz", - "integrity": "sha1-ehncG0yMZF90AkGnQPIkUUfGfsI=", + "version": "1.8.15", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz", + "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==", "requires": { - "dtrace-provider": "0.2.8", - "mv": "~2" - }, - "dependencies": { - "dtrace-provider": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.2.8.tgz", - "integrity": "sha512-wufYnYt4ISHnT9MEiRgQ3trXuolt7mICTa/ckT+KYHR667K9H82lmI8KM7zKUJ8l5I343A34wJnvL++1TJn1iA==", - "optional": true - } + "dtrace-provider": "~0.8", + "moment": "^2.19.3", + "mv": "~2", + "safe-json-stringify": "~1" } }, "bytes": { @@ -6566,7 +6560,7 @@ "stubs": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" }, "supports-color": { "version": "5.4.0", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index d26f8978c3..230fd6f7cb 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -23,7 +23,7 @@ "@overleaf/redis-wrapper": "^2.0.1", "async": "^2.5.0", "body-parser": "^1.19.0", - "bunyan": "~0.22.1", + "bunyan": "^1.8.15", "diff-match-patch": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz", "express": "4.17.1", "lodash": "^4.17.21", From 519dd274bf91851269c8f55c6678b9e0d2d1734f Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 12 Jul 2021 17:47:15 +0100 Subject: [PATCH 758/769] [misc] switch from settings-sharelatex to @overleaf/settings --- services/document-updater/app.js | 2 +- .../app/js/DeleteQueueManager.js | 2 +- .../app/js/DispatchManager.js | 2 +- .../document-updater/app/js/HistoryManager.js | 2 +- .../app/js/HistoryRedisManager.js | 2 +- .../document-updater/app/js/HttpController.js | 2 +- .../document-updater/app/js/LockManager.js | 2 +- .../app/js/PersistenceManager.js | 2 +- services/document-updater/app/js/Profiler.js | 2 +- .../document-updater/app/js/ProjectFlusher.js | 2 +- .../app/js/ProjectHistoryRedisManager.js | 2 +- .../app/js/RateLimitManager.js | 2 +- .../app/js/RealTimeRedisManager.js | 2 +- .../document-updater/app/js/RedisManager.js | 2 +- .../app/js/ShareJsUpdateManager.js | 2 +- .../document-updater/app/js/UpdateManager.js | 2 +- services/document-updater/app/js/mongodb.js | 2 +- services/document-updater/expire_docops.js | 2 +- services/document-updater/package-lock.json | 20 +++++-------------- services/document-updater/package.json | 4 ++-- .../js/ApplyingUpdatesToADocTests.js | 2 +- .../ApplyingUpdatesToProjectStructureTests.js | 2 +- .../acceptance/js/SettingADocumentTests.js | 2 +- .../test/acceptance/js/SizeCheckTests.js | 2 +- .../acceptance/js/helpers/DocUpdaterClient.js | 2 +- .../DispatchManager/DispatchManagerTests.js | 2 +- .../js/HistoryManager/HistoryManagerTests.js | 2 +- .../HistoryRedisManagerTests.js | 2 +- .../unit/js/LockManager/ReleasingTheLock.js | 2 +- .../test/unit/js/LockManager/tryLockTests.js | 2 +- .../PersistenceManagerTests.js | 2 +- .../ProjectHistoryRedisManagerTests.js | 2 +- .../js/RateLimitManager/RateLimitManager.js | 2 +- .../RealTimeRedisManagerTests.js | 2 +- .../unit/js/RedisManager/RedisManagerTests.js | 2 +- .../js/UpdateManager/UpdateManagerTests.js | 2 +- 36 files changed, 41 insertions(+), 51 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index d8b67dd31e..553ab9403b 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -2,7 +2,7 @@ const Metrics = require('@overleaf/metrics') Metrics.initialize('doc-updater') const express = require('express') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const logger = require('logger-sharelatex') logger.initialize('document-updater') diff --git a/services/document-updater/app/js/DeleteQueueManager.js b/services/document-updater/app/js/DeleteQueueManager.js index fc5bb904e3..36466f8b10 100644 --- a/services/document-updater/app/js/DeleteQueueManager.js +++ 
b/services/document-updater/app/js/DeleteQueueManager.js @@ -13,7 +13,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let DeleteQueueManager -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const RedisManager = require('./RedisManager') const ProjectManager = require('./ProjectManager') const logger = require('logger-sharelatex') diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js index c600adec7b..e94e6cde0e 100644 --- a/services/document-updater/app/js/DispatchManager.js +++ b/services/document-updater/app/js/DispatchManager.js @@ -15,7 +15,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let DispatchManager -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const logger = require('logger-sharelatex') const Keys = require('./UpdateKeys') const redis = require('@overleaf/redis-wrapper') diff --git a/services/document-updater/app/js/HistoryManager.js b/services/document-updater/app/js/HistoryManager.js index 18c5b7f6d2..4b7de3f5af 100644 --- a/services/document-updater/app/js/HistoryManager.js +++ b/services/document-updater/app/js/HistoryManager.js @@ -15,7 +15,7 @@ let HistoryManager const async = require('async') const logger = require('logger-sharelatex') const request = require('request') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const HistoryRedisManager = require('./HistoryRedisManager') const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') const RedisManager = require('./RedisManager') diff --git a/services/document-updater/app/js/HistoryRedisManager.js b/services/document-updater/app/js/HistoryRedisManager.js index 20ce0651dd..bd7b3672f4 100644 --- a/services/document-updater/app/js/HistoryRedisManager.js +++ b/services/document-updater/app/js/HistoryRedisManager.js @@ -12,7 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let HistoryRedisManager -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.history ) diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 5e47cf5bf1..9959a84b07 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -3,7 +3,7 @@ const HistoryManager = require('./HistoryManager') const ProjectManager = require('./ProjectManager') const Errors = require('./Errors') const logger = require('logger-sharelatex') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const Metrics = require('./Metrics') const ProjectFlusher = require('./ProjectFlusher') const DeleteQueueManager = require('./DeleteQueueManager') diff --git a/services/document-updater/app/js/LockManager.js b/services/document-updater/app/js/LockManager.js index 1c4427ed44..c23379d811 100644 --- a/services/document-updater/app/js/LockManager.js +++ b/services/document-updater/app/js/LockManager.js @@ -13,7 +13,7 @@ */ let LockManager const metrics = require('./Metrics') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const redis = require('@overleaf/redis-wrapper') const 
rclient = redis.createClient(Settings.redis.lock) const keys = Settings.redis.lock.key_schema diff --git a/services/document-updater/app/js/PersistenceManager.js b/services/document-updater/app/js/PersistenceManager.js index fca23a7c47..664d36a3c7 100644 --- a/services/document-updater/app/js/PersistenceManager.js +++ b/services/document-updater/app/js/PersistenceManager.js @@ -15,7 +15,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let PersistenceManager -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const Errors = require('./Errors') const Metrics = require('./Metrics') const logger = require('logger-sharelatex') diff --git a/services/document-updater/app/js/Profiler.js b/services/document-updater/app/js/Profiler.js index 60431e64cb..6f7a66a7aa 100644 --- a/services/document-updater/app/js/Profiler.js +++ b/services/document-updater/app/js/Profiler.js @@ -9,7 +9,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let Profiler -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const logger = require('logger-sharelatex') const deltaMs = function (ta, tb) { diff --git a/services/document-updater/app/js/ProjectFlusher.js b/services/document-updater/app/js/ProjectFlusher.js index 7f9783ac0d..704600fbc3 100644 --- a/services/document-updater/app/js/ProjectFlusher.js +++ b/services/document-updater/app/js/ProjectFlusher.js @@ -13,7 +13,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const request = require('request') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const RedisManager = require('./RedisManager') const { rclient } = RedisManager const docUpdaterKeys = Settings.redis.documentupdater.key_schema diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js index ab7a9d0faa..3d8b0cd95a 100644 --- a/services/document-updater/app/js/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js @@ -14,7 +14,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let ProjectHistoryRedisManager -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const projectHistoryKeys = __guard__( Settings.redis != null ? 
Settings.redis.project_history : undefined, (x) => x.key_schema diff --git a/services/document-updater/app/js/RateLimitManager.js b/services/document-updater/app/js/RateLimitManager.js index 831b34eae1..9b699235b7 100644 --- a/services/document-updater/app/js/RateLimitManager.js +++ b/services/document-updater/app/js/RateLimitManager.js @@ -10,7 +10,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let RateLimiter -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const logger = require('logger-sharelatex') const Metrics = require('./Metrics') diff --git a/services/document-updater/app/js/RealTimeRedisManager.js b/services/document-updater/app/js/RealTimeRedisManager.js index 1059dc6079..298fb26940 100644 --- a/services/document-updater/app/js/RealTimeRedisManager.js +++ b/services/document-updater/app/js/RealTimeRedisManager.js @@ -12,7 +12,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let RealTimeRedisManager -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js index 418e3ec6d4..59eb10e332 100644 --- a/services/document-updater/app/js/RedisManager.js +++ b/services/document-updater/app/js/RedisManager.js @@ -14,7 +14,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let RedisManager -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) diff --git a/services/document-updater/app/js/ShareJsUpdateManager.js b/services/document-updater/app/js/ShareJsUpdateManager.js index 8ae91df32c..19bfee9ea4 100644 --- a/services/document-updater/app/js/ShareJsUpdateManager.js +++ b/services/document-updater/app/js/ShareJsUpdateManager.js @@ -16,7 +16,7 @@ let ShareJsUpdateManager const ShareJsModel = require('./sharejs/server/model') const ShareJsDB = require('./ShareJsDB') const logger = require('logger-sharelatex') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const Keys = require('./UpdateKeys') const { EventEmitter } = require('events') const util = require('util') diff --git a/services/document-updater/app/js/UpdateManager.js b/services/document-updater/app/js/UpdateManager.js index e92e40f44c..999994d556 100644 --- a/services/document-updater/app/js/UpdateManager.js +++ b/services/document-updater/app/js/UpdateManager.js @@ -20,7 +20,7 @@ const RedisManager = require('./RedisManager') const RealTimeRedisManager = require('./RealTimeRedisManager') const ShareJsUpdateManager = require('./ShareJsUpdateManager') const HistoryManager = require('./HistoryManager') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const _ = require('lodash') const async = require('async') const logger = require('logger-sharelatex') diff --git a/services/document-updater/app/js/mongodb.js b/services/document-updater/app/js/mongodb.js index 86b015a308..115d0adcb7 100644 --- a/services/document-updater/app/js/mongodb.js +++ b/services/document-updater/app/js/mongodb.js @@ -1,4 +1,4 @@ -const Settings = require('settings-sharelatex') +const Settings = 
require('@overleaf/settings') const { MongoClient, ObjectId } = require('mongodb') const clientPromise = MongoClient.connect( diff --git a/services/document-updater/expire_docops.js b/services/document-updater/expire_docops.js index 398bf5229a..b9fafb072d 100644 --- a/services/document-updater/expire_docops.js +++ b/services/document-updater/expire_docops.js @@ -1,4 +1,4 @@ -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index bda3f21608..ff38a00247 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -926,6 +926,11 @@ "ioredis": "~4.27.1" } }, + "@overleaf/settings": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@overleaf/settings/-/settings-2.1.1.tgz", + "integrity": "sha512-vcJwqCGFKmQxTP/syUqCeMaSRjHmBcQgKOACR9He2uJcErg2GZPa1go+nGvszMbkElM4HfRKm/MfxvqHhoN4TQ==" + }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -6239,21 +6244,6 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" }, - "settings-sharelatex": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.3.0.tgz", - "integrity": "sha512-AWSeCiY1eHi+z6DF4bmTyC3tusOc6EF5zDkC28aAOhEAbgVEIuY034kx1X7a4j9HE0iVs0sVOlZrQMWJBVk3oQ==", - "requires": { - "coffee-script": "1.6.0" - }, - "dependencies": { - "coffee-script": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" - } - } - }, "shebang-command": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 230fd6f7cb..44c57d0e05 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -21,6 +21,7 @@ "@overleaf/metrics": "^3.5.1", "@overleaf/o-error": "^3.3.1", "@overleaf/redis-wrapper": "^2.0.1", + "@overleaf/settings": "^2.1.1", "async": "^2.5.0", "body-parser": "^1.19.0", "bunyan": "^1.8.15", @@ -30,8 +31,7 @@ "logger-sharelatex": "^2.2.0", "mongodb": "^3.6.6", "request": "^2.88.2", - "requestretry": "^4.1.2", - "settings-sharelatex": "^1.3.0" + "requestretry": "^4.1.2" }, "devDependencies": { "babel-eslint": "^10.1.0", diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index adf045645e..4c069008bc 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -14,7 +14,7 @@ const sinon = require('sinon') const { expect } = require('chai') const async = require('async') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const rclient_history = require('@overleaf/redis-wrapper').createClient( Settings.redis.history ) // note: this is track changes, not project-history diff --git 
a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index 3bc2c793e1..ea4cc7b0ee 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -1,5 +1,5 @@ const sinon = require('sinon') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient( Settings.redis.project_history ) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 7d2307c526..60d46dcdec 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -1,6 +1,6 @@ const sinon = require('sinon') const { expect } = require('chai') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) diff --git a/services/document-updater/test/acceptance/js/SizeCheckTests.js b/services/document-updater/test/acceptance/js/SizeCheckTests.js index 288cc485e1..ed893884be 100644 --- a/services/document-updater/test/acceptance/js/SizeCheckTests.js +++ b/services/document-updater/test/acceptance/js/SizeCheckTests.js @@ -1,5 +1,5 @@ const { expect } = require('chai') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const MockWebApi = require('./helpers/MockWebApi') const DocUpdaterClient = require('./helpers/DocUpdaterClient') diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index 719df741c3..ee356d3bf7 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -1,5 +1,5 @@ let DocUpdaterClient -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const _ = require('lodash') const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js index 81ef37f4be..d4cb72b89d 100644 --- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -23,7 +23,7 @@ describe('DispatchManager', function () { this.DispatchManager = SandboxedModule.require(modulePath, { requires: { './UpdateManager': (this.UpdateManager = {}), - 'settings-sharelatex': (this.settings = { + '@overleaf/settings': (this.settings = { redis: { documentupdater: {} } diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js index df3261b0f4..5f1529ab0b 100644 --- a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js @@ -21,7 +21,7 @@ 
describe('HistoryManager', function () { this.HistoryManager = SandboxedModule.require(modulePath, { requires: { request: (this.request = {}), - 'settings-sharelatex': (this.Settings = { + '@overleaf/settings': (this.Settings = { apis: { project_history: { enabled: true, diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js index f9b719991a..626398fc6b 100644 --- a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js @@ -26,7 +26,7 @@ describe('HistoryRedisManager', function () { this.HistoryRedisManager = SandboxedModule.require(modulePath, { requires: { '@overleaf/redis-wrapper': { createClient: () => this.rclient }, - 'settings-sharelatex': { + '@overleaf/settings': { redis: { history: (this.settings = { key_schema: { diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index 3d6cf7a5ba..177869d0db 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -30,7 +30,7 @@ describe('LockManager - releasing the lock', function () { '@overleaf/redis-wrapper': { createClient: () => this.client }, - 'settings-sharelatex': { + '@overleaf/settings': { redis: { lock: { key_schema: { diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js index fb49e94aa1..ef92b50214 100644 --- a/services/document-updater/test/unit/js/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -29,7 +29,7 @@ describe('LockManager - trying the lock', function () { } }, './Metrics': { inc() {} }, - 'settings-sharelatex': { + '@overleaf/settings': { redis: { lock: { key_schema: { diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js index 4015ef2662..a9137175b6 100644 --- a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -23,7 +23,7 @@ describe('PersistenceManager', function () { this.PersistenceManager = SandboxedModule.require(modulePath, { requires: { requestretry: this.request, - 'settings-sharelatex': (this.Settings = {}), + '@overleaf/settings': (this.Settings = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { Timer = class Timer { diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 1ff3d53ded..8e20214e88 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -28,7 +28,7 @@ describe('ProjectHistoryRedisManager', function () { modulePath, { requires: { - 'settings-sharelatex': (this.settings = { + '@overleaf/settings': (this.settings = { redis: { project_history: { key_schema: { diff --git 
a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js index 8fef08051f..6c488c4c29 100644 --- a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js +++ b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js @@ -20,7 +20,7 @@ describe('RateLimitManager', function () { let Timer this.RateLimitManager = SandboxedModule.require(modulePath, { requires: { - 'settings-sharelatex': (this.settings = {}), + '@overleaf/settings': (this.settings = {}), './Metrics': (this.Metrics = { Timer: (Timer = (function () { Timer = class Timer { diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js index 83cd5f99ce..c05ede76df 100644 --- a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -29,7 +29,7 @@ describe('RealTimeRedisManager', function () { createClient: (config) => config.name === 'pubsub' ? this.pubsubClient : this.rclient }, - 'settings-sharelatex': { + '@overleaf/settings': { redis: { documentupdater: (this.settings = { key_schema: { diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index 29329e8411..7679d217a3 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -27,7 +27,7 @@ describe('RedisManager', function () { this.RedisManager = SandboxedModule.require(modulePath, { requires: { './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), - 'settings-sharelatex': (this.settings = { + '@overleaf/settings': (this.settings = { documentupdater: { logHashErrors: { write: true, read: true } }, apis: { project_history: { enabled: true } diff --git a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js index cac51b9f5f..907fac3d12 100644 --- a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js +++ b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js @@ -40,7 +40,7 @@ describe('UpdateManager', function () { return Timer })()) }), - 'settings-sharelatex': (this.Settings = {}), + '@overleaf/settings': (this.Settings = {}), './DocumentManager': (this.DocumentManager = {}), './RangesManager': (this.RangesManager = {}), './SnapshotManager': (this.SnapshotManager = {}), From 34de603e90e8097243a1ac05bce115540bd04cb5 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 12 Jul 2021 17:51:01 +0100 Subject: [PATCH 759/769] [misc] run npm dedupe --- services/document-updater/package-lock.json | 860 +++----------------- 1 file changed, 115 insertions(+), 745 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index ff38a00247..de158d6016 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -188,134 +188,16 @@ "split": "^1.0.0" }, "dependencies": { - "@google-cloud/common": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", - "integrity": 
"sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", - "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^7.0.2", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, "acorn": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.0.tgz", "integrity": "sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA==" }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, "coffeescript": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", - "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-auth-library": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", - "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.2.1", - "resolved": 
"https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", - "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, "p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", @@ -332,18 +214,6 @@ "lru-cache": "^6.0.0" } }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, "uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -428,149 +298,11 @@ "teeny-request": "^7.0.0" }, "dependencies": { - "@google-cloud/common": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", - "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", - "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^7.0.2", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, "@types/node": { "version": "13.13.48", "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz", "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ==" }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - 
"requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", - "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-auth-library": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", - "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", - "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, - "protobufjs": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", - "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } - }, "semver": { "version": "7.3.5", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", @@ -579,18 +311,6 @@ 
"lru-cache": "^6.0.0" } }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, "uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -636,146 +356,6 @@ "uuid": "^8.0.0" }, "dependencies": { - "@google-cloud/common": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", - "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==", - "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^7.0.2", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, - "@opencensus/core": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", - "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", - "requires": { - "continuation-local-storage": "^3.2.1", - "log-driver": "^1.2.7", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "uuid": "^8.0.0" - } - }, - "@opencensus/propagation-stackdriver": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", - "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", - "requires": { - "@opencensus/core": "^0.0.22", - "hex2dec": "^1.0.1", - "uuid": "^8.0.0" - } - }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz", - "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - 
"integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-auth-library": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.3.tgz", - "integrity": "sha512-6wJNYqY1QUr5I2lWaUkkzOT2b9OCNhNQrdFOt/bsBbGb7T7NCdEvrBsXraUm+KTUGk2xGlQ7m9RgUd4Llcw8NQ==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", - "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, "semver": { "version": "7.3.5", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", @@ -784,18 +364,6 @@ "lru-cache": "^6.0.0" } }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, "uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -1229,14 +797,6 @@ "dev": true, "requires": { "type-fest": "^0.11.0" - }, - "dependencies": { - "type-fest": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", - "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", - "dev": true - } } }, "ansi-regex": { @@ -1653,7 +1213,6 @@ "requires": { "anymatch": "~3.1.1", "braces": "~3.0.2", - "fsevents": "~2.3.1", "glob-parent": "~5.1.0", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", @@ -1842,6 +1401,27 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, + "cross-spawn": { + "version": "6.0.5", + "resolved": 
"https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, "crypt": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", @@ -2022,6 +1602,11 @@ "safe-buffer": "^5.0.1" } }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, "emitter-listener": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", @@ -2155,27 +1740,6 @@ "v8-compile-cache": "^2.0.3" }, "dependencies": { - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - } - } - }, "debug": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", @@ -2195,19 +1759,8 @@ } }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true - }, - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } + "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" }, "ms": { "version": "2.1.2", @@ -2274,10 +1827,8 @@ }, "dependencies": { "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "version": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "dev": true, "requires": { "locate-path": "^2.0.0" } @@ -2286,7 +1837,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "dev": true, "requires": { "p-locate": "^2.0.0", "path-exists": "^3.0.0" @@ -2296,7 +1846,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "dev": true, "requires": { "p-try": "^1.0.0" } 
@@ -2305,7 +1854,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "dev": true, "requires": { "p-limit": "^1.1.0" } @@ -2313,17 +1861,7 @@ "p-try": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", - "dev": true - }, - "pkg-dir": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", - "dev": true, - "requires": { - "find-up": "^2.1.0" - } + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" } } }, @@ -2397,31 +1935,16 @@ } }, "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "version": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "dev": true, "requires": { "locate-path": "^2.0.0" } }, - "load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", - "strip-bom": "^3.0.0" - } - }, "locate-path": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "dev": true, "requires": { "p-locate": "^2.0.0", "path-exists": "^3.0.0" @@ -2431,7 +1954,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "dev": true, "requires": { "p-try": "^1.0.0" } @@ -2440,7 +1962,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "dev": true, "requires": { "p-limit": "^1.1.0" } @@ -2448,53 +1969,11 @@ "p-try": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", - "dev": true - }, - "parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", - "dev": true, - "requires": { - "error-ex": "^1.2.0" - } - }, - "path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", - "dev": true, - "requires": { - "pify": "^2.0.0" - } + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" }, "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true - }, - "read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", - "dev": true, - "requires": { - "load-json-file": "^2.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" - } - }, - "read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", - "dev": true, - "requires": { - "find-up": "^2.0.0", - "read-pkg": "^2.0.0" - } + "version": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": 
"sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" } } }, @@ -2932,16 +2411,6 @@ "asynckit": "^0.4.0", "combined-stream": "^1.0.6", "mime-types": "^2.1.12" - }, - "dependencies": { - "mime-types": { - "version": "2.1.26", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", - "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", - "requires": { - "mime-db": "1.43.0" - } - } } }, "forwarded": { @@ -2967,13 +2436,6 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, - "fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "optional": true - }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -3356,14 +2818,6 @@ "requires": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" - }, - "dependencies": { - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true - } } }, "imurmurhash": { @@ -3483,12 +2937,6 @@ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true }, - "lodash": { - "version": "4.17.19", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", - "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==", - "dev": true - }, "rxjs": { "version": "6.6.2", "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.2.tgz", @@ -3818,6 +3266,17 @@ "type-check": "~0.3.2" } }, + "load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "strip-bom": "^3.0.0" + } + }, "locate-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", @@ -3950,17 +3409,6 @@ "yn": "^4.0.0" }, "dependencies": { - "bunyan": { - "version": "1.8.15", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz", - "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==", - "requires": { - "dtrace-provider": "~0.8", - "moment": "^2.19.3", - "mv": "~2", - "safe-json-stringify": "~1" - } - }, "yn": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", @@ -4164,12 +3612,6 @@ "brace-expansion": "^1.1.7" } }, - "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "optional": true - }, "minipass": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", @@ -4511,17 +3953,6 @@ "mkdirp": "~0.5.1", "ncp": "~2.0.0", "rimraf": "~2.4.0" - }, - "dependencies": { - "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "optional": true, - "requires": { - "minimist": "0.0.8" - } - } } }, "nan": { @@ -4818,13 +4249,6 @@ 
"integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", "requires": { "ee-first": "1.1.1" - }, - "dependencies": { - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" - } } }, "on-headers": { @@ -4930,6 +4354,15 @@ "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-1.0.0.tgz", "integrity": "sha512-X4kUkCTHU1N/kEbwK9FpUJ0UZQa90VzeczfS704frR30gljxDG0pSziws06XlK+CGRSo/1wtG1mFIdBFQTMQNw==" }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "requires": { + "error-ex": "^1.2.0" + } + }, "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", @@ -4943,8 +4376,7 @@ "path-exists": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" }, "path-is-absolute": { "version": "1.0.1", @@ -4973,6 +4405,12 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, + "path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true + }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -4989,6 +4427,12 @@ "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" }, + "pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "dev": true + }, "pprof": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz", @@ -5024,26 +4468,6 @@ "yocto-queue": "^0.1.0" } }, - "protobufjs": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", - "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } - }, "source-map": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", @@ -5105,19 +4529,6 @@ "restore-cursor": "^2.0.0" } }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, "debug": { "version": "4.1.1", 
"resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", @@ -5260,19 +4671,8 @@ "dev": true }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true - }, - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } + "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" }, "ms": { "version": "2.1.2", @@ -5390,19 +4790,6 @@ "restore-cursor": "^2.0.0" } }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, "debug": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", @@ -5584,19 +4971,8 @@ "dev": true }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true - }, - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } + "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" }, "ms": { "version": "2.1.2", @@ -5907,6 +5283,26 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", "dev": true }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "dev": true, + "requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + } + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "dev": true, + "requires": { + "read-pkg": "^2.0.0" + } + }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -5993,27 +5389,10 @@ "uuid": "^3.3.2" }, "dependencies": { - "mime-types": { - "version": "2.1.26", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", - "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", - "requires": { - "mime-db": "1.43.0" - } - }, "qs": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" - }, - "tough-cookie": { - "version": 
"2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "requires": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - } } } }, @@ -6084,6 +5463,12 @@ "path-parse": "^1.0.6" } }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + }, "restore-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", @@ -6633,28 +6018,9 @@ "readable-stream": "2 || 3" }, "dependencies": { - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, "safe-buffer": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "version": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" - }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - } } } }, @@ -6719,6 +6085,15 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, + "tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "requires": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + } + }, "tslib": { "version": "1.11.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.2.tgz", @@ -6753,6 +6128,12 @@ "integrity": "sha512-f9Uv6ezcpvCQjJU0Zqbg+65qdcszv3qUQsZfjdRbWiZ7AMenrX1u0lNk9EoWWX6e1F+NULyg27mtdeZ5WhpljA==", "dev": true }, + "type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "dev": true + }, "type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -6996,19 +6377,8 @@ }, "dependencies": { "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true - }, - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } + "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": 
"sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" } } }, From 98097b0804746b30b6b0a4763d0eae72b507ab14 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 10:07:00 +0100 Subject: [PATCH 760/769] [misc] goodbye coffee-script --- services/document-updater/package-lock.json | 132 ++++++++++++++++++-- services/document-updater/package.json | 1 - 2 files changed, 124 insertions(+), 9 deletions(-) diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index de158d6016..ad5f52aa32 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -1213,6 +1213,7 @@ "requires": { "anymatch": "~3.1.1", "braces": "~3.0.2", + "fsevents": "~2.3.1", "glob-parent": "~5.1.0", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", @@ -1261,12 +1262,6 @@ "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" }, - "coffee-script": { - "version": "1.12.7", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz", - "integrity": "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==", - "dev": true - }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -2436,6 +2431,13 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -3274,7 +3276,16 @@ "requires": { "graceful-fs": "^4.1.2", "parse-json": "^2.2.0", + "pify": "^2.0.0", "strip-bom": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } } }, "locate-path": { @@ -4409,7 +4420,18 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", - "dev": true + "dev": true, + "requires": { + "pify": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } }, "performance-now": { "version": "2.1.0", @@ -4431,7 +4453,55 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", - "dev": true + "dev": true, + "requires": { + "find-up": "^2.1.0" + }, + "dependencies": { + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": 
"1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + } + } }, "pprof": { "version": "3.0.0", @@ -5300,7 +5370,53 @@ "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", "dev": true, "requires": { + "find-up": "^2.0.0", "read-pkg": "^2.0.0" + }, + "dependencies": { + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + } } }, "readable-stream": { diff --git a/services/document-updater/package.json b/services/document-updater/package.json index 44c57d0e05..b033ddedfb 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -37,7 +37,6 @@ "babel-eslint": "^10.1.0", "chai": "^3.5.0", "cluster-key-slot": "^1.0.5", - "coffee-script": "^1.12.7", "eslint": "^6.8.0", "eslint-config-prettier": "^6.10.0", "eslint-config-standard": "^14.1.0", From 2a3f2641404a56c2c1004c19be2b18cddd66033d Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 11:55:13 +0100 Subject: [PATCH 761/769] [misc] upgrade build scripts to version 3.11.0 and cleanup packages ``` npm uninstall prettier-eslint-cli eslint-plugin-standard eslint-plugin-jsx-a11y eslint-plugin-react eslint-config-standard-jsx eslint-config-standard-react babel-eslint npm dedupe ``` --- services/document-updater/.eslintrc | 2 +- .../document-updater/.github/dependabot.yml | 2 +- services/document-updater/.prettierrc | 6 +- services/document-updater/buildscript.txt | 2 +- services/document-updater/package-lock.json | 3231 +++++------------ services/document-updater/package.json | 35 +- 6 files changed, 863 insertions(+), 2415 deletions(-) diff --git a/services/document-updater/.eslintrc b/services/document-updater/.eslintrc index 321353f971..1c14f50efe 100644 --- a/services/document-updater/.eslintrc +++ b/services/document-updater/.eslintrc @@ -3,9 +3,9 @@ // https://github.com/sharelatex/sharelatex-dev-environment { 
"extends": [ + "eslint:recommended", "standard", "prettier", - "prettier/standard" ], "parserOptions": { "ecmaVersion": 2018 diff --git a/services/document-updater/.github/dependabot.yml b/services/document-updater/.github/dependabot.yml index e2c64a3351..c856753655 100644 --- a/services/document-updater/.github/dependabot.yml +++ b/services/document-updater/.github/dependabot.yml @@ -20,4 +20,4 @@ updates: # future if we reorganise teams labels: - "dependencies" - - "Team-Magma" + - "type:maintenance" diff --git a/services/document-updater/.prettierrc b/services/document-updater/.prettierrc index 24f9ec526f..c92c3526e7 100644 --- a/services/document-updater/.prettierrc +++ b/services/document-updater/.prettierrc @@ -2,6 +2,10 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment { + "arrowParens": "avoid", "semi": false, - "singleQuote": true + "singleQuote": true, + "trailingComma": "es5", + "tabWidth": 2, + "useTabs": false } diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index b7b22bd21d..b75d1c1e34 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -5,4 +5,4 @@ document-updater --env-pass-through= --node-version=12.21.0 --public-repo=True ---script-version=3.8.0 +--script-version=3.11.0 diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json index ad5f52aa32..2fe0324acd 100644 --- a/services/document-updater/package-lock.json +++ b/services/document-updater/package-lock.json @@ -5,134 +5,67 @@ "requires": true, "dependencies": { "@babel/code-frame": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", - "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", + "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", "dev": true, "requires": { - "@babel/highlight": "^7.8.3" + "@babel/highlight": "^7.10.4" } }, - "@babel/generator": { - "version": "7.8.8", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.8.8.tgz", - "integrity": "sha512-HKyUVu69cZoclptr8t8U5b6sx6zoWjh8jiUhnuj3MpZuKT2dJ8zPTuiy31luq32swhI0SpwItCIlU8XW7BZeJg==", - "dev": true, - "requires": { - "@babel/types": "^7.8.7", - "jsesc": "^2.5.1", - "lodash": "^4.17.13", - "source-map": "^0.5.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true - } - } - }, - "@babel/helper-function-name": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz", - "integrity": "sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==", - "dev": true, - "requires": { - "@babel/helper-get-function-arity": "^7.8.3", - "@babel/template": "^7.8.3", - "@babel/types": "^7.8.3" - } - }, - "@babel/helper-get-function-arity": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz", - "integrity": "sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==", - "dev": true, - 
"requires": { - "@babel/types": "^7.8.3" - } - }, - "@babel/helper-split-export-declaration": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", - "integrity": "sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", - "dev": true, - "requires": { - "@babel/types": "^7.8.3" - } + "@babel/helper-validator-identifier": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz", + "integrity": "sha512-5lsetuxCLilmVGyiLEfoHBRX8UCFD+1m2x3Rj97WrW3V7H3u4RWRXA4evMjImCsin2J2YT0QaVDGf+z8ondbAg==", + "dev": true }, "@babel/highlight": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz", - "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==", + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", + "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", "dev": true, "requires": { + "@babel/helper-validator-identifier": "^7.14.5", "chalk": "^2.0.0", - "esutils": "^2.0.2", "js-tokens": "^4.0.0" } }, - "@babel/parser": { - "version": "7.8.8", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.8.8.tgz", - "integrity": "sha512-mO5GWzBPsPf6865iIbzNE0AvkKF3NE+2S3eRUpE+FE07BOAkXh6G+GW/Pj01hhXjve1WScbaIO4UlY1JKeqCcA==", - "dev": true - }, - "@babel/runtime": { - "version": "7.9.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.9.6.tgz", - "integrity": "sha512-64AF1xY3OAkFHqOb9s4jpgk1Mm5vDZ4L3acHvAml+53nO1XbXLuDodsVpO4OIUsmemlUHMxNdYMNJmsvOwLrvQ==", + "@eslint/eslintrc": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.2.tgz", + "integrity": "sha512-8nmGq/4ycLpIwzvhI4tNDmQztZ8sp+hI7cyG8i1nQDhkAbRzHpXPidRAHlNvCZQpJTKw5ItIpMw9RSToGF00mg==", "dev": true, "requires": { - "regenerator-runtime": "^0.13.4" - } - }, - "@babel/runtime-corejs3": { - "version": "7.9.6", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.9.6.tgz", - "integrity": "sha512-6toWAfaALQjt3KMZQc6fABqZwUDDuWzz+cAfPhqyEnzxvdWOAkjwPNxgF8xlmo7OWLsSjaKjsskpKHRLaMArOA==", - "dev": true, - "requires": { - "core-js-pure": "^3.0.0", - "regenerator-runtime": "^0.13.4" - } - }, - "@babel/template": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", - "integrity": "sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.8.3", - "@babel/parser": "^7.8.6", - "@babel/types": "^7.8.6" - } - }, - "@babel/traverse": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.8.6.tgz", - "integrity": "sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.8.3", - "@babel/generator": "^7.8.6", - "@babel/helper-function-name": "^7.8.3", - "@babel/helper-split-export-declaration": "^7.8.3", - "@babel/parser": "^7.8.6", - "@babel/types": "^7.8.6", - "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.13" + "ajv": "^6.12.4", + "debug": "^4.1.1", + "espree": "^7.3.0", + "globals": "^13.9.0", + "ignore": "^4.0.6", + 
"import-fresh": "^3.2.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "strip-json-comments": "^3.1.1" }, "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, "requires": { - "ms": "^2.1.1" + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + "requires": { + "ms": "2.1.2" } }, "ms": { @@ -143,17 +76,6 @@ } } }, - "@babel/types": { - "version": "7.8.7", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.8.7.tgz", - "integrity": "sha512-k2TreEHxFA4CjGkL+GYjRyx35W0Mr7DP5+9q6WMkyKXB+904bYmG40syjMFV0oLlhhFCwWl0vA0DyzTDkwAiJw==", - "dev": true, - "requires": { - "esutils": "^2.0.2", - "lodash": "^4.17.13", - "to-fast-properties": "^2.0.0" - } - }, "@google-cloud/common": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz", @@ -188,24 +110,6 @@ "split": "^1.0.0" }, "dependencies": { - "acorn": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.0.tgz", - "integrity": "sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA==" - }, - "coffeescript": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", - "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" - }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "requires": { - "yocto-queue": "^0.1.0" - } - }, "semver": { "version": "7.3.5", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", @@ -413,6 +317,40 @@ "protobufjs": "^6.8.6" } }, + "@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "dev": true, + "requires": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "dependencies": { + "debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "@humanwhocodes/object-schema": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz", + 
"integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", + "dev": true + }, "@opencensus/core": { "version": "0.0.22", "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", @@ -477,13 +415,6 @@ "integrity": "sha512-1FRBYZO0lbJ0U+FRGZVS8ou6RhEw3e2B86WW/NbtBw554g0h5iC8ESf+juIfPMU/WDf/JDIFbg3eB/LnP2RSow==", "requires": { "core-js": "^3.8.3" - }, - "dependencies": { - "core-js": { - "version": "3.11.1", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.11.1.tgz", - "integrity": "sha512-k93Isqg7e4txZWMGNYwevZL9MiogLk8pd1PtwrmFmi8IBq4GXqUaVW/a33Llt6amSI36uSjd0GWwc9pTT9ALlQ==" - } } }, "@overleaf/redis-wrapper": { @@ -560,14 +491,6 @@ "dev": true, "requires": { "type-detect": "4.0.8" - }, - "dependencies": { - "type-detect": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", - "dev": true - } } }, "@sinonjs/fake-timers": { @@ -598,14 +521,6 @@ "@sinonjs/commons": "^1.6.0", "lodash.get": "^4.4.2", "type-detect": "^4.0.8" - }, - "dependencies": { - "type-detect": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", - "dev": true - } } }, "@sinonjs/text-encoding": { @@ -619,29 +534,11 @@ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==" }, - "@types/color-name": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", - "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", - "dev": true - }, "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" }, - "@types/eslint-visitor-keys": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", - "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", - "dev": true - }, - "@types/json-schema": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", - "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==", - "dev": true - }, "@types/long": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz", @@ -657,59 +554,6 @@ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" }, - "@typescript-eslint/experimental-utils": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", - "integrity": "sha512-zmpS6SyqG4ZF64ffaJ6uah6tWWWgZ8m+c54XXgwFtUv0jNz8aJAVx8chMCvnk7yl6xwn8d+d96+tWp7fXzTuDg==", - "dev": true, - "requires": { - "@types/json-schema": "^7.0.3", - "@typescript-eslint/typescript-estree": "1.13.0", - "eslint-scope": "^4.0.0" - 
}, - "dependencies": { - "eslint-scope": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", - "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", - "dev": true, - "requires": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - } - } - }, - "@typescript-eslint/parser": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-1.13.0.tgz", - "integrity": "sha512-ITMBs52PCPgLb2nGPoeT4iU3HdQZHcPaZVw+7CsFagRJHUhyeTgorEwHXhFf3e7Evzi8oujKNpHc8TONth8AdQ==", - "dev": true, - "requires": { - "@types/eslint-visitor-keys": "^1.0.0", - "@typescript-eslint/experimental-utils": "1.13.0", - "@typescript-eslint/typescript-estree": "1.13.0", - "eslint-visitor-keys": "^1.0.0" - } - }, - "@typescript-eslint/typescript-estree": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-1.13.0.tgz", - "integrity": "sha512-b5rCmd2e6DCC6tCTN9GSUAuxdYwCM/k/2wdjHGrIRGPSJotWMCe/dGpi66u42bhuh8q3QBzqM4TMA1GUUCJvdw==", - "dev": true, - "requires": { - "lodash.unescape": "4.0.1", - "semver": "5.5.0" - }, - "dependencies": { - "semver": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz", - "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==", - "dev": true - } - } - }, "@ungap/promise-all-settled": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", @@ -739,15 +583,14 @@ } }, "acorn": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", - "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", - "dev": true + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.0.tgz", + "integrity": "sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA==" }, "acorn-jsx": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.2.0.tgz", - "integrity": "sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true }, "agent-base": { @@ -790,19 +633,10 @@ "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", "dev": true }, - "ansi-escapes": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", - "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", - "dev": true, - "requires": { - "type-fest": "^0.11.0" - } - }, "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", "dev": true }, "ansi-styles": { @@ -863,40 +697,50 @@ "sprintf-js": "~1.0.2" } }, - 
"aria-query": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", - "integrity": "sha1-ZbP8wcoRVajJrmTW7uKX8V1RM8w=", - "dev": true, - "requires": { - "ast-types-flow": "0.0.7", - "commander": "^2.11.0" - } - }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" }, "array-includes": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz", - "integrity": "sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz", + "integrity": "sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==", "dev": true, "requires": { + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0", + "es-abstract": "^1.18.0-next.2", + "get-intrinsic": "^1.1.1", "is-string": "^1.0.5" } }, "array.prototype.flat": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.3.tgz", - "integrity": "sha512-gBlRZV0VSmfPIeWfuuy56XZMvbVfbEUnOXUvt3F/eUUUSyzlgLxhEX4YAEpxNAogRGehPSnfXyPtYyKAhkzQhQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz", + "integrity": "sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg==", "dev": true, "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1" + "es-abstract": "^1.18.0-next.1" + }, + "dependencies": { + "has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==" + }, + "is-callable": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", + "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==" + }, + "is-string": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", + "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==" + } } }, "arrify": { @@ -920,19 +764,13 @@ "assertion-error": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": "sha1-5gtrDo8wG9l+U3UhW9pAbIURjAs=", - "dev": true - }, - "ast-types-flow": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", - "integrity": "sha1-9wtzXGvKGlycItmCw+Oef+ujva0=", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", "dev": true }, "astral-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", - "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", "dev": true }, "async": { @@ -974,26 +812,6 @@ "resolved": 
"https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz", "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug==" }, - "axobject-query": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.1.2.tgz", - "integrity": "sha512-ICt34ZmrVt8UQnvPl6TVyDTkmhXmAyAT4Jh5ugfGUX4MOrZ+U/ZY6/sdylRw3qGNr9Ub5AJsaHeDMzNLehRdOQ==", - "dev": true - }, - "babel-eslint": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz", - "integrity": "sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.0.0", - "@babel/parser": "^7.7.0", - "@babel/traverse": "^7.7.0", - "@babel/types": "^7.7.0", - "eslint-visitor-keys": "^1.0.0", - "resolve": "^1.12.0" - } - }, "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", @@ -1078,12 +896,6 @@ "type-is": "~1.6.17" } }, - "boolify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/boolify/-/boolify-1.0.1.tgz", - "integrity": "sha1-tcCeF8rNET0Rt7s+04TMASmU2Gs=", - "dev": true - }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -1144,6 +956,16 @@ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, "callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -1151,36 +973,37 @@ "dev": true }, "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", "dev": true }, - "camelcase-keys": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", - "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", - "dev": true, - "requires": { - "camelcase": "^5.3.1", - "map-obj": "^4.0.0", - "quick-lru": "^4.0.1" - } - }, "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" }, "chai": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", - "integrity": "sha1-TQJjewZ/6Vi9v906QOxW/vc3Mkc=", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.4.tgz", + "integrity": "sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA==", "dev": true, "requires": { - "assertion-error": "^1.0.1", - "deep-eql": "^0.1.3", - "type-detect": "^1.0.0" + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": 
"^3.0.1", + "get-func-name": "^2.0.0", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + } + }, + "chai-as-promised": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.1.tgz", + "integrity": "sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA==", + "dev": true, + "requires": { + "check-error": "^1.0.2" } }, "chalk": { @@ -1194,17 +1017,17 @@ "supports-color": "^5.3.0" } }, - "chardet": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", - "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", - "dev": true - }, "charenc": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=" }, + "check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true + }, "chokidar": { "version": "3.5.1", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", @@ -1213,7 +1036,6 @@ "requires": { "anymatch": "~3.1.1", "braces": "~3.0.2", - "fsevents": "~2.3.1", "glob-parent": "~5.1.0", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", @@ -1226,30 +1048,15 @@ "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" }, - "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "requires": { - "restore-cursor": "^3.1.0" - } - }, - "cli-width": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", - "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==", - "dev": true - }, "cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "dev": true, "requires": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" } }, "cluster-key-slot": { @@ -1262,6 +1069,11 @@ "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -1285,18 +1097,6 @@ "delayed-stream": "~1.0.0" } }, - "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "dev": true - 
}, - "common-tags": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", - "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", - "dev": true - }, "compressible": { "version": "2.0.18", "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", @@ -1341,12 +1141,6 @@ "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, - "contains-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", - "dev": true - }, "content-disposition": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", @@ -1380,16 +1174,9 @@ "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, "core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==", - "dev": true - }, - "core-js-pure": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.6.5.tgz", - "integrity": "sha512-lacdXOimsiD0QyNf9BC/mxivNJ/ybBGJXQFKzRekp1WTHoVUWsUHEn+2T8GJAzzIhyOuXA+gOxCVN3l+5PLPUA==", - "dev": true + "version": "3.11.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.11.1.tgz", + "integrity": "sha512-k93Isqg7e4txZWMGNYwevZL9MiogLk8pd1PtwrmFmi8IBq4GXqUaVW/a33Llt6amSI36uSjd0GWwc9pTT9ALlQ==" }, "core-util-is": { "version": "1.0.2", @@ -1397,24 +1184,14 @@ "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dev": true, "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - } + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" } }, "crypt": { @@ -1427,12 +1204,6 @@ "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, - "damerau-levenshtein": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.6.tgz", - "integrity": "sha512-JVrozIeElnj3QzfUIt8tB8YMluBJom4Vw9qTPpjGYQ9fYlB3D/rb6OordUxf3xeFB35LKWs0xqcO5U6ySvBtug==", - "dev": true - }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", @@ -1450,26 +1221,18 @@ } }, "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", "dev": true }, "deep-eql": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", - "integrity": "sha1-71WKyrjeJSBs1xOQbXTlaTDrafI=", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", "dev": true, "requires": { - "type-detect": "0.1.1" - }, - "dependencies": { - "type-detect": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", - "integrity": "sha1-C6XsKohWQORw6k6FBZcZANrFiCI=", - "dev": true - } + "type-detect": "^4.0.0" } }, "deep-extend": { @@ -1537,12 +1300,6 @@ "version": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz", "integrity": "sha512-rX+9ry8tosctHzJfYG9Vjpof6wTYYA/oFHnzpv6O1vkUd+5dTc9LpZCTUv+FK8i4grpITxY8BYSk8A3u4anwJQ==" }, - "dlv": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", - "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", - "dev": true - }, "doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -1611,9 +1368,9 @@ } }, "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true }, "encodeurl": { @@ -1629,6 +1386,15 @@ "once": "^1.4.0" } }, + "enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "requires": { + "ansi-colors": "^4.1.1" + } + }, "ent": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", @@ -1644,22 +1410,24 @@ } }, "es-abstract": { - "version": "1.17.5", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", - "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "version": "1.18.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.3.tgz", + "integrity": "sha512-nQIr12dxV7SSxE6r6f1l3DtAeEYdsGpps13dR0TwJg1S8gyp4ZPgy3FZcHBgbiQqnoqSTb+oC+kO4UQ0C/J8vw==", "dev": true, "requires": { + "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", + "get-intrinsic": "^1.1.1", "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.1.5", - "is-regex": "^1.0.5", - "object-inspect": "^1.7.0", + "is-negative-zero": "^2.0.1", + "is-regex": "^1.1.3", + "object-inspect": "^1.10.3", "object-keys": "^1.1.1", - "object.assign": "^4.1.0", - "string.prototype.trimleft": "^2.1.1", - "string.prototype.trimright": "^2.1.1" + "object.assign": "^4.1.2", + "string.prototype.trimend": "^1.0.4", + "string.prototype.trimstart": "^1.0.4", + "unbox-primitive": "^1.0.1" } }, "es-to-primitive": { @@ -1691,71 
+1459,167 @@ "dev": true }, "eslint": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", - "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "version": "7.30.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.30.0.tgz", + "integrity": "sha512-VLqz80i3as3NdloY44BQSJpFw534L9Oh+6zJOUaViV4JPd+DaHwutqP7tcpkW3YiXbK6s05RZl7yl7cQn+lijg==", "dev": true, "requires": { - "@babel/code-frame": "^7.0.0", + "@babel/code-frame": "7.12.11", + "@eslint/eslintrc": "^0.4.2", + "@humanwhocodes/config-array": "^0.5.0", "ajv": "^6.10.0", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", "debug": "^4.0.1", "doctrine": "^3.0.0", - "eslint-scope": "^5.0.0", - "eslint-utils": "^1.4.3", - "eslint-visitor-keys": "^1.1.0", - "espree": "^6.1.2", - "esquery": "^1.0.1", + "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^2.1.0", + "eslint-visitor-keys": "^2.0.0", + "espree": "^7.3.1", + "esquery": "^1.4.0", "esutils": "^2.0.2", - "file-entry-cache": "^5.0.1", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.0.0", - "globals": "^12.1.0", + "glob-parent": "^5.1.2", + "globals": "^13.6.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^7.0.0", "is-glob": "^4.0.0", "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.14", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", - "optionator": "^0.8.3", + "optionator": "^0.9.1", "progress": "^2.0.0", - "regexpp": "^2.0.1", - "semver": "^6.1.2", - "strip-ansi": "^5.2.0", - "strip-json-comments": "^3.0.1", - "table": "^5.2.3", + "regexpp": "^3.1.0", + "semver": "^7.2.1", + "strip-ansi": "^6.0.0", + "strip-json-comments": "^3.1.0", + "table": "^6.0.9", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" }, "dependencies": { - "debug": { + "chalk": { "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", + "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==", "dev": true, "requires": { - "ms": "^2.1.1" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, - "globals": { - "version": "12.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", - "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { - "type-fest": "^0.8.1" + "color-name": "~1.1.4" } }, - "minimist": { - "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true + }, + "estraverse": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==" + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "ms": { "version": "2.1.2", @@ 
-1763,48 +1627,41 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, - "type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "dev": true + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "requires": { + "glob": "^7.1.3" + } + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } } } }, "eslint-config-prettier": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.11.0.tgz", - "integrity": "sha512-oB8cpLWSAjOVFEJhhyMZh6NOEOtBVziaqdDQ86+qhDHFbZXoRTM7pNSvFRfW/W/L/LrQ38C99J5CGuRBBzBsdA==", - "dev": true, - "requires": { - "get-stdin": "^6.0.0" - } + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.3.0.tgz", + "integrity": "sha512-BgZuLUSeKzvlL/VUjx/Yb787VQ26RU3gGjA3iiFvdsp/2bMfVIWUVP7tjxtjS0e+HP409cPlPvNkQloz8C91ew==", + "dev": true }, "eslint-config-standard": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-14.1.1.tgz", - "integrity": "sha512-Z9B+VR+JIXRxz21udPTL9HpFMyoMUEeX1G251EQ6e05WD9aPVtVBn09XUmZ259wCMlCDmYDSZG62Hhm+ZTJcUg==", + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz", + "integrity": "sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg==", "dev": true }, - "eslint-config-standard-jsx": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-8.1.0.tgz", - "integrity": "sha512-ULVC8qH8qCqbU792ZOO6DaiaZyHNS/5CZt3hKqHkEhVlhPEPN3nfBqqxJCyp59XrjIBZPu1chMYe9T2DXZ7TMw==", - "dev": true - }, - "eslint-config-standard-react": { - "version": "9.2.0", - "resolved": "https://registry.npmjs.org/eslint-config-standard-react/-/eslint-config-standard-react-9.2.0.tgz", - "integrity": "sha512-u+KRP2uCtthZ/W4DlLWCC59GZNV/y9k9yicWWammgTs/Omh8ZUUPF3EnYm81MAcbkYQq2Wg0oxutAhi/FQ8mIw==", - "dev": true, - "requires": { - "eslint-config-standard-jsx": "^8.0.0" - } - }, "eslint-import-resolver-node": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.3.tgz", - "integrity": "sha512-b8crLDo0M5RSe5YG8Pu2DYBj71tSB6OvXkfzwbJU2w7y8P4/yo0MyF8jU26IEuEuHF2K5/gcAJE3LhQGqBBbVg==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz", + "integrity": "sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA==", "dev": true, "requires": { "debug": "^2.6.9", @@ -1812,64 +1669,42 @@ } }, "eslint-module-utils": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz", - "integrity": 
"sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz", + "integrity": "sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A==", "dev": true, "requires": { - "debug": "^2.6.9", + "debug": "^3.2.7", "pkg-dir": "^2.0.0" }, "dependencies": { - "find-up": { - "version": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, "requires": { - "locate-path": "^2.0.0" + "ms": "^2.1.1" } }, - "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - } - }, - "p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "requires": { - "p-try": "^1.0.0" - } - }, - "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "requires": { - "p-limit": "^1.1.0" - } - }, - "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true } } }, "eslint-plugin-chai-expect": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.1.0.tgz", - "integrity": "sha512-rd0/4mjMV6c3i0o4DKkWI4uaFN9DK707kW+/fDphaDI6HVgxXnhML9Xgt5vHnTXmSSnDhupuCFBgsEAEpchXmQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.2.0.tgz", + "integrity": "sha512-ExTJKhgeYMfY8wDj3UiZmgpMKJOUHGNHmWMlxT49JUDB1vTnw0sSNfXJSxnX+LcebyBD/gudXzjzD136WqPJrQ==", "dev": true }, "eslint-plugin-chai-friendly": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.5.0.tgz", - "integrity": "sha512-Pxe6z8C9fP0pn2X2nGFU/b3GBOCM/5FVus1hsMwJsXP3R7RiXFl7g0ksJbsc0GxiLyidTW4mEFk77qsNn7Tk7g==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.6.0.tgz", + "integrity": "sha512-Uvvv1gkbRGp/qfN15B0kQyQWg+oFA8buDSqrwmW3egNSk/FpqH2MjQqKOuKwmEL6w4QIQrIjDp+gg6kGGmD3oQ==", "dev": true }, "eslint-plugin-es": { @@ -1880,134 +1715,77 @@ "requires": { "eslint-utils": "^2.0.0", "regexpp": "^3.0.0" - }, - "dependencies": { - "eslint-utils": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", - "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^1.1.0" - } - }, - "regexpp": { - "version": "3.1.0", - 
"resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz", - "integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==", - "dev": true - } } }, "eslint-plugin-import": { - "version": "2.20.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.2.tgz", - "integrity": "sha512-FObidqpXrR8OnCh4iNsxy+WACztJLXAHBO5hK79T1Hc77PgQZkyDGA5Ag9xAvRpglvLNxhH/zSmZ70/pZ31dHg==", + "version": "2.23.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz", + "integrity": "sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ==", "dev": true, "requires": { - "array-includes": "^3.0.3", - "array.prototype.flat": "^1.2.1", - "contains-path": "^0.1.0", + "array-includes": "^3.1.3", + "array.prototype.flat": "^1.2.4", "debug": "^2.6.9", - "doctrine": "1.5.0", - "eslint-import-resolver-node": "^0.3.2", - "eslint-module-utils": "^2.4.1", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.4", + "eslint-module-utils": "^2.6.1", + "find-up": "^2.0.0", "has": "^1.0.3", + "is-core-module": "^2.4.0", "minimatch": "^3.0.4", - "object.values": "^1.1.0", - "read-pkg-up": "^2.0.0", - "resolve": "^1.12.0" + "object.values": "^1.1.3", + "pkg-up": "^2.0.0", + "read-pkg-up": "^3.0.0", + "resolve": "^1.20.0", + "tsconfig-paths": "^3.9.0" }, "dependencies": { "doctrine": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, "requires": { - "esutils": "^2.0.2", - "isarray": "^1.0.0" + "esutils": "^2.0.2" } }, - "find-up": { - "version": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "requires": { - "locate-path": "^2.0.0" - } + "has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==" }, - "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - } + "is-callable": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", + "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==" }, "p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "version": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", "requires": { "p-try": "^1.0.0" } }, - "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "resolve": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", + "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "dev": true, "requires": 
{ - "p-limit": "^1.1.0" + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" } - }, - "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" - }, - "pify": { - "version": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" } } }, - "eslint-plugin-jsx-a11y": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", - "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", - "dev": true, - "requires": { - "@babel/runtime": "^7.4.5", - "aria-query": "^3.0.0", - "array-includes": "^3.0.3", - "ast-types-flow": "^0.0.7", - "axobject-query": "^2.0.2", - "damerau-levenshtein": "^1.0.4", - "emoji-regex": "^7.0.2", - "has": "^1.0.3", - "jsx-ast-utils": "^2.2.1" - } - }, "eslint-plugin-mocha": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-6.3.0.tgz", - "integrity": "sha512-Cd2roo8caAyG21oKaaNTj7cqeYRWW1I2B5SfpKRp0Ip1gkfwoR1Ow0IGlPWnNjzywdF4n+kHL8/9vM6zCJUxdg==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-8.2.0.tgz", + "integrity": "sha512-8oOR47Ejt+YJPNQzedbiklDqS1zurEaNrxXpRs+Uk4DMDPVmKNagShFeUaYsfvWP55AhI+P1non5QZAHV6K78A==", "dev": true, "requires": { - "eslint-utils": "^2.0.0", - "ramda": "^0.27.0" - }, - "dependencies": { - "eslint-utils": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", - "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^1.1.0" - } - } + "eslint-utils": "^2.1.0", + "ramda": "^0.27.1" } }, "eslint-plugin-node": { @@ -2024,15 +1802,6 @@ "semver": "^6.1.0" }, "dependencies": { - "eslint-utils": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", - "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^1.1.0" - } - }, "ignore": { "version": "5.1.4", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", @@ -2056,57 +1825,20 @@ "integrity": "sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw==", "dev": true }, - "eslint-plugin-react": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.19.0.tgz", - "integrity": "sha512-SPT8j72CGuAP+JFbT0sJHOB80TX/pu44gQ4vXH/cq+hQTiY2PuZ6IHkqXJV6x1b28GDdo1lbInjKUrrdUf0LOQ==", - "dev": true, - "requires": { - "array-includes": "^3.1.1", - "doctrine": "^2.1.0", - "has": "^1.0.3", - "jsx-ast-utils": "^2.2.3", - "object.entries": "^1.1.1", - "object.fromentries": "^2.0.2", - "object.values": "^1.1.1", - "prop-types": "^15.7.2", - "resolve": "^1.15.1", - "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.2", - "xregexp": "^4.3.0" - }, - "dependencies": { - "doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - } - } - }, - "eslint-plugin-standard": { - "version": "4.0.1", - 
"resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-4.0.1.tgz", - "integrity": "sha512-v/KBnfyaOMPmZc/dmc6ozOdWqekGp7bBGq4jLAecEfPGmfKiWS4sA8sC0LqiV9w5qmXAtXVn4M3p1jSyhY85SQ==", - "dev": true - }, "eslint-scope": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.0.tgz", - "integrity": "sha512-iiGRvtxWqgtx5m8EyQUJihBloE4EnYeGE/bz1wSPwJE6tZuJUtHlhqDM4Xj2ukE8Dyy1+HCZ4hE0fzIVMzb58w==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dev": true, "requires": { - "esrecurse": "^4.1.0", + "esrecurse": "^4.3.0", "estraverse": "^4.1.1" } }, "eslint-utils": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", - "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", "dev": true, "requires": { "eslint-visitor-keys": "^1.1.0" @@ -2119,20 +1851,26 @@ "dev": true }, "espree": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", - "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", + "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", "dev": true, "requires": { - "acorn": "^7.1.1", - "acorn-jsx": "^5.2.0", - "eslint-visitor-keys": "^1.1.0" + "acorn": "^7.4.0", + "acorn-jsx": "^5.3.1", + "eslint-visitor-keys": "^1.3.0" }, "dependencies": { "acorn": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.0.tgz", - "integrity": "sha512-+G7P8jJmCHr+S+cLfQxygbWhXy+8YTVGzAkpEbcLo2mLoL7tij/VG41QSHACSf5QgYRhMZYHuNc6drJaO0Da+w==", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true + }, + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", "dev": true } } @@ -2144,31 +1882,28 @@ "dev": true }, "esquery": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.3.1.tgz", - "integrity": "sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dev": true + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, "requires": { - "estraverse": "^5.1.0" + "estraverse": "^5.2.0" }, "dependencies": { "estraverse": { - "version": "5.1.0", - "resolved": 
"https://registry.npmjs.org/estraverse/-/estraverse-5.1.0.tgz", - "integrity": "sha512-FyohXK+R0vE+y1nHLoBM7ZTyqRpqAlhdZHCWIWEviFLiGB8b04H6bQs8G+XTthacvT8VuwvteiP7RJSxMs8UEw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", "dev": true } } }, - "esrecurse": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", - "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", - "dev": true, - "requires": { - "estraverse": "^4.1.0" - } - }, "estraverse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", @@ -2242,17 +1977,6 @@ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, - "external-editor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", - "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", - "dev": true, - "requires": { - "chardet": "^0.7.0", - "iconv-lite": "^0.4.24", - "tmp": "^0.0.33" - } - }, "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", @@ -2285,22 +2009,13 @@ "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", "integrity": "sha1-PlzoKTQJz6pxd6cbnKhOGx5vJe8=" }, - "figures": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5" - } - }, "file-entry-cache": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", - "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, "requires": { - "flat-cache": "^2.0.1" + "flat-cache": "^3.0.4" } }, "file-uri-to-path": { @@ -2332,12 +2047,12 @@ } }, "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", "dev": true, "requires": { - "locate-path": "^3.0.0" + "locate-path": "^2.0.0" } }, "findit2": { @@ -2352,45 +2067,18 @@ "dev": true }, "flat-cache": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", - "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", "dev": true, "requires": { - "flatted": 
"^2.0.0", - "rimraf": "2.6.3", - "write": "1.0.3" - }, - "dependencies": { - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "rimraf": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", - "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - } + "flatted": "^3.1.0" } }, "flatted": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", - "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.1.tgz", + "integrity": "sha512-OMQjaErSFHmHqZe+PSidH5n8j3O0F2DdnVh8JB4j4eUQ2k6KvB0qGfrKIhapvez5JerBbmWkaLYUYWISaESoXg==", "dev": true }, "forever-agent": { @@ -2431,13 +2119,6 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, - "fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "optional": true - }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -2525,12 +2206,23 @@ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true }, - "get-stdin": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", - "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", "dev": true }, + "get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } + }, "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", @@ -2562,10 +2254,13 @@ } }, "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.10.0.tgz", + "integrity": "sha512-piHC3blgLGFjvOuMmWZX60f+na1lXFDhQXBf1UYp2fXPXqvEUbOhNwi6BsQ0bQishwedgnjkwv1d9zKf+MWw3g==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" + } }, "google-auth-library": { "version": "7.0.4", @@ -2617,9 +2312,9 @@ } }, "graceful-fs": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - 
"integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", + "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==", "dev": true }, "growl": { @@ -2661,22 +2356,11 @@ "function-bind": "^1.1.1" } }, - "has-ansi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - } - } + "has-bigints": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz", + "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==", + "dev": true }, "has-flag": { "version": "3.0.0", @@ -2828,12 +2512,6 @@ "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", "dev": true }, - "indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "dev": true - }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -2853,143 +2531,6 @@ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, - "inquirer": { - "version": "7.3.3", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", - "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", - "dev": true, - "requires": { - "ansi-escapes": "^4.2.1", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-width": "^3.0.0", - "external-editor": "^3.0.3", - "figures": "^3.0.0", - "lodash": "^4.17.19", - "mute-stream": "0.0.8", - "run-async": "^2.4.0", - "rxjs": "^6.6.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0", - "through": "^2.3.6" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true - }, - "ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", - "dev": true, - "requires": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "cli-width": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", - "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", - "dev": true - }, - 
"color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true - }, - "rxjs": { - "version": "6.6.2", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.2.tgz", - "integrity": "sha512-BHdBMVoWC2sL26w//BCu3YzKT4s2jip/WhwsGEDmeKYBhKDZeYezVUnHatYB7L85v5xs0BAQmg6BEYJEKxBabg==", - "dev": true, - "requires": { - "tslib": "^1.9.0" - } - }, - "string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" - } - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - }, - "supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "internal-slot": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.2.tgz", - "integrity": "sha512-2cQNfwhAfJIkU4KZPkDI+Gj5yNNnbqi40W9Gge6dfnk4TocEVm00B3bdiL+JINrbGJil2TeHvM4rETGzk/f/0g==", - "dev": true, - "requires": { - "es-abstract": "^1.17.0-next.1", - "has": "^1.0.3", - "side-channel": "^1.0.2" - } - }, "ioredis": { "version": "4.27.1", "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.27.1.tgz", @@ -3038,6 +2579,12 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, + "is-bigint": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz", + "integrity": "sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==", + "dev": true + }, "is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", @@ -3047,6 +2594,15 @@ 
"binary-extensions": "^2.0.0" } }, + "is-boolean-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.1.tgz", + "integrity": "sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, "is-buffer": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", @@ -3058,6 +2614,15 @@ "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", "dev": true }, + "is-core-module": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.5.0.tgz", + "integrity": "sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, "is-date-object": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", @@ -3084,12 +2649,24 @@ "is-extglob": "^2.1.1" } }, + "is-negative-zero": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", + "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==", + "dev": true + }, "is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true }, + "is-number-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.5.tgz", + "integrity": "sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==", + "dev": true + }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", @@ -3102,12 +2679,12 @@ "dev": true }, "is-regex": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", - "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", + "integrity": "sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==", "dev": true, "requires": { - "has": "^1.0.3" + "call-bind": "^1.0.2" } }, "is-stream": { @@ -3177,12 +2754,6 @@ "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" }, - "jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true - }, "json-bigint": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", @@ -3191,6 +2762,12 @@ "bignumber.js": "^9.0.0" } }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", @@ -3212,6 +2789,15 @@ "resolved": 
"https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, + "json5": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", @@ -3223,16 +2809,6 @@ "verror": "1.10.0" } }, - "jsx-ast-utils": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.2.3.tgz", - "integrity": "sha512-EdIHFMm+1BPynpKOpdPqiOsvnIrInRGJD7bzPZdPkjitQEqpdpUuFpq4T0npZFKTiB3RhWFdGN+oqOJIdhDhQA==", - "dev": true, - "requires": { - "array-includes": "^3.0.3", - "object.assign": "^4.1.0" - } - }, "just-extend": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.1.0.tgz", @@ -3259,42 +2835,42 @@ } }, "levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, "requires": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" } }, "load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", "dev": true, "requires": { "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", + "parse-json": "^4.0.0", + "pify": "^3.0.0", "strip-bom": "^3.0.0" }, "dependencies": { "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", "dev": true } } }, "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", "dev": true, "requires": { - "p-locate": "^3.0.0", + "p-locate": "^2.0.0", "path-exists": "^3.0.0" } }, @@ -3308,6 +2884,12 @@ "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, + "lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", + "dev": true + }, "lodash.defaults": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", @@ -3324,22 +2906,16 @@ "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", "dev": true }, - "lodash.memoize": { - "version": "4.1.2", - "resolved": 
"https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=", - "dev": true - }, "lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "lodash.unescape": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", - "integrity": "sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=", + "lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", "dev": true }, "log-driver": { @@ -3427,78 +3003,11 @@ } } }, - "loglevel": { - "version": "1.6.8", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.8.tgz", - "integrity": "sha512-bsU7+gc9AJ2SqpzxwU3+1fedl8zAntbtC5XYlt3s2j1hJcn2PsXSmgN8TaLG/J1/2mod4+cE/3vNL70/c1RNCA==", - "dev": true - }, - "loglevel-colored-level-prefix": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/loglevel-colored-level-prefix/-/loglevel-colored-level-prefix-1.0.0.tgz", - "integrity": "sha1-akAhj9x64V/HbD0PPmdsRlOIYD4=", - "dev": true, - "requires": { - "chalk": "^1.1.3", - "loglevel": "^1.4.1" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", - "dev": true - }, - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true - } - } - }, "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", "integrity": "sha1-mntxz7fTYaGU6lVSQckvdGjVvyg=" }, - "loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dev": true, - "requires": { - "js-tokens": "^3.0.0 || ^4.0.0" - } - }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -3514,24 +3023,6 @@ } } }, - "make-plural": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", - "integrity": "sha512-xTYd4JVHpSCW+aqDof6w/MebaMVNTVYBZhbB/vi513xXdiPT92JMVCo0Jq8W2UZnzYRFeVbQiQ+I25l13JuKvA==", - "dev": true, - "requires": { - "minimist": "^1.2.0" - }, - "dependencies": { - "minimist": { - "version": "1.2.5", - "resolved": 
"https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true, - "optional": true - } - } - }, "map-obj": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", @@ -3563,29 +3054,6 @@ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" }, - "messageformat": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/messageformat/-/messageformat-2.3.0.tgz", - "integrity": "sha512-uTzvsv0lTeQxYI2y1NPa1lItL5VRI8Gb93Y2K2ue5gBPyrbJxfDi/EYWxh2PKv5yO42AJeeqblS9MJSh/IEk4w==", - "dev": true, - "requires": { - "make-plural": "^4.3.0", - "messageformat-formatters": "^2.0.1", - "messageformat-parser": "^4.1.2" - } - }, - "messageformat-formatters": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/messageformat-formatters/-/messageformat-formatters-2.0.1.tgz", - "integrity": "sha512-E/lQRXhtHwGuiQjI7qxkLp8AHbMD5r2217XNe/SREbBlSawe0lOqsFb7rflZJmlQFSULNLIqlcjjsCPlB3m3Mg==", - "dev": true - }, - "messageformat-parser": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/messageformat-parser/-/messageformat-parser-4.1.3.tgz", - "integrity": "sha512-2fU3XDCanRqeOCkn7R5zW5VQHWf+T3hH65SzuqRvjatBK7r4uyFa5mEX+k6F9Bd04LVM5G4/BHBTUJsOdW7uyg==", - "dev": true - }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", @@ -3609,12 +3077,6 @@ "mime-db": "1.43.0" } }, - "mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true - }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", @@ -3623,6 +3085,11 @@ "brace-expansion": "^1.1.7" } }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + }, "minipass": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", @@ -3646,13 +3113,6 @@ "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", "requires": { "minimist": "^1.2.5" - }, - "dependencies": { - "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" - } } }, "mocha": { @@ -3688,17 +3148,10 @@ "yargs-unparser": "2.0.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, "requires": { "color-convert": "^2.0.1" } @@ -3709,22 +3162,10 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, - "cliui": { - 
"version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dev": true, - "requires": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, "requires": { "color-name": "~1.1.4" } @@ -3732,8 +3173,7 @@ "color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "debug": { "version": "4.3.1", @@ -3752,12 +3192,6 @@ } } }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, "escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -3797,8 +3231,7 @@ "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "js-yaml": { "version": "4.0.0", @@ -3824,15 +3257,6 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "requires": { - "yocto-queue": "^0.1.0" - } - }, "p-locate": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", @@ -3848,26 +3272,6 @@ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true }, - "string-width": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", - "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" - } - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - }, "supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -3876,47 +3280,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - 
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, - "yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", - "dev": true, - "requires": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - } - }, - "yargs-parser": { - "version": "20.2.4", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", - "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", - "dev": true } } }, @@ -3949,12 +3312,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, - "mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", - "dev": true - }, "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", @@ -4019,12 +3376,6 @@ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, "nise": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/nise/-/nise-4.0.3.tgz", @@ -4195,9 +3546,9 @@ "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, "object-inspect": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", + "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", "dev": true }, "object-keys": { @@ -4207,51 +3558,26 @@ "dev": true }, "object.assign": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", - "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", - "dev": true, - "requires": { - "define-properties": "^1.1.2", - "function-bind": "^1.1.1", - "has-symbols": "^1.0.0", - "object-keys": "^1.0.11" - } - }, - "object.entries": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.1.tgz", - "integrity": "sha512-ilqR7BgdyZetJutmDPfXCDffGa0/Yzl2ivVNpbx/g4UeWrCdRnFDUBrKJGLhGieRHDATnyZXWBeCb29k9CJysQ==", + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", + "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", "dev": true, "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1", - "function-bind": "^1.1.1", - "has": "^1.0.3" - } - }, - "object.fromentries": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.2.tgz", - "integrity": "sha512-r3ZiBH7MQppDJVLx6fhD618GKNG40CZYH9wgwdhKxBDDbQgjeWGGd4AtkZad84d291YxvWe7bJGuE65Anh0dxQ==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1", - "function-bind": "^1.1.1", - "has": "^1.0.3" + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" } }, "object.values": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", - "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz", + "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==", "dev": true, "requires": { + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1", - "function-bind": "^1.1.1", - "has": "^1.0.3" + "es-abstract": "^1.18.2" } }, "on-finished": { @@ -4275,32 +3601,23 @@ "wrappy": "1" } }, - "onetime": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.1.tgz", - "integrity": "sha512-ZpZpjcJeugQfWsfyQlshVoowIIQ1qBGSVll4rfDq6JJVO//fesjoX808hXWfBjY+ROZgpKDI5TRSRBSoJiZ8eg==", - "dev": true, - "requires": { - "mimic-fn": "^2.1.0" - } - }, "optional-require": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.0.3.tgz", "integrity": "sha512-RV2Zp2MY2aeYK5G+B/Sps8lW5NHAzE5QClbFP15j+PWmP+T9PxlJXBOOLoSAdgwFvS4t0aMR4vpedMkbHfh0nA==" }, "optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", "dev": true, "requires": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "word-wrap": "~1.2.3" + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" } }, "os-homedir": { @@ -4323,34 +3640,24 @@ } }, "p-limit": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", - "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", - "dev": true, + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "requires": { - "p-try": "^2.0.0" + "yocto-queue": "^0.1.0" } }, "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": 
"sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "requires": { - "p-limit": "^2.0.0" - } + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true }, "p-map": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==" }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=", - "dev": true - }, "parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -4366,12 +3673,13 @@ "integrity": "sha512-X4kUkCTHU1N/kEbwK9FpUJ0UZQa90VzeczfS704frR30gljxDG0pSziws06XlK+CGRSo/1wtG1mFIdBFQTMQNw==" }, "parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", "dev": true, "requires": { - "error-ex": "^1.2.0" + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" } }, "parse-ms": { @@ -4387,23 +3695,18 @@ "path-exists": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, - "path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", - "dev": true - }, "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "dev": true }, "path-parse": { @@ -4417,22 +3720,28 @@ "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, "path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "dev": true, "requires": { - "pify": "^2.0.0" + "pify": "^3.0.0" }, "dependencies": { "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", "dev": true } } }, + "pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true + }, 
"performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -4458,48 +3767,32 @@ "find-up": "^2.1.0" }, "dependencies": { - "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "dev": true, - "requires": { - "locate-path": "^2.0.0" - } - }, - "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "dev": true, - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - } - }, "p-limit": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "dev": true, "requires": { "p-try": "^1.0.0" } - }, - "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "dev": true, + } + } + }, + "pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", + "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", + "dev": true, + "requires": { + "find-up": "^2.1.0" + }, + "dependencies": { + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", "requires": { - "p-limit": "^1.1.0" + "p-try": "^1.0.0" } - }, - "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", - "dev": true } } }, @@ -4530,14 +3823,6 @@ "resolved": "https://registry.npmjs.org/delay/-/delay-4.4.1.tgz", "integrity": "sha512-aL3AhqtfhOlT/3ai6sWXeqwnw63ATNpnUiN4HL7x9q+My5QtHlO3OIkasmug9LKzpheLdmUKGRKnYXYAS7FQkQ==" }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "requires": { - "yocto-queue": "^0.1.0" - } - }, "source-map": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", @@ -4546,583 +3831,17 @@ } }, "prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true }, "prettier": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.0.5.tgz", - "integrity": "sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.3.2.tgz", + "integrity": "sha512-lnJzDfJ66zkMy58OL5/NY5zp70S7Nz6KqcKkXYzn2tMVrNxvbqaBpg7H3qHaLxCJ5lNMsGuM8+ohS7cZrthdLQ==", "dev": true }, - "prettier-eslint": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/prettier-eslint/-/prettier-eslint-9.0.1.tgz", - "integrity": "sha512-KZT65QTosSAqBBqmrC+RpXbsMRe7Os2YSR9cAfFbDlyPAopzA/S5bioiZ3rpziNQNSJaOxmtXSx07EQ+o2Dlug==", - 
"dev": true, - "requires": { - "@typescript-eslint/parser": "^1.10.2", - "common-tags": "^1.4.0", - "core-js": "^3.1.4", - "dlv": "^1.1.0", - "eslint": "^5.0.0", - "indent-string": "^4.0.0", - "lodash.merge": "^4.6.0", - "loglevel-colored-level-prefix": "^1.0.0", - "prettier": "^1.7.0", - "pretty-format": "^23.0.1", - "require-relative": "^0.8.7", - "typescript": "^3.2.1", - "vue-eslint-parser": "^2.0.2" - }, - "dependencies": { - "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true - }, - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", - "dev": true, - "requires": { - "restore-cursor": "^2.0.0" - } - }, - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "eslint": { - "version": "5.16.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", - "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.0.0", - "ajv": "^6.9.1", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", - "debug": "^4.0.1", - "doctrine": "^3.0.0", - "eslint-scope": "^4.0.3", - "eslint-utils": "^1.3.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^5.0.1", - "esquery": "^1.0.1", - "esutils": "^2.0.2", - "file-entry-cache": "^5.0.1", - "functional-red-black-tree": "^1.0.1", - "glob": "^7.1.2", - "globals": "^11.7.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "inquirer": "^6.2.2", - "js-yaml": "^3.13.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.11", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", - "natural-compare": "^1.4.0", - "optionator": "^0.8.2", - "path-is-inside": "^1.0.2", - "progress": "^2.0.0", - "regexpp": "^2.0.1", - "semver": "^5.5.1", - "strip-ansi": "^4.0.0", - "strip-json-comments": "^2.0.1", - "table": "^5.2.3", - "text-table": "^0.2.0" - } - }, - "eslint-scope": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", - "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", - "dev": true, - "requires": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - }, - "espree": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", - "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", - "dev": true, - "requires": { - "acorn": "^6.0.7", - "acorn-jsx": "^5.0.0", - "eslint-visitor-keys": "^1.0.0" - } - }, - "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5" - } - }, - "glob": { - "version": "7.1.6", - 
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "inquirer": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", - "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", - "dev": true, - "requires": { - "ansi-escapes": "^3.2.0", - "chalk": "^2.4.2", - "cli-cursor": "^2.1.0", - "cli-width": "^2.0.0", - "external-editor": "^3.0.3", - "figures": "^2.0.0", - "lodash": "^4.17.12", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rxjs": "^6.4.0", - "string-width": "^2.1.0", - "strip-ansi": "^5.1.0", - "through": "^2.3.6" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true - }, - "minimist": { - "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", - "dev": true - }, - "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", - "dev": true, - "requires": { - "mimic-fn": "^1.0.0" - } - }, - "prettier": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", - "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", - "dev": true - }, - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": 
"sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - }, - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", - "dev": true - } - } - }, - "prettier-eslint-cli": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/prettier-eslint-cli/-/prettier-eslint-cli-5.0.0.tgz", - "integrity": "sha512-cei9UbN1aTrz3sQs88CWpvY/10PYTevzd76zoG1tdJ164OhmNTFRKPTOZrutVvscoQWzbnLKkviS3gu5JXwvZg==", - "dev": true, - "requires": { - "arrify": "^2.0.1", - "boolify": "^1.0.0", - "camelcase-keys": "^6.0.0", - "chalk": "^2.4.2", - "common-tags": "^1.8.0", - "core-js": "^3.1.4", - "eslint": "^5.0.0", - "find-up": "^4.1.0", - "get-stdin": "^7.0.0", - "glob": "^7.1.4", - "ignore": "^5.1.2", - "lodash.memoize": "^4.1.2", - "loglevel-colored-level-prefix": "^1.0.0", - "messageformat": "^2.2.1", - "prettier-eslint": "^9.0.0", - "rxjs": "^6.5.2", - "yargs": "^13.2.4" - }, - "dependencies": { - "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true - }, - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", - "dev": true, - "requires": { - "restore-cursor": "^2.0.0" - } - }, - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "eslint": { - "version": "5.16.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", - "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.0.0", - "ajv": "^6.9.1", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", - "debug": "^4.0.1", - "doctrine": "^3.0.0", - "eslint-scope": "^4.0.3", - "eslint-utils": "^1.3.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^5.0.1", - "esquery": "^1.0.1", - "esutils": "^2.0.2", - "file-entry-cache": "^5.0.1", - "functional-red-black-tree": "^1.0.1", - "glob": "^7.1.2", - "globals": "^11.7.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "inquirer": "^6.2.2", - "js-yaml": "^3.13.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.11", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", - "natural-compare": "^1.4.0", - "optionator": "^0.8.2", - "path-is-inside": "^1.0.2", - "progress": "^2.0.0", - "regexpp": "^2.0.1", - "semver": "^5.5.1", - "strip-ansi": "^4.0.0", - "strip-json-comments": "^2.0.1", - "table": "^5.2.3", - 
"text-table": "^0.2.0" - }, - "dependencies": { - "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true - } - } - }, - "eslint-scope": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", - "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", - "dev": true, - "requires": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - }, - "espree": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", - "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", - "dev": true, - "requires": { - "acorn": "^6.0.7", - "acorn-jsx": "^5.0.0", - "eslint-visitor-keys": "^1.0.0" - } - }, - "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5" - } - }, - "find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dev": true, - "requires": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - } - }, - "get-stdin": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-7.0.0.tgz", - "integrity": "sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==", - "dev": true - }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "ignore": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", - "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==", - "dev": true - }, - "inquirer": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", - "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", - "dev": true, - "requires": { - "ansi-escapes": "^3.2.0", - "chalk": "^2.4.2", - "cli-cursor": "^2.1.0", - "cli-width": "^2.0.0", - "external-editor": "^3.0.3", - "figures": "^2.0.0", - "lodash": "^4.17.12", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rxjs": "^6.4.0", - "string-width": "^2.1.0", - "strip-ansi": "^5.1.0", - "through": "^2.3.6" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - 
} - } - }, - "locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "requires": { - "p-locate": "^4.1.0" - } - }, - "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true - }, - "minimist": { - "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", - "dev": true - }, - "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", - "dev": true, - "requires": { - "mimic-fn": "^1.0.0" - } - }, - "p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "requires": { - "p-limit": "^2.2.0" - } - }, - "path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true - }, - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - }, - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", - "dev": true - } - } - }, "prettier-linter-helpers": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", @@ -5132,24 +3851,6 @@ "fast-diff": "^1.1.2" } }, - "pretty-format": { - "version": "23.6.0", - "resolved": 
"https://registry.npmjs.org/pretty-format/-/pretty-format-23.6.0.tgz", - "integrity": "sha512-zf9NV1NSlDLDjycnwm6hpFATCGl/K1lt0R/GdkAK2O5LN/rwJoB+Mh93gGJjut4YbmecbfgLWVGSTCr0Ewvvbw==", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0", - "ansi-styles": "^3.2.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - } - } - }, "pretty-ms": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", @@ -5177,17 +3878,6 @@ "tdigest": "^0.1.1" } }, - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "protobufjs": { "version": "6.10.2", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz", @@ -5258,16 +3948,10 @@ "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" }, - "quick-lru": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", - "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", - "dev": true - }, "ramda": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.0.tgz", - "integrity": "sha512-pVzZdDpWwWqEVVLshWUHjNwuVP7SfcmPraYuqocJp1yo2U1R7P+5QAfDhdItkuoGqIBnBYrtPp7rEPqDn9HlZA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.1.tgz", + "integrity": "sha512-PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw==", "dev": true }, "randombytes": { @@ -5335,11 +4019,6 @@ "strip-json-comments": "~2.0.1" }, "dependencies": { - "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" - }, "strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -5347,75 +4026,34 @@ } } }, - "react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", - "dev": true - }, "read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", "dev": true, "requires": { - "load-json-file": "^2.0.0", + "load-json-file": "^4.0.0", "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" + "path-type": "^3.0.0" } }, "read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", + "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", 
"dev": true, "requires": { "find-up": "^2.0.0", - "read-pkg": "^2.0.0" + "read-pkg": "^3.0.0" }, "dependencies": { - "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "dev": true, - "requires": { - "locate-path": "^2.0.0" - } - }, - "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "dev": true, - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - } - }, "p-limit": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "dev": true, "requires": { "p-try": "^1.0.0" } - }, - "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "dev": true, - "requires": { - "p-limit": "^1.1.0" - } - }, - "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", - "dev": true } } }, @@ -5456,26 +4094,10 @@ "redis-errors": "^1.0.0" } }, - "regenerator-runtime": { - "version": "0.13.5", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", - "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", - "dev": true - }, - "regexp.prototype.flags": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.0.tgz", - "integrity": "sha512-2+Q0C5g951OlYlJz6yu5/M33IcsESLlLfsyIaLJaG4FA2r4yP8MvVMJUUP/fVBkSpbbbZlS5gynbEWLipiiXiQ==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1" - } - }, "regexpp": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", - "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", "dev": true }, "request": { @@ -5528,6 +4150,12 @@ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", "dev": true }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, "require-in-the-middle": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz", @@ -5559,18 +4187,6 @@ "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=", "dev": true }, - "require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true - }, - "require-relative": { - "version": "0.8.7", - "resolved": "https://registry.npmjs.org/require-relative/-/require-relative-0.8.7.tgz", - "integrity": "sha1-eZlTn8ngR6N5KPoZb44VY9q9Nt4=", - "dev": true - }, "resolve": { 
"version": "1.15.1", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", @@ -5585,16 +4201,6 @@ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true }, - "restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", - "dev": true, - "requires": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - } - }, "retry-request": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz", @@ -5628,21 +4234,6 @@ "glob": "^6.0.1" } }, - "run-async": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", - "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", - "dev": true - }, - "rxjs": { - "version": "6.5.5", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.5.tgz", - "integrity": "sha512-WfQI+1gohdf0Dai/Bbmk5L5ItH5tYqm3ki2c5GdWhKjalzjg93N3avFjVStyZZz+A2Em+ZxKH5bNghw9UeylGQ==", - "dev": true, - "requires": { - "tslib": "^1.9.0" - } - }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", @@ -5746,18 +4337,18 @@ "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" }, "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dev": true, "requires": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" } }, "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "dev": true }, "shimmer": { @@ -5765,16 +4356,6 @@ "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", "integrity": "sha1-YQhZ994ye1h+/r9QH7QxF/mv8zc=" }, - "side-channel": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.2.tgz", - "integrity": "sha512-7rL9YlPHg7Ancea1S96Pa8/QWb4BtXL/TZvS6B8XFetGBeuhAsfmUspK6DokBeZ64+Kj9TCNRD/30pVz1BvQNA==", - "dev": true, - "requires": { - "es-abstract": "^1.17.0-next.1", - "object-inspect": "^1.7.0" - } - }, "signal-exit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", @@ -5819,14 +4400,21 @@ } }, "slice-ansi": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", - "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", "dev": true, "requires": { - "ansi-styles": 
"^3.2.0", - "astral-regex": "^1.0.0", - "is-fullwidth-code-point": "^2.0.0" + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true + } } }, "snakecase-keys": { @@ -5862,9 +4450,9 @@ } }, "spdx-correct": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", "dev": true, "requires": { "spdx-expression-parse": "^3.0.0", @@ -5872,15 +4460,15 @@ } }, "spdx-exceptions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", "dev": true }, "spdx-expression-parse": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", "dev": true, "requires": { "spdx-exceptions": "^2.1.0", @@ -5888,9 +4476,9 @@ } }, "spdx-license-ids": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.9.tgz", + "integrity": "sha512-Ki212dKK4ogX+xDo4CtOZBVIwhsKBEfsEEcwmJfLQzirgc2jIWdzg40Unxz/HzEUqM1WFzVlQSMF9kZZ2HboLQ==", "dev": true }, "split": { @@ -5953,70 +4541,33 @@ "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" }, "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "dev": true, "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - }, - "string.prototype.matchall": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.2.tgz", - "integrity": 
"sha512-N/jp6O5fMf9os0JU3E72Qhf590RSRZU/ungsL/qJUYVTNv7hTG0P/dbPjxINVN9jpscu3nzYwKESU3P3RY5tOg==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.0", - "has-symbols": "^1.0.1", - "internal-slot": "^1.0.2", - "regexp.prototype.flags": "^1.3.0", - "side-channel": "^1.0.2" + "emoji-regex": "^8.0.0", + "strip-ansi": "^6.0.0" } }, "string.prototype.trimend": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", - "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz", + "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==", "dev": true, "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5" - } - }, - "string.prototype.trimleft": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", - "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5", - "string.prototype.trimstart": "^1.0.0" - } - }, - "string.prototype.trimright": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", - "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5", - "string.prototype.trimend": "^1.0.0" + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" } }, "string.prototype.trimstart": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", - "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz", + "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==", "dev": true, "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5" + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" } }, "string_decoder": { @@ -6028,12 +4579,12 @@ } }, "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "dev": true, "requires": { - "ansi-regex": "^4.1.0" + "ansi-regex": "^5.0.0" } }, "strip-bom": { @@ -6063,15 +4614,30 @@ } }, "table": { - "version": "5.4.6", - "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", - "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/table/-/table-6.7.1.tgz", + "integrity": 
"sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg==", "dev": true, "requires": { - "ajv": "^6.10.2", - "lodash": "^4.17.14", - "slice-ansi": "^2.1.0", - "string-width": "^3.0.0" + "ajv": "^8.0.1", + "lodash.clonedeep": "^4.5.0", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ajv": { + "version": "8.6.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.6.1.tgz", + "integrity": "sha512-42VLtQUOLefAvKFAQIxIZDaThq6om/PrfP0CYk3/vn+y4BMNkKnbli8ON2QCiHov4KkzOSJ/xSoBJdayiiYvVQ==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + } } }, "tar": { @@ -6151,21 +4717,6 @@ "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==", "dev": true }, - "tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", - "dev": true, - "requires": { - "os-tmpdir": "~1.0.2" - } - }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", - "dev": true - }, "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", @@ -6210,11 +4761,16 @@ "punycode": "^2.1.1" } }, - "tslib": { - "version": "1.11.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.2.tgz", - "integrity": "sha512-tTSkux6IGPnUGUd1XAZHcpu85MOkIl5zX49pO+jfsie3eP0B6pyhOlLXm3cAC6T7s+euSDDUUV+Acop5WmtkVg==", - "dev": true + "tsconfig-paths": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.10.1.tgz", + "integrity": "sha512-rETidPDgCpltxF7MjBZlAFPUHv5aHH2MymyPvh+vEyWAED4Eb/WeMbsnD/JDr4OKPOA1TssDHgIcpTN5Kh0p6Q==", + "dev": true, + "requires": { + "json5": "^2.2.0", + "minimist": "^1.2.0", + "strip-bom": "^3.0.0" + } }, "tunnel-agent": { "version": "0.6.0", @@ -6230,24 +4786,24 @@ "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" }, "type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dev": true, "requires": { - "prelude-ls": "~1.1.2" + "prelude-ls": "^1.2.1" } }, "type-detect": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-1.0.0.tgz", - "integrity": "sha512-f9Uv6ezcpvCQjJU0Zqbg+65qdcszv3qUQsZfjdRbWiZ7AMenrX1u0lNk9EoWWX6e1F+NULyg27mtdeZ5WhpljA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", "dev": true }, "type-fest": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", - "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": 
"sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true }, "type-is": { @@ -6259,11 +4815,25 @@ "mime-types": "~2.1.24" } }, - "typescript": { - "version": "3.8.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz", - "integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==", - "dev": true + "unbox-primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", + "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has-bigints": "^1.0.1", + "has-symbols": "^1.0.2", + "which-boxed-primitive": "^1.0.2" + }, + "dependencies": { + "has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true + } + } }, "underscore": { "version": "1.6.0", @@ -6299,9 +4869,9 @@ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, "v8-compile-cache": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz", - "integrity": "sha512-8OQ9CL+VWyt3JStj7HX7/ciTL2V3Rl1Wf5OL+SNTm0yK1KvtReVulksyeRnCANHHuUxHlQig+JJDlUhBt1NQDQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", + "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", "dev": true }, "validate-npm-package-license": { @@ -6329,99 +4899,32 @@ "extsprintf": "^1.2.0" } }, - "vue-eslint-parser": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-2.0.3.tgz", - "integrity": "sha512-ZezcU71Owm84xVF6gfurBQUGg8WQ+WZGxgDEQu1IHFBZNx7BFZg3L1yHxrCBNNwbwFtE1GuvfJKMtb6Xuwc/Bw==", - "dev": true, - "requires": { - "debug": "^3.1.0", - "eslint-scope": "^3.7.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^3.5.2", - "esquery": "^1.0.0", - "lodash": "^4.17.4" - }, - "dependencies": { - "acorn": { - "version": "5.7.4", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", - "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", - "dev": true - }, - "acorn-jsx": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", - "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=", - "dev": true, - "requires": { - "acorn": "^3.0.4" - }, - "dependencies": { - "acorn": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", - "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", - "dev": true - } - } - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "eslint-scope": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.3.tgz", - "integrity": "sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA==", - "dev": true, - "requires": { - "esrecurse": "^4.1.0", - 
"estraverse": "^4.1.1" - } - }, - "espree": { - "version": "3.5.4", - "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", - "integrity": "sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A==", - "dev": true, - "requires": { - "acorn": "^5.5.0", - "acorn-jsx": "^3.0.0" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - } - } - }, "when": { "version": "3.7.8", "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", "integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I=" }, "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dev": true, "requires": { "isexe": "^2.0.0" } }, - "which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", - "dev": true + "which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "requires": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + } }, "wide-align": { "version": "1.1.3", @@ -6468,14 +4971,13 @@ "dev": true }, "wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" } }, "wrappy": { @@ -6483,30 +4985,6 @@ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, - "write": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", - "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", - "dev": true, - "requires": { - "mkdirp": "^0.5.1" - }, - "dependencies": { - "minimist": { - "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" - } - } - }, - "xregexp": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/xregexp/-/xregexp-4.3.0.tgz", - "integrity": "sha512-7jXDIFXh5yJ/orPn4SXjuVrWWoi4Cr8jfV1eHv9CixKSbU+jY4mxfrBwAuDvupPNKpMUY+FeIqsVw/JLT9+B8g==", - "dev": true, - "requires": { - "@babel/runtime-corejs3": "^7.8.3" - } - }, "y18n": { "version": "5.0.6", "resolved": 
"https://registry.npmjs.org/y18n/-/y18n-5.0.6.tgz", @@ -6519,40 +4997,25 @@ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "yargs": { - "version": "13.3.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", - "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, "requires": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.2" - }, - "dependencies": { - "y18n": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", - "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", - "dev": true - } + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" } }, "yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", - "dev": true, - "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true }, "yargs-unparser": { "version": "2.0.0", @@ -6564,20 +5027,6 @@ "decamelize": "^4.0.0", "flat": "^5.0.2", "is-plain-obj": "^2.1.0" - }, - "dependencies": { - "camelcase": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", - "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", - "dev": true - }, - "decamelize": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", - "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", - "dev": true - } } }, "yn": { diff --git a/services/document-updater/package.json b/services/document-updater/package.json index b033ddedfb..c46eb73c60 100644 --- a/services/document-updater/package.json +++ b/services/document-updater/package.json @@ -13,9 +13,10 @@ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "nodemon": "nodemon --config nodemon.json", - "lint": "node_modules/.bin/eslint --max-warnings 0 .", - "format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different", - "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write" + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.js'", + "format:fix": "prettier --write $PWD/'**/*.js'", + "lint:fix": "eslint --fix ." 
}, "dependencies": { "@overleaf/metrics": "^3.5.1", @@ -34,27 +35,21 @@ "requestretry": "^4.1.2" }, "devDependencies": { - "babel-eslint": "^10.1.0", - "chai": "^3.5.0", + "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", "cluster-key-slot": "^1.0.5", - "eslint": "^6.8.0", - "eslint-config-prettier": "^6.10.0", - "eslint-config-standard": "^14.1.0", - "eslint-config-standard-jsx": "^8.1.0", - "eslint-config-standard-react": "^9.2.0", - "eslint-plugin-chai-expect": "^2.1.0", - "eslint-plugin-chai-friendly": "^0.5.0", - "eslint-plugin-import": "^2.20.1", - "eslint-plugin-jsx-a11y": "^6.2.3", - "eslint-plugin-mocha": "^6.3.0", - "eslint-plugin-node": "^11.0.0", + "eslint": "^7.21.0", + "eslint-config-prettier": "^8.1.0", + "eslint-config-standard": "^16.0.2", + "eslint-plugin-chai-expect": "^2.2.0", + "eslint-plugin-chai-friendly": "^0.6.0", + "eslint-plugin-import": "^2.22.1", + "eslint-plugin-mocha": "^8.0.0", + "eslint-plugin-node": "^11.1.0", "eslint-plugin-prettier": "^3.1.2", "eslint-plugin-promise": "^4.2.1", - "eslint-plugin-react": "^7.19.0", - "eslint-plugin-standard": "^4.0.1", "mocha": "^8.3.2", - "prettier": "^2.0.0", - "prettier-eslint-cli": "^5.0.0", + "prettier": "^2.2.1", "sandboxed-module": "^2.0.4", "sinon": "^9.0.2", "timekeeper": "^2.0.0" From c532376e216a719107d33a1c74bfecccd31d9f9e Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 12:04:42 +0100 Subject: [PATCH 762/769] [misc] run format_fix and lint:fix --- services/document-updater/app.js | 30 +- .../app/js/DeleteQueueManager.js | 115 ++- services/document-updater/app/js/DiffCodec.js | 6 +- .../app/js/DispatchManager.js | 8 +- .../app/js/DocumentManager.js | 793 +++++++++--------- services/document-updater/app/js/Errors.js | 2 +- .../document-updater/app/js/HistoryManager.js | 82 +- .../app/js/HistoryRedisManager.js | 2 +- .../document-updater/app/js/HttpController.js | 92 +- .../document-updater/app/js/LockManager.js | 104 +-- .../app/js/LoggerSerializers.js | 8 +- .../app/js/PersistenceManager.js | 16 +- .../document-updater/app/js/ProjectFlusher.js | 60 +- .../app/js/ProjectHistoryRedisManager.js | 22 +- .../document-updater/app/js/ProjectManager.js | 33 +- .../document-updater/app/js/RangesManager.js | 6 +- .../document-updater/app/js/RangesTracker.js | 28 +- .../app/js/RateLimitManager.js | 6 +- .../app/js/RealTimeRedisManager.js | 4 +- .../document-updater/app/js/RedisManager.js | 472 ++++++----- services/document-updater/app/js/ShareJsDB.js | 2 +- .../app/js/ShareJsUpdateManager.js | 4 +- .../app/js/SnapshotManager.js | 4 +- .../document-updater/app/js/UpdateKeys.js | 2 +- .../document-updater/app/js/UpdateManager.js | 305 +++---- services/document-updater/app/js/mongodb.js | 2 +- .../document-updater/app/js/sharejs/count.js | 2 +- .../app/js/sharejs/helpers.js | 124 +-- .../app/js/sharejs/json-api.js | 2 +- .../document-updater/app/js/sharejs/json.js | 8 +- .../document-updater/app/js/sharejs/model.js | 49 +- .../app/js/sharejs/server/model.js | 49 +- .../document-updater/app/js/sharejs/simple.js | 2 +- .../app/js/sharejs/text-api.js | 4 +- .../app/js/sharejs/text-composable-api.js | 4 +- .../app/js/sharejs/text-composable.js | 2 +- .../app/js/sharejs/text-tp2-api.js | 4 +- .../app/js/sharejs/text-tp2.js | 4 +- .../document-updater/app/js/sharejs/text.js | 12 +- .../app/js/sharejs/types/count.js | 2 +- .../app/js/sharejs/types/helpers.js | 124 +-- .../app/js/sharejs/types/json-api.js | 2 +- .../app/js/sharejs/types/json.js | 8 +- .../app/js/sharejs/types/model.js | 49 +- 
.../app/js/sharejs/types/simple.js | 2 +- .../app/js/sharejs/types/text-api.js | 4 +- .../js/sharejs/types/text-composable-api.js | 4 +- .../app/js/sharejs/types/text-composable.js | 2 +- .../app/js/sharejs/types/text-tp2-api.js | 4 +- .../app/js/sharejs/types/text-tp2.js | 4 +- .../app/js/sharejs/types/text.js | 14 +- .../config/settings.defaults.js | 40 +- services/document-updater/expire_docops.js | 2 +- .../js/ApplyingUpdatesToADocTests.js | 136 +-- .../ApplyingUpdatesToProjectStructureTests.js | 76 +- .../acceptance/js/DeletingADocumentTests.js | 18 +- .../acceptance/js/DeletingAProjectTests.js | 54 +- .../acceptance/js/FlushingAProjectTests.js | 36 +- .../test/acceptance/js/FlushingDocsTests.js | 20 +- .../acceptance/js/GettingADocumentTests.js | 26 +- .../acceptance/js/GettingProjectDocsTests.js | 20 +- .../test/acceptance/js/RangesTests.js | 176 ++-- .../acceptance/js/SettingADocumentTests.js | 46 +- .../test/acceptance/js/SizeCheckTests.js | 18 +- .../acceptance/js/helpers/DocUpdaterApp.js | 4 +- .../acceptance/js/helpers/DocUpdaterClient.js | 20 +- .../js/helpers/MockProjectHistoryApi.js | 6 +- .../js/helpers/MockTrackChangesApi.js | 8 +- .../test/acceptance/js/helpers/MockWebApi.js | 8 +- .../js/test_blpop_failover.js | 18 +- .../js/test_pubsub_failover.js | 14 +- services/document-updater/test/setup.js | 10 +- .../document-updater/test/stress/js/run.js | 27 +- .../test/unit/js/DiffCodec/DiffCodecTests.js | 12 +- .../DispatchManager/DispatchManagerTests.js | 16 +- .../DocumentManager/DocumentManagerTests.js | 24 +- .../js/HistoryManager/HistoryManagerTests.js | 28 +- .../HistoryRedisManagerTests.js | 14 +- .../js/HttpController/HttpControllerTests.js | 80 +- .../unit/js/LockManager/CheckingTheLock.js | 6 +- .../unit/js/LockManager/ReleasingTheLock.js | 14 +- .../test/unit/js/LockManager/getLockTests.js | 6 +- .../test/unit/js/LockManager/tryLockTests.js | 36 +- .../PersistenceManagerTests.js | 24 +- .../ProjectHistoryRedisManagerTests.js | 26 +- .../flushAndDeleteProjectTests.js | 16 +- .../js/ProjectManager/flushProjectTests.js | 14 +- .../js/ProjectManager/getProjectDocsTests.js | 18 +- .../js/ProjectManager/updateProjectTests.js | 34 +- .../js/RangesManager/RangesManagerTests.js | 158 ++-- .../js/RateLimitManager/RateLimitManager.js | 16 +- .../RealTimeRedisManagerTests.js | 28 +- .../unit/js/RedisManager/RedisManagerTests.js | 54 +- .../unit/js/ShareJS/TextTransformTests.js | 4 +- .../test/unit/js/ShareJsDB/ShareJsDBTests.js | 8 +- .../ShareJsUpdateManagerTests.js | 32 +- .../js/UpdateManager/UpdateManagerTests.js | 40 +- 97 files changed, 2113 insertions(+), 2071 deletions(-) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index 553ab9403b..c724b74d33 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -114,7 +114,7 @@ const pubsubClient = require('@overleaf/redis-wrapper').createClient( Settings.redis.pubsub ) app.get('/health_check/redis', (req, res, next) => { - pubsubClient.healthCheck((error) => { + pubsubClient.healthCheck(error => { if (error) { logger.err({ err: error }, 'failed redis health check') return res.sendStatus(500) @@ -128,7 +128,7 @@ const docUpdaterRedisClient = require('@overleaf/redis-wrapper').createClient( Settings.redis.documentupdater ) app.get('/health_check/redis_cluster', (req, res, next) => { - docUpdaterRedisClient.healthCheck((error) => { + docUpdaterRedisClient.healthCheck(error => { if (error) { logger.err({ err: error }, 'failed redis cluster health check') return 
res.sendStatus(500) @@ -141,32 +141,32 @@ app.get('/health_check/redis_cluster', (req, res, next) => { app.get('/health_check', (req, res, next) => { async.series( [ - (cb) => { - pubsubClient.healthCheck((error) => { + cb => { + pubsubClient.healthCheck(error => { if (error) { logger.err({ err: error }, 'failed redis health check') } cb(error) }) }, - (cb) => { - docUpdaterRedisClient.healthCheck((error) => { + cb => { + docUpdaterRedisClient.healthCheck(error => { if (error) { logger.err({ err: error }, 'failed redis cluster health check') } cb(error) }) }, - (cb) => { - mongodb.healthCheck((error) => { + cb => { + mongodb.healthCheck(error => { if (error) { logger.err({ err: error }, 'failed mongo health check') } cb(error) }) - } + }, ], - (error) => { + error => { if (error) { return res.sendStatus(500) } else { @@ -189,7 +189,7 @@ app.use((error, req, res, next) => { } }) -const shutdownCleanly = (signal) => () => { +const shutdownCleanly = signal => () => { logger.log({ signal }, 'received interrupt, cleaning up') Settings.shuttingDown = true setTimeout(() => { @@ -198,8 +198,8 @@ const shutdownCleanly = (signal) => () => { }, 10000) } -const watchForEvent = (eventName) => { - docUpdaterRedisClient.on(eventName, (e) => { +const watchForEvent = eventName => { + docUpdaterRedisClient.on(eventName, e => { console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console }) } @@ -236,7 +236,7 @@ if (!module.parent) { } }) }) - .catch((err) => { + .catch(err => { logger.fatal({ err }, 'Cannot connect to mongo. Exiting.') process.exit(1) }) @@ -251,7 +251,7 @@ for (const signal of [ 'SIGUSR1', 'SIGUSR2', 'SIGTERM', - 'SIGABRT' + 'SIGABRT', ]) { process.on(signal, shutdownCleanly(signal)) } diff --git a/services/document-updater/app/js/DeleteQueueManager.js b/services/document-updater/app/js/DeleteQueueManager.js index 36466f8b10..492f8d3360 100644 --- a/services/document-updater/app/js/DeleteQueueManager.js +++ b/services/document-updater/app/js/DeleteQueueManager.js @@ -43,44 +43,44 @@ module.exports = DeleteQueueManager = { let count = 0 const flushProjectIfNotModified = (project_id, flushTimestamp, cb) => - ProjectManager.getProjectDocsTimestamps(project_id, function ( - err, - timestamps - ) { - if (err != null) { - return callback(err) - } - if (timestamps.length === 0) { - logger.log( - { project_id }, - 'skipping flush of queued project - no timestamps' - ) - return cb() - } - // are any of the timestamps newer than the time the project was flushed? - for (const timestamp of Array.from(timestamps)) { - if (timestamp > flushTimestamp) { - metrics.inc('queued-delete-skipped') - logger.debug( - { project_id, timestamps, flushTimestamp }, - 'found newer timestamp, will skip delete' + ProjectManager.getProjectDocsTimestamps( + project_id, + function (err, timestamps) { + if (err != null) { + return callback(err) + } + if (timestamps.length === 0) { + logger.log( + { project_id }, + 'skipping flush of queued project - no timestamps' ) return cb() } - } - logger.log({ project_id, flushTimestamp }, 'flushing queued project') - return ProjectManager.flushAndDeleteProjectWithLocks( - project_id, - { skip_history_flush: false }, - function (err) { - if (err != null) { - logger.err({ project_id, err }, 'error flushing queued project') + // are any of the timestamps newer than the time the project was flushed? 
+ for (const timestamp of Array.from(timestamps)) { + if (timestamp > flushTimestamp) { + metrics.inc('queued-delete-skipped') + logger.debug( + { project_id, timestamps, flushTimestamp }, + 'found newer timestamp, will skip delete' + ) + return cb() } - metrics.inc('queued-delete-completed') - return cb(null, true) } - ) - }) + logger.log({ project_id, flushTimestamp }, 'flushing queued project') + return ProjectManager.flushAndDeleteProjectWithLocks( + project_id, + { skip_history_flush: false }, + function (err) { + if (err != null) { + logger.err({ project_id, err }, 'error flushing queued project') + } + metrics.inc('queued-delete-completed') + return cb(null, true) + } + ) + } + ) var flushNextProject = function () { const now = Date.now() @@ -92,30 +92,29 @@ module.exports = DeleteQueueManager = { logger.log('hit count limit on flushing old projects') return callback(null, count) } - return RedisManager.getNextProjectToFlushAndDelete(cutoffTime, function ( - err, - project_id, - flushTimestamp, - queueLength - ) { - if (err != null) { - return callback(err) - } - if (project_id == null) { - return callback(null, count) - } - logger.log({ project_id, queueLength }, 'flushing queued project') - metrics.globalGauge('queued-flush-backlog', queueLength) - return flushProjectIfNotModified(project_id, flushTimestamp, function ( - err, - flushed - ) { - if (flushed) { - count++ + return RedisManager.getNextProjectToFlushAndDelete( + cutoffTime, + function (err, project_id, flushTimestamp, queueLength) { + if (err != null) { + return callback(err) } - return flushNextProject() - }) - }) + if (project_id == null) { + return callback(null, count) + } + logger.log({ project_id, queueLength }, 'flushing queued project') + metrics.globalGauge('queued-flush-backlog', queueLength) + return flushProjectIfNotModified( + project_id, + flushTimestamp, + function (err, flushed) { + if (flushed) { + count++ + } + return flushNextProject() + } + ) + } + ) } return flushNextProject() @@ -133,12 +132,12 @@ module.exports = DeleteQueueManager = { { timeout: 1000, min_delete_age: 3 * 60 * 1000, - limit: 1000 // high value, to ensure we always flush enough projects + limit: 1000, // high value, to ensure we always flush enough projects }, (err, flushed) => setTimeout(doFlush, flushed > 10 ? 
SHORT_DELAY : LONG_DELAY) ) } return doFlush() - } + }, } diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js index 59b7dee67b..5c017f0d4e 100644 --- a/services/document-updater/app/js/DiffCodec.js +++ b/services/document-updater/app/js/DiffCodec.js @@ -21,13 +21,13 @@ module.exports = { if (type === this.ADDED) { ops.push({ i: content, - p: position + p: position, }) position += content.length } else if (type === this.REMOVED) { ops.push({ d: content, - p: position + p: position, }) } else if (type === this.UNCHANGED) { position += content.length @@ -36,5 +36,5 @@ module.exports = { } } callback(null, ops) - } + }, } diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js index e94e6cde0e..d567fade2e 100644 --- a/services/document-updater/app/js/DispatchManager.js +++ b/services/document-updater/app/js/DispatchManager.js @@ -57,7 +57,7 @@ module.exports = DispatchManager = { Keys.splitProjectIdAndDocId(doc_key) ) // Dispatch this in the background - const backgroundTask = (cb) => + const backgroundTask = cb => UpdateManager.processOutstandingUpdatesWithLock( project_id, doc_id, @@ -91,7 +91,7 @@ module.exports = DispatchManager = { if (Settings.shuttingDown) { return } - return worker._waitForUpdateThenDispatchWorker((error) => { + return worker._waitForUpdateThenDispatchWorker(error => { if (error != null) { logger.error({ err: error }, 'Error in worker process') throw error @@ -99,7 +99,7 @@ module.exports = DispatchManager = { return worker.run() } }) - } + }, } return worker @@ -110,5 +110,5 @@ module.exports = DispatchManager = { _.times(number, function (shardNumber) { return DispatchManager.createDispatcher(RateLimiter, shardNumber).run() }) - } + }, } diff --git a/services/document-updater/app/js/DocumentManager.js b/services/document-updater/app/js/DocumentManager.js index b6c4510f9f..a79d4aa187 100644 --- a/services/document-updater/app/js/DocumentManager.js +++ b/services/document-updater/app/js/DocumentManager.js @@ -47,94 +47,102 @@ module.exports = DocumentManager = { return _callback(...Array.from(args || [])) } - return RedisManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges, - pathname, - projectHistoryId, - unflushedTime - ) { - if (error != null) { - return callback(error) - } - if (lines == null || version == null) { - logger.log( - { project_id, doc_id }, - 'doc not in redis so getting from persistence API' - ) - return PersistenceManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges, - pathname, - projectHistoryId, - projectHistoryType - ) { - if (error != null) { - return callback(error) - } + return RedisManager.getDoc( + project_id, + doc_id, + function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime + ) { + if (error != null) { + return callback(error) + } + if (lines == null || version == null) { logger.log( - { - project_id, - doc_id, + { project_id, doc_id }, + 'doc not in redis so getting from persistence API' + ) + return PersistenceManager.getDoc( + project_id, + doc_id, + function ( + error, lines, version, + ranges, pathname, projectHistoryId, projectHistoryType - }, - 'got doc from persistence API' - ) - return RedisManager.putDocInMemory( - project_id, - doc_id, - lines, - version, - ranges, - pathname, - projectHistoryId, - function (error) { + ) { if (error != null) { return callback(error) } - return RedisManager.setHistoryType( + 
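// The read path here in outline (an illustrative sketch with simplified
// signatures, not the full argument lists used above): Redis acts as the
// cache, the persistence API is the source of truth, and a cache miss
// populates Redis before returning.
function getDocOutline(projectId, docId, cb) {
  RedisManager.getDoc(projectId, docId, (err, lines, version) => {
    if (err) return cb(err)
    if (lines != null && version != null) return cb(null, lines, version) // cache hit
    PersistenceManager.getDoc(projectId, docId, (err, lines, version) => {
      if (err) return cb(err)
      // cache the doc so subsequent reads and updates hit Redis only
      RedisManager.putDocInMemory(projectId, docId, lines, version, err =>
        err ? cb(err) : cb(null, lines, version)
      )
    })
  })
}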
logger.log( + { + project_id, + doc_id, + lines, + version, + pathname, + projectHistoryId, + projectHistoryType, + }, + 'got doc from persistence API' + ) + return RedisManager.putDocInMemory( + project_id, doc_id, - projectHistoryType, + lines, + version, + ranges, + pathname, + projectHistoryId, function (error) { if (error != null) { return callback(error) } - return callback( - null, - lines, - version, - ranges || {}, - pathname, - projectHistoryId, - null, - false + return RedisManager.setHistoryType( + doc_id, + projectHistoryType, + function (error) { + if (error != null) { + return callback(error) + } + return callback( + null, + lines, + version, + ranges || {}, + pathname, + projectHistoryId, + null, + false + ) + } ) } ) } ) - }) - } else { - return callback( - null, - lines, - version, - ranges, - pathname, - projectHistoryId, - unflushedTime, - true - ) + } else { + return callback( + null, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + true + ) + } } - }) + ) }, getDocAndRecentOps(project_id, doc_id, fromVersion, _callback) { @@ -155,49 +163,46 @@ module.exports = DocumentManager = { return _callback(...Array.from(args || [])) } - return DocumentManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges, - pathname, - projectHistoryId - ) { - if (error != null) { - return callback(error) - } - if (fromVersion === -1) { - return callback( - null, - lines, - version, - [], - ranges, - pathname, - projectHistoryId - ) - } else { - return RedisManager.getPreviousDocOps( - doc_id, - fromVersion, - version, - function (error, ops) { - if (error != null) { - return callback(error) + return DocumentManager.getDoc( + project_id, + doc_id, + function (error, lines, version, ranges, pathname, projectHistoryId) { + if (error != null) { + return callback(error) + } + if (fromVersion === -1) { + return callback( + null, + lines, + version, + [], + ranges, + pathname, + projectHistoryId + ) + } else { + return RedisManager.getPreviousDocOps( + doc_id, + fromVersion, + version, + function (error, ops) { + if (error != null) { + return callback(error) + } + return callback( + null, + lines, + version, + ops, + ranges, + pathname, + projectHistoryId + ) } - return callback( - null, - lines, - version, - ops, - ranges, - pathname, - projectHistoryId - ) - } - ) + ) + } } - }) + ) }, setDoc(project_id, doc_id, newLines, source, user_id, undoing, _callback) { @@ -215,95 +220,107 @@ module.exports = DocumentManager = { } const UpdateManager = require('./UpdateManager') - return DocumentManager.getDoc(project_id, doc_id, function ( - error, - oldLines, - version, - ranges, - pathname, - projectHistoryId, - unflushedTime, - alreadyLoaded - ) { - if (error != null) { - return callback(error) - } - - if (oldLines != null && oldLines.length > 0 && oldLines[0].text != null) { - logger.log( - { doc_id, project_id, oldLines, newLines }, - 'document is JSON so not updating' - ) - return callback(null) - } - - logger.log( - { doc_id, project_id, oldLines, newLines }, - 'setting a document via http' - ) - return DiffCodec.diffAsShareJsOp(oldLines, newLines, function ( + return DocumentManager.getDoc( + project_id, + doc_id, + function ( error, - op + oldLines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + alreadyLoaded ) { if (error != null) { return callback(error) } - if (undoing) { - for (const o of Array.from(op || [])) { - o.u = true - } // Turn on undo flag for each op for track changes - } - const update = { - doc: 
doc_id, - op, - v: version, - meta: { - type: 'external', - source, - user_id - } - } - return UpdateManager.applyUpdate(project_id, doc_id, update, function ( - error - ) { - if (error != null) { - return callback(error) - } - // If the document was loaded already, then someone has it open - // in a project, and the usual flushing mechanism will happen. - // Otherwise we should remove it immediately since nothing else - // is using it. - if (alreadyLoaded) { - return DocumentManager.flushDocIfLoaded( - project_id, - doc_id, - function (error) { - if (error != null) { - return callback(error) - } - return callback(null) - } - ) - } else { - return DocumentManager.flushAndDeleteDoc( - project_id, - doc_id, - {}, - function (error) { - // There is no harm in flushing project history if the previous - // call failed and sometimes it is required - HistoryManager.flushProjectChangesAsync(project_id) + if ( + oldLines != null && + oldLines.length > 0 && + oldLines[0].text != null + ) { + logger.log( + { doc_id, project_id, oldLines, newLines }, + 'document is JSON so not updating' + ) + return callback(null) + } + + logger.log( + { doc_id, project_id, oldLines, newLines }, + 'setting a document via http' + ) + return DiffCodec.diffAsShareJsOp( + oldLines, + newLines, + function (error, op) { + if (error != null) { + return callback(error) + } + if (undoing) { + for (const o of Array.from(op || [])) { + o.u = true + } // Turn on undo flag for each op for track changes + } + const update = { + doc: doc_id, + op, + v: version, + meta: { + type: 'external', + source, + user_id, + }, + } + return UpdateManager.applyUpdate( + project_id, + doc_id, + update, + function (error) { if (error != null) { return callback(error) } - return callback(null) + // If the document was loaded already, then someone has it open + // in a project, and the usual flushing mechanism will happen. + // Otherwise we should remove it immediately since nothing else + // is using it. + if (alreadyLoaded) { + return DocumentManager.flushDocIfLoaded( + project_id, + doc_id, + function (error) { + if (error != null) { + return callback(error) + } + return callback(null) + } + ) + } else { + return DocumentManager.flushAndDeleteDoc( + project_id, + doc_id, + {}, + function (error) { + // There is no harm in flushing project history if the previous + // call failed and sometimes it is required + HistoryManager.flushProjectChangesAsync(project_id) + + if (error != null) { + return callback(error) + } + return callback(null) + } + ) + } } ) } - }) - }) - }) + ) + } + ) }, flushDocIfLoaded(project_id, doc_id, _callback) { @@ -315,42 +332,49 @@ module.exports = DocumentManager = { timer.done() return _callback(...Array.from(args || [])) } - return RedisManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges, - pathname, - projectHistoryId, - unflushedTime, - lastUpdatedAt, - lastUpdatedBy - ) { - if (error != null) { - return callback(error) - } - if (lines == null || version == null) { - logger.log({ project_id, doc_id }, 'doc is not loaded so not flushing') - return callback(null) // TODO: return a flag to bail out, as we go on to remove doc from memory? 
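// Flush semantics in outline (illustrative, simplified signatures): only a
// doc that is actually loaded in Redis is written back to the persistence
// API, and a successful write clears its unflushed timestamp so the
// background flusher will not pick it up again.
function flushOutline(projectId, docId, cb) {
  RedisManager.getDoc(projectId, docId, (err, lines, version) => {
    if (err) return cb(err)
    if (lines == null || version == null) return cb(null) // not loaded, nothing to flush
    PersistenceManager.setDoc(projectId, docId, lines, version, err => {
      if (err) return cb(err)
      RedisManager.clearUnflushedTime(docId, cb)
    })
  })
}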
- } else { - logger.log({ project_id, doc_id, version }, 'flushing doc') - return PersistenceManager.setDoc( - project_id, - doc_id, - lines, - version, - ranges, - lastUpdatedAt, - lastUpdatedBy, - function (error) { - if (error != null) { - return callback(error) + return RedisManager.getDoc( + project_id, + doc_id, + function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy + ) { + if (error != null) { + return callback(error) + } + if (lines == null || version == null) { + logger.log( + { project_id, doc_id }, + 'doc is not loaded so not flushing' + ) + return callback(null) // TODO: return a flag to bail out, as we go on to remove doc from memory? + } else { + logger.log({ project_id, doc_id, version }, 'flushing doc') + return PersistenceManager.setDoc( + project_id, + doc_id, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy, + function (error) { + if (error != null) { + return callback(error) + } + return RedisManager.clearUnflushedTime(doc_id, callback) } - return RedisManager.clearUnflushedTime(doc_id, callback) - } - ) + ) + } } - }) + ) }, flushAndDeleteDoc(project_id, doc_id, options, _callback) { @@ -360,32 +384,36 @@ module.exports = DocumentManager = { return _callback(...Array.from(args || [])) } - return DocumentManager.flushDocIfLoaded(project_id, doc_id, function ( - error - ) { - if (error != null) { - if (options.ignoreFlushErrors) { - logger.warn( - { project_id, doc_id, err: error }, - 'ignoring flush error while deleting document' - ) - } else { - return callback(error) - } - } - - // Flush in the background since it requires a http request - HistoryManager.flushDocChangesAsync(project_id, doc_id) - - return RedisManager.removeDocFromMemory(project_id, doc_id, function ( - error - ) { + return DocumentManager.flushDocIfLoaded( + project_id, + doc_id, + function (error) { if (error != null) { - return callback(error) + if (options.ignoreFlushErrors) { + logger.warn( + { project_id, doc_id, err: error }, + 'ignoring flush error while deleting document' + ) + } else { + return callback(error) + } } - return callback(null) - }) - }) + + // Flush in the background since it requires a http request + HistoryManager.flushDocChangesAsync(project_id, doc_id) + + return RedisManager.removeDocFromMemory( + project_id, + doc_id, + function (error) { + if (error != null) { + return callback(error) + } + return callback(null) + } + ) + } + ) }, acceptChanges(project_id, doc_id, change_ids, _callback) { @@ -401,44 +429,44 @@ module.exports = DocumentManager = { return _callback(...Array.from(args || [])) } - return DocumentManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges - ) { - if (error != null) { - return callback(error) - } - if (lines == null || version == null) { - return callback( - new Errors.NotFoundError(`document not found: ${doc_id}`) - ) - } - return RangesManager.acceptChanges(change_ids, ranges, function ( - error, - new_ranges - ) { + return DocumentManager.getDoc( + project_id, + doc_id, + function (error, lines, version, ranges) { if (error != null) { return callback(error) } - return RedisManager.updateDocument( - project_id, - doc_id, - lines, - version, - [], - new_ranges, - {}, - function (error) { + if (lines == null || version == null) { + return callback( + new Errors.NotFoundError(`document not found: ${doc_id}`) + ) + } + return RangesManager.acceptChanges( + change_ids, + ranges, + function (error, new_ranges) { if (error != null) { 
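// Accepting tracked changes in outline (illustrative, simplified signatures):
// only the ranges metadata changes; the lines and version are written back
// unchanged with an empty op list, so no new operation is pushed to the
// history queues.
function acceptOutline(projectId, docId, changeIds, cb) {
  DocumentManager.getDoc(projectId, docId, (err, lines, version, ranges) => {
    if (err) return cb(err)
    RangesManager.acceptChanges(changeIds, ranges, (err, newRanges) => {
      if (err) return cb(err)
      // empty ops array: the document content itself is untouched
      RedisManager.updateDocument(projectId, docId, lines, version, [], newRanges, {}, cb)
    })
  })
}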
return callback(error) } - return callback() + return RedisManager.updateDocument( + project_id, + doc_id, + lines, + version, + [], + new_ranges, + {}, + function (error) { + if (error != null) { + return callback(error) + } + return callback() + } + ) } ) - }) - }) + } + ) }, deleteComment(project_id, doc_id, comment_id, _callback) { @@ -451,44 +479,44 @@ module.exports = DocumentManager = { return _callback(...Array.from(args || [])) } - return DocumentManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges - ) { - if (error != null) { - return callback(error) - } - if (lines == null || version == null) { - return callback( - new Errors.NotFoundError(`document not found: ${doc_id}`) - ) - } - return RangesManager.deleteComment(comment_id, ranges, function ( - error, - new_ranges - ) { + return DocumentManager.getDoc( + project_id, + doc_id, + function (error, lines, version, ranges) { if (error != null) { return callback(error) } - return RedisManager.updateDocument( - project_id, - doc_id, - lines, - version, - [], - new_ranges, - {}, - function (error) { + if (lines == null || version == null) { + return callback( + new Errors.NotFoundError(`document not found: ${doc_id}`) + ) + } + return RangesManager.deleteComment( + comment_id, + ranges, + function (error, new_ranges) { if (error != null) { return callback(error) } - return callback() + return RedisManager.updateDocument( + project_id, + doc_id, + lines, + version, + [], + new_ranges, + {}, + function (error) { + if (error != null) { + return callback(error) + } + return callback() + } + ) } ) - }) - }) + } + ) }, renameDoc(project_id, doc_id, user_id, update, projectHistoryId, _callback) { @@ -515,73 +543,94 @@ module.exports = DocumentManager = { if (callback == null) { callback = function (error, doc) {} } - return DocumentManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges, - pathname, - projectHistoryId, - unflushedTime, - alreadyLoaded - ) { - if (error != null) { - return callback(error) - } - // if doc was already loaded see if it needs to be flushed - if ( - alreadyLoaded && - unflushedTime != null && - Date.now() - unflushedTime > MAX_UNFLUSHED_AGE + return DocumentManager.getDoc( + project_id, + doc_id, + function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + alreadyLoaded ) { - return DocumentManager.flushDocIfLoaded(project_id, doc_id, function ( - error + if (error != null) { + return callback(error) + } + // if doc was already loaded see if it needs to be flushed + if ( + alreadyLoaded && + unflushedTime != null && + Date.now() - unflushedTime > MAX_UNFLUSHED_AGE ) { - if (error != null) { - return callback(error) - } + return DocumentManager.flushDocIfLoaded( + project_id, + doc_id, + function (error) { + if (error != null) { + return callback(error) + } + return callback(null, lines, version) + } + ) + } else { return callback(null, lines, version) - }) - } else { - return callback(null, lines, version) + } } - }) + ) }, resyncDocContents(project_id, doc_id, callback) { logger.log({ project_id, doc_id }, 'start resyncing doc contents') - return RedisManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges, - pathname, - projectHistoryId - ) { - if (error != null) { - return callback(error) - } + return RedisManager.getDoc( + project_id, + doc_id, + function (error, lines, version, ranges, pathname, projectHistoryId) { + if (error != null) { + return callback(error) + } - if (lines == 
null || version == null) { - logger.log( - { project_id, doc_id }, - 'resyncing doc contents - not found in redis - retrieving from web' - ) - return PersistenceManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges, - pathname, - projectHistoryId - ) { - if (error != null) { - logger.error( - { project_id, doc_id, getDocError: error }, - 'resyncing doc contents - error retrieving from web' - ) - return callback(error) - } + if (lines == null || version == null) { + logger.log( + { project_id, doc_id }, + 'resyncing doc contents - not found in redis - retrieving from web' + ) + return PersistenceManager.getDoc( + project_id, + doc_id, + function ( + error, + lines, + version, + ranges, + pathname, + projectHistoryId + ) { + if (error != null) { + logger.error( + { project_id, doc_id, getDocError: error }, + 'resyncing doc contents - error retrieving from web' + ) + return callback(error) + } + return ProjectHistoryRedisManager.queueResyncDocContent( + project_id, + projectHistoryId, + doc_id, + lines, + version, + pathname, + callback + ) + } + ) + } else { + logger.log( + { project_id, doc_id }, + 'resyncing doc contents - doc in redis - will queue in redis' + ) return ProjectHistoryRedisManager.queueResyncDocContent( project_id, projectHistoryId, @@ -591,23 +640,9 @@ module.exports = DocumentManager = { pathname, callback ) - }) - } else { - logger.log( - { project_id, doc_id }, - 'resyncing doc contents - doc in redis - will queue in redis' - ) - return ProjectHistoryRedisManager.queueResyncDocContent( - project_id, - projectHistoryId, - doc_id, - lines, - version, - pathname, - callback - ) + } } - }) + ) }, getDocWithLock(project_id, doc_id, callback) { @@ -769,5 +804,5 @@ module.exports = DocumentManager = { doc_id, callback ) - } + }, } diff --git a/services/document-updater/app/js/Errors.js b/services/document-updater/app/js/Errors.js index c67c2c8422..78afd925bf 100644 --- a/services/document-updater/app/js/Errors.js +++ b/services/document-updater/app/js/Errors.js @@ -41,5 +41,5 @@ module.exports = Errors = { NotFoundError, OpRangeNotAvailableError, ProjectStateChangedError, - DeleteMismatchError + DeleteMismatchError, } diff --git a/services/document-updater/app/js/HistoryManager.js b/services/document-updater/app/js/HistoryManager.js index 4b7de3f5af..107e81d979 100644 --- a/services/document-updater/app/js/HistoryManager.js +++ b/services/document-updater/app/js/HistoryManager.js @@ -32,44 +32,44 @@ module.exports = HistoryManager = { ) return } - return RedisManager.getHistoryType(doc_id, function ( - err, - projectHistoryType - ) { - if (err != null) { - logger.warn({ err, doc_id }, 'error getting history type') + return RedisManager.getHistoryType( + doc_id, + function (err, projectHistoryType) { + if (err != null) { + logger.warn({ err, doc_id }, 'error getting history type') + } + // if there's an error continue and flush to track-changes for safety + if ( + Settings.disableDoubleFlush && + projectHistoryType === 'project-history' + ) { + return logger.debug( + { doc_id, projectHistoryType }, + 'skipping track-changes flush' + ) + } else { + metrics.inc('history-flush', 1, { status: 'track-changes' }) + const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush` + logger.log( + { project_id, doc_id, url, projectHistoryType }, + 'flushing doc in track changes api' + ) + return request.post(url, function (error, res, body) { + if (error != null) { + return logger.error( + { error, doc_id, project_id }, + 'track 
changes doc to track changes api' + ) + } else if (res.statusCode < 200 && res.statusCode >= 300) { + return logger.error( + { doc_id, project_id }, + `track changes api returned a failure status code: ${res.statusCode}` + ) + } + }) + } } - // if there's an error continue and flush to track-changes for safety - if ( - Settings.disableDoubleFlush && - projectHistoryType === 'project-history' - ) { - return logger.debug( - { doc_id, projectHistoryType }, - 'skipping track-changes flush' - ) - } else { - metrics.inc('history-flush', 1, { status: 'track-changes' }) - const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush` - logger.log( - { project_id, doc_id, url, projectHistoryType }, - 'flushing doc in track changes api' - ) - return request.post(url, function (error, res, body) { - if (error != null) { - return logger.error( - { error, doc_id, project_id }, - 'track changes doc to track changes api' - ) - } else if (res.statusCode < 200 && res.statusCode >= 300) { - return logger.error( - { doc_id, project_id }, - `track changes api returned a failure status code: ${res.statusCode}` - ) - } - }) - } - }) + ) }, // flush changes in the background @@ -77,7 +77,7 @@ module.exports = HistoryManager = { if ( !__guard__( Settings.apis != null ? Settings.apis.project_history : undefined, - (x) => x.enabled + x => x.enabled ) ) { return @@ -97,7 +97,7 @@ module.exports = HistoryManager = { if ( !__guard__( Settings.apis != null ? Settings.apis.project_history : undefined, - (x) => x.enabled + x => x.enabled ) ) { return callback() @@ -157,7 +157,7 @@ module.exports = HistoryManager = { if ( __guard__( Settings.apis != null ? Settings.apis.project_history : undefined, - (x) => x.enabled + x => x.enabled ) ) { if ( @@ -253,7 +253,7 @@ module.exports = HistoryManager = { ) } ) - } + }, } function __guard__(value, transform) { diff --git a/services/document-updater/app/js/HistoryRedisManager.js b/services/document-updater/app/js/HistoryRedisManager.js index bd7b3672f4..1979b89013 100644 --- a/services/document-updater/app/js/HistoryRedisManager.js +++ b/services/document-updater/app/js/HistoryRedisManager.js @@ -41,5 +41,5 @@ module.exports = HistoryRedisManager = { return callback() } ) - } + }, } diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 9959a84b07..6bffb6ec4a 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -24,7 +24,7 @@ module.exports = { updateProject, resyncProjectHistory, flushAllProjects, - flushQueuedProjects + flushQueuedProjects, } function getDoc(req, res, next) { @@ -59,7 +59,7 @@ function getDoc(req, res, next) { version, ops, ranges, - pathname + pathname, }) } ) @@ -104,7 +104,7 @@ function getProjectDocsAndFlushIfOld(req, res, next) { logger.log( { projectId, - result: result.map((doc) => `${doc._id}:${doc.v}`) + result: result.map(doc => `${doc._id}:${doc.v}`), }, 'got docs via http' ) @@ -118,7 +118,7 @@ function clearProjectState(req, res, next) { const projectId = req.params.project_id const timer = new Metrics.Timer('http.clearProjectState') logger.log({ projectId }, 'clearing project state via http') - ProjectManager.clearProjectState(projectId, (error) => { + ProjectManager.clearProjectState(projectId, error => { timer.done() if (error) { next(error) @@ -152,7 +152,7 @@ function setDoc(req, res, next) { source, userId, undoing, - (error) => { + error => { timer.done() if (error) { return 
next(error) @@ -168,7 +168,7 @@ function flushDocIfLoaded(req, res, next) { const projectId = req.params.project_id logger.log({ projectId, docId }, 'flushing doc via http') const timer = new Metrics.Timer('http.flushDoc') - DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => { + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => { timer.done() if (error) { return next(error) @@ -188,7 +188,7 @@ function deleteDoc(req, res, next) { projectId, docId, { ignoreFlushErrors }, - (error) => { + error => { timer.done() // There is no harm in flushing project history if the previous call // failed and sometimes it is required @@ -207,7 +207,7 @@ function flushProject(req, res, next) { const projectId = req.params.project_id logger.log({ projectId }, 'flushing project via http') const timer = new Metrics.Timer('http.flushProject') - ProjectManager.flushProjectWithLocks(projectId, (error) => { + ProjectManager.flushProjectWithLocks(projectId, error => { timer.done() if (error) { return next(error) @@ -228,7 +228,7 @@ function deleteProject(req, res, next) { options.skip_history_flush = true } // don't flush history when realtime shuts down if (req.query.background) { - ProjectManager.queueFlushAndDeleteProject(projectId, (error) => { + ProjectManager.queueFlushAndDeleteProject(projectId, error => { if (error) { return next(error) } @@ -237,18 +237,14 @@ function deleteProject(req, res, next) { }) // No Content } else { const timer = new Metrics.Timer('http.deleteProject') - ProjectManager.flushAndDeleteProjectWithLocks( - projectId, - options, - (error) => { - timer.done() - if (error) { - return next(error) - } - logger.log({ projectId }, 'deleted project via http') - res.sendStatus(204) // No Content + ProjectManager.flushAndDeleteProjectWithLocks(projectId, options, error => { + timer.done() + if (error) { + return next(error) } - ) + logger.log({ projectId }, 'deleted project via http') + res.sendStatus(204) // No Content + }) } } @@ -261,7 +257,7 @@ function deleteMultipleProjects(req, res, next) { logger.log({ projectId }, 'queue delete of project via http') ProjectManager.queueFlushAndDeleteProject(projectId, cb) }, - (error) => { + error => { if (error) { return next(error) } @@ -281,45 +277,35 @@ function acceptChanges(req, res, next) { `accepting ${changeIds.length} changes via http` ) const timer = new Metrics.Timer('http.acceptChanges') - DocumentManager.acceptChangesWithLock( - projectId, - docId, - changeIds, - (error) => { - timer.done() - if (error) { - return next(error) - } - logger.log( - { projectId, docId }, - `accepted ${changeIds.length} changes via http` - ) - res.sendStatus(204) // No Content + DocumentManager.acceptChangesWithLock(projectId, docId, changeIds, error => { + timer.done() + if (error) { + return next(error) } - ) + logger.log( + { projectId, docId }, + `accepted ${changeIds.length} changes via http` + ) + res.sendStatus(204) // No Content + }) } function deleteComment(req, res, next) { const { project_id: projectId, doc_id: docId, - comment_id: commentId + comment_id: commentId, } = req.params logger.log({ projectId, docId, commentId }, 'deleting comment via http') const timer = new Metrics.Timer('http.deleteComment') - DocumentManager.deleteCommentWithLock( - projectId, - docId, - commentId, - (error) => { - timer.done() - if (error) { - return next(error) - } - logger.log({ projectId, docId, commentId }, 'deleted comment via http') - res.sendStatus(204) // No Content + DocumentManager.deleteCommentWithLock(projectId, docId, 
commentId, error => { + timer.done() + if (error) { + return next(error) } - ) + logger.log({ projectId, docId, commentId }, 'deleted comment via http') + res.sendStatus(204) // No Content + }) } function updateProject(req, res, next) { @@ -333,7 +319,7 @@ function updateProject(req, res, next) { userId, updates, version, - (error) => { + error => { timer.done() if (error) { return next(error) @@ -357,7 +343,7 @@ function resyncProjectHistory(req, res, next) { projectHistoryId, docs, files, - (error) => { + error => { if (error) { return next(error) } @@ -372,7 +358,7 @@ function flushAllProjects(req, res, next) { const options = { limit: req.query.limit || 1000, concurrency: req.query.concurrency || 5, - dryRun: req.query.dryRun || false + dryRun: req.query.dryRun || false, } ProjectFlusher.flushAllProjects(options, (err, projectIds) => { if (err) { @@ -389,7 +375,7 @@ function flushQueuedProjects(req, res, next) { const options = { limit: req.query.limit || 1000, timeout: 5 * 60 * 1000, - min_delete_age: req.query.min_delete_age || 5 * 60 * 1000 + min_delete_age: req.query.min_delete_age || 5 * 60 * 1000, } DeleteQueueManager.flushAndDeleteOldProjects(options, (err, flushed) => { if (err) { diff --git a/services/document-updater/app/js/LockManager.js b/services/document-updater/app/js/LockManager.js index c23379d811..70447bad3f 100644 --- a/services/document-updater/app/js/LockManager.js +++ b/services/document-updater/app/js/LockManager.js @@ -54,36 +54,41 @@ module.exports = LockManager = { const lockValue = LockManager.randomLock() const key = keys.blockingKey({ doc_id }) const profile = new Profiler('tryLock', { doc_id, key, lockValue }) - return rclient.set(key, lockValue, 'EX', this.LOCK_TTL, 'NX', function ( - err, - gotLock - ) { - if (err != null) { - return callback(err) - } - if (gotLock === 'OK') { - metrics.inc('doc-not-blocking') - const timeTaken = profile.log('got lock').end() - if (timeTaken > MAX_REDIS_REQUEST_LENGTH) { - // took too long, so try to free the lock - return LockManager.releaseLock(doc_id, lockValue, function ( - err, - result - ) { - if (err != null) { - return callback(err) - } // error freeing lock - return callback(null, false) - }) // tell caller they didn't get the lock - } else { - return callback(null, true, lockValue) + return rclient.set( + key, + lockValue, + 'EX', + this.LOCK_TTL, + 'NX', + function (err, gotLock) { + if (err != null) { + return callback(err) + } + if (gotLock === 'OK') { + metrics.inc('doc-not-blocking') + const timeTaken = profile.log('got lock').end() + if (timeTaken > MAX_REDIS_REQUEST_LENGTH) { + // took too long, so try to free the lock + return LockManager.releaseLock( + doc_id, + lockValue, + function (err, result) { + if (err != null) { + return callback(err) + } // error freeing lock + return callback(null, false) + } + ) // tell caller they didn't get the lock + } else { + return callback(null, true, lockValue) + } + } else { + metrics.inc('doc-blocking') + profile.log('doc is locked').end() + return callback(null, false) } - } else { - metrics.inc('doc-blocking') - profile.log('doc is locked').end() - return callback(null, false) } - }) + ) }, getLock(doc_id, callback) { @@ -145,25 +150,28 @@ module.exports = LockManager = { releaseLock(doc_id, lockValue, callback) { const key = keys.blockingKey({ doc_id }) const profile = new Profiler('releaseLock', { doc_id, key, lockValue }) - return rclient.eval(LockManager.unlockScript, 1, key, lockValue, function ( - err, - result - ) { - if (err != null) { - return 
callback(err) - } else if (result != null && result !== 1) { - // successful unlock should release exactly one key - profile.log('unlockScript:expired-lock').end() - logger.error( - { doc_id, key, lockValue, redis_err: err, redis_result: result }, - 'unlocking error' - ) - metrics.inc('unlock-error') - return callback(new Error('tried to release timed out lock')) - } else { - profile.log('unlockScript:ok').end() - return callback(null, result) + return rclient.eval( + LockManager.unlockScript, + 1, + key, + lockValue, + function (err, result) { + if (err != null) { + return callback(err) + } else if (result != null && result !== 1) { + // successful unlock should release exactly one key + profile.log('unlockScript:expired-lock').end() + logger.error( + { doc_id, key, lockValue, redis_err: err, redis_result: result }, + 'unlocking error' + ) + metrics.inc('unlock-error') + return callback(new Error('tried to release timed out lock')) + } else { + profile.log('unlockScript:ok').end() + return callback(null, result) + } } - }) - } + ) + }, } diff --git a/services/document-updater/app/js/LoggerSerializers.js b/services/document-updater/app/js/LoggerSerializers.js index b2c015f078..ac9605e757 100644 --- a/services/document-updater/app/js/LoggerSerializers.js +++ b/services/document-updater/app/js/LoggerSerializers.js @@ -25,19 +25,19 @@ const showUpdateLength = function (update) { const copy = _.cloneDeep(update) copy.op.forEach(function (element, index) { if ( - __guard__(element != null ? element.i : undefined, (x) => x.length) != + __guard__(element != null ? element.i : undefined, x => x.length) != null ) { copy.op[index].i = element.i.length } if ( - __guard__(element != null ? element.d : undefined, (x1) => x1.length) != + __guard__(element != null ? element.d : undefined, x1 => x1.length) != null ) { copy.op[index].d = element.d.length } if ( - __guard__(element != null ? element.c : undefined, (x2) => x2.length) != + __guard__(element != null ? 
element.c : undefined, x2 => x2.length) != null ) { return (copy.op[index].c = element.c.length) @@ -57,7 +57,7 @@ module.exports = { docLines: showLength, newDocLines: showLength, ranges: showLength, - update: showUpdateLength + update: showUpdateLength, } function __guard__(value, transform) { diff --git a/services/document-updater/app/js/PersistenceManager.js b/services/document-updater/app/js/PersistenceManager.js index 664d36a3c7..d7df831683 100644 --- a/services/document-updater/app/js/PersistenceManager.js +++ b/services/document-updater/app/js/PersistenceManager.js @@ -21,7 +21,7 @@ const Metrics = require('./Metrics') const logger = require('logger-sharelatex') const request = require('requestretry').defaults({ maxAttempts: 2, - retryDelay: 10 + retryDelay: 10, }) // We have to be quick with HTTP calls because we're holding a lock that @@ -75,15 +75,15 @@ module.exports = PersistenceManager = { url: `${Settings.apis.web.url}${urlPath}`, method: 'GET', headers: { - accept: 'application/json' + accept: 'application/json', }, auth: { user: Settings.apis.web.user, pass: Settings.apis.web.pass, - sendImmediately: true + sendImmediately: true, }, jar: false, - timeout: MAX_HTTP_REQUEST_LENGTH + timeout: MAX_HTTP_REQUEST_LENGTH, }, function (error, res, body) { updateMetric('getDoc', error, res) @@ -164,15 +164,15 @@ module.exports = PersistenceManager = { ranges, version, lastUpdatedBy, - lastUpdatedAt + lastUpdatedAt, }, auth: { user: Settings.apis.web.user, pass: Settings.apis.web.pass, - sendImmediately: true + sendImmediately: true, }, jar: false, - timeout: MAX_HTTP_REQUEST_LENGTH + timeout: MAX_HTTP_REQUEST_LENGTH, }, function (error, res, body) { updateMetric('setDoc', error, res) @@ -196,5 +196,5 @@ module.exports = PersistenceManager = { } } ) - } + }, } diff --git a/services/document-updater/app/js/ProjectFlusher.js b/services/document-updater/app/js/ProjectFlusher.js index 704600fbc3..12f885eb3c 100644 --- a/services/document-updater/app/js/ProjectFlusher.js +++ b/services/document-updater/app/js/ProjectFlusher.js @@ -45,27 +45,31 @@ var ProjectFlusher = { var doIteration = ( cb // avoid hitting redis too hard ) => - node.scan(cursor, 'MATCH', pattern, 'COUNT', batchSize, function ( - error, - reply - ) { - let keys - if (error != null) { - return callback(error) + node.scan( + cursor, + 'MATCH', + pattern, + 'COUNT', + batchSize, + function (error, reply) { + let keys + if (error != null) { + return callback(error) + } + ;[cursor, keys] = Array.from(reply) + for (const key of Array.from(keys)) { + keySet[key] = true + } + keys = Object.keys(keySet) + const noResults = cursor === '0' // redis returns string results not numeric + const limitReached = limit != null && keys.length >= limit + if (noResults || limitReached) { + return callback(null, keys) + } else { + return setTimeout(doIteration, 10) + } } - ;[cursor, keys] = Array.from(reply) - for (const key of Array.from(keys)) { - keySet[key] = true - } - keys = Object.keys(keySet) - const noResults = cursor === '0' // redis returns string results not numeric - const limitReached = limit != null && keys.length >= limit - if (noResults || limitReached) { - return callback(null, keys) - } else { - return setTimeout(doIteration, 10) - } - }) + ) return doIteration() }, @@ -97,12 +101,14 @@ var ProjectFlusher = { if (options.dryRun) { return callback(null, project_ids) } - const jobs = _.map(project_ids, (project_id) => (cb) => - ProjectManager.flushAndDeleteProjectWithLocks( - project_id, - { background: true }, - cb - ) + 
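// Fan-out pattern used here in outline (illustrative): every project id
// becomes a task, reflectAll wraps each task so a single failure cannot
// abort the batch, and parallelLimit caps how many flushes run at once.
const async = require('async')
function flushAllOutline(projectIds, concurrency, done) {
  const jobs = projectIds.map(projectId => cb =>
    ProjectManager.flushAndDeleteProjectWithLocks(projectId, { background: true }, cb)
  )
  async.parallelLimit(async.reflectAll(jobs), concurrency, done)
}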
const jobs = _.map( + project_ids, + project_id => cb => + ProjectManager.flushAndDeleteProjectWithLocks( + project_id, + { background: true }, + cb + ) ) return async.parallelLimit( async.reflectAll(jobs), @@ -123,7 +129,7 @@ var ProjectFlusher = { ) } ) - } + }, } module.exports = ProjectFlusher diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js index 3d8b0cd95a..45e98238f4 100644 --- a/services/document-updater/app/js/ProjectHistoryRedisManager.js +++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js @@ -17,7 +17,7 @@ let ProjectHistoryRedisManager const Settings = require('@overleaf/settings') const projectHistoryKeys = __guard__( Settings.redis != null ? Settings.redis.project_history : undefined, - (x) => x.key_schema + x => x.key_schema ) const rclient = require('@overleaf/redis-wrapper').createClient( Settings.redis.project_history @@ -70,10 +70,10 @@ module.exports = ProjectHistoryRedisManager = { new_pathname: projectUpdate.newPathname, meta: { user_id, - ts: new Date() + ts: new Date(), }, version: projectUpdate.version, - projectHistoryId + projectHistoryId, } projectUpdate[entity_type] = entity_id @@ -104,10 +104,10 @@ module.exports = ProjectHistoryRedisManager = { url: projectUpdate.url, meta: { user_id, - ts: new Date() + ts: new Date(), }, version: projectUpdate.version, - projectHistoryId + projectHistoryId, } projectUpdate[entity_type] = entitiy_id @@ -132,8 +132,8 @@ module.exports = ProjectHistoryRedisManager = { resyncProjectStructure: { docs, files }, projectHistoryId, meta: { - ts: new Date() - } + ts: new Date(), + }, } const jsonUpdate = JSON.stringify(projectUpdate) return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback) @@ -155,18 +155,18 @@ module.exports = ProjectHistoryRedisManager = { const projectUpdate = { resyncDocContent: { content: lines.join('\n'), - version + version, }, projectHistoryId, path: pathname, doc: doc_id, meta: { - ts: new Date() - } + ts: new Date(), + }, } const jsonUpdate = JSON.stringify(projectUpdate) return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback) - } + }, } function __guard__(value, transform) { diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js index 07284a692a..20f79f3d08 100644 --- a/services/document-updater/app/js/ProjectManager.js +++ b/services/document-updater/app/js/ProjectManager.js @@ -14,7 +14,7 @@ module.exports = { getProjectDocsTimestamps, getProjectDocsAndFlushIfOld, clearProjectState, - updateProjectWithLocks + updateProjectWithLocks, } function flushProjectWithLocks(projectId, _callback) { @@ -29,8 +29,8 @@ function flushProjectWithLocks(projectId, _callback) { return callback(error) } const errors = [] - const jobs = docIds.map((docId) => (callback) => { - DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => { + const jobs = docIds.map(docId => callback => { + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => { if (error instanceof Errors.NotFoundError) { logger.warn( { err: error, projectId, docId }, @@ -72,19 +72,14 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) { return callback(error) } const errors = [] - const jobs = docIds.map((docId) => (callback) => { - DocumentManager.flushAndDeleteDocWithLock( - projectId, - docId, - {}, - (error) => { - if (error) { - logger.error({ err: error, projectId, docId }, 'error deleting doc') - 
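// Error handling in outline (illustrative): per-doc failures are logged and
// collected rather than propagated, so one broken doc cannot stop the rest
// of the project's docs from being flushed out of Redis.
function deleteDocJob(projectId, docId, errors, cb) {
  DocumentManager.flushAndDeleteDocWithLock(projectId, docId, {}, error => {
    if (error) errors.push(error) // remember the failure, but keep going
    cb() // always succeed so the remaining jobs still run
  })
}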
errors.push(error) - } - callback() + const jobs = docIds.map(docId => callback => { + DocumentManager.flushAndDeleteDocWithLock(projectId, docId, {}, error => { + if (error) { + logger.error({ err: error, projectId, docId }, 'error deleting doc') + errors.push(error) } - ) + callback() + }) }) logger.log({ projectId, docIds }, 'deleting docs') @@ -93,7 +88,7 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) { // history is completely flushed because the project may be // deleted in web after this call completes, and so further // attempts to flush would fail after that. - HistoryManager.flushProjectChanges(projectId, options, (error) => { + HistoryManager.flushProjectChanges(projectId, options, error => { if (errors.length > 0) { callback(new Error('Errors deleting docs. See log for details')) } else if (error) { @@ -107,7 +102,7 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) { } function queueFlushAndDeleteProject(projectId, callback) { - RedisManager.queueFlushAndDeleteProject(projectId, (error) => { + RedisManager.queueFlushAndDeleteProject(projectId, error => { if (error) { logger.error( { projectId, error }, @@ -176,7 +171,7 @@ function getProjectDocsAndFlushIfOld( return callback(error) } // get the doc lines from redis - const jobs = docIds.map((docId) => (cb) => { + const jobs = docIds.map(docId => cb => { DocumentManager.getDocAndFlushIfOldWithLock( projectId, docId, @@ -288,7 +283,7 @@ function updateProjectWithLocks( } } - async.eachSeries(updates, handleUpdate, (error) => { + async.eachSeries(updates, handleUpdate, error => { if (error) { return callback(error) } diff --git a/services/document-updater/app/js/RangesManager.js b/services/document-updater/app/js/RangesManager.js index 636efcb5a6..0de39134de 100644 --- a/services/document-updater/app/js/RangesManager.js +++ b/services/document-updater/app/js/RangesManager.js @@ -42,7 +42,7 @@ module.exports = RangesManager = { for (const op of Array.from(update.op)) { try { rangesTracker.applyOp(op, { - user_id: update.meta != null ? update.meta.user_id : undefined + user_id: update.meta != null ? update.meta.user_id : undefined, }) } catch (error1) { error = error1 @@ -86,7 +86,7 @@ module.exports = RangesManager = { response.changes != null ? response.changes.length : undefined, commentsCount: response.comments != null ? 
response.comments.length : undefined, - rangesWereCollapsed + rangesWereCollapsed, }, 'applied updates to ranges' ) @@ -159,5 +159,5 @@ module.exports = RangesManager = { } } return count - } + }, } diff --git a/services/document-updater/app/js/RangesTracker.js b/services/document-updater/app/js/RangesTracker.js index 5991ee2993..2cc8869d99 100644 --- a/services/document-updater/app/js/RangesTracker.js +++ b/services/document-updater/app/js/RangesTracker.js @@ -120,7 +120,7 @@ const load = function () { if (comment == null) { return } - this.comments = this.comments.filter((c) => c.id !== comment_id) + this.comments = this.comments.filter(c => c.id !== comment_id) return this._markAsDirty(comment, 'comment', 'removed') } @@ -257,7 +257,7 @@ const load = function () { if (metadata == null) { metadata = {} } - return Array.from(ops).map((op) => this.applyOp(op, metadata)) + return Array.from(ops).map(op => this.applyOp(op, metadata)) } addComment(op, metadata) { @@ -274,9 +274,9 @@ const load = function () { // Copy because we'll modify in place c: op.c, p: op.p, - t: op.t + t: op.t, }, - metadata + metadata, }) ) this._markAsDirty(comment, 'comment', 'added') @@ -488,9 +488,9 @@ const load = function () { const after_change = { op: { i: after_content, - p: change_start + offset + op_length + p: change_start + offset + op_length, }, - metadata: {} + metadata: {}, } for (const key in change.metadata) { const value = change.metadata[key] @@ -606,7 +606,7 @@ const load = function () { delete_removed_start, delete_removed_start + delete_removed_length ), - p: delete_removed_start + p: delete_removed_start, } if (modification.d.length > 0) { op_modifications.push(modification) @@ -643,7 +643,7 @@ const load = function () { // Copy rather than modify because we still need to apply it to comments op = { p: op.p, - d: this._applyOpModifications(op.d, op_modifications) + d: this._applyOpModifications(op.d, op_modifications), } for (change of Array.from(remove_changes)) { @@ -678,7 +678,7 @@ const load = function () { moved_changes = moved_changes.concat(results.moved_changes) for (change of Array.from(results.remove_changes)) { this._removeChange(change) - moved_changes = moved_changes.filter((c) => c !== change) + moved_changes = moved_changes.filter(c => c !== change) } } @@ -695,7 +695,7 @@ const load = function () { const change = { id: this.newId(), op: this._clone(op), // Don't take a reference to the existing op since we'll modify this in place with future changes - metadata: this._clone(metadata) + metadata: this._clone(metadata), } this.changes.push(change) @@ -717,7 +717,7 @@ const load = function () { } _removeChange(change) { - this.changes = this.changes.filter((c) => c.id !== change.id) + this.changes = this.changes.filter(c => c.id !== change.id) return this._markAsDirty(change, 'change', 'removed') } @@ -813,13 +813,13 @@ const load = function () { comment: { moved: {}, removed: {}, - added: {} + added: {}, }, change: { moved: {}, removed: {}, - added: {} - } + added: {}, + }, }) } diff --git a/services/document-updater/app/js/RateLimitManager.js b/services/document-updater/app/js/RateLimitManager.js index 9b699235b7..48e9b0b8f1 100644 --- a/services/document-updater/app/js/RateLimitManager.js +++ b/services/document-updater/app/js/RateLimitManager.js @@ -47,7 +47,7 @@ module.exports = RateLimiter = class RateLimiter { } this.ActiveWorkerCount++ Metrics.gauge('processingUpdates', this.ActiveWorkerCount) - return task((err) => { + return task(err => { this.ActiveWorkerCount-- 
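// Worker accounting in outline (illustrative): the gauge is updated on both
// sides of the task, so `processingUpdates` always reflects in-flight work,
// even when the task errors.
function trackAndRunOutline(state, task, cb) {
  state.active++
  Metrics.gauge('processingUpdates', state.active)
  task(err => {
    state.active--
    Metrics.gauge('processingUpdates', state.active)
    cb(err)
  })
}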
Metrics.gauge('processingUpdates', this.ActiveWorkerCount) return callback(err) @@ -65,11 +65,11 @@ module.exports = RateLimiter = class RateLimiter { logger.log( { active: this.ActiveWorkerCount, - currentLimit: Math.ceil(this.CurrentWorkerLimit) + currentLimit: Math.ceil(this.CurrentWorkerLimit), }, 'hit rate limit' ) - return this._trackAndRun(task, (err) => { + return this._trackAndRun(task, err => { if (err == null) { this._adjustLimitUp() } // don't increment rate limit if there was an error diff --git a/services/document-updater/app/js/RealTimeRedisManager.js b/services/document-updater/app/js/RealTimeRedisManager.js index 298fb26940..af359ef227 100644 --- a/services/document-updater/app/js/RealTimeRedisManager.js +++ b/services/document-updater/app/js/RealTimeRedisManager.js @@ -45,7 +45,7 @@ module.exports = RealTimeRedisManager = { for (jsonUpdate of Array.from(jsonUpdates)) { // record metric for each update removed from queue metrics.summary('redis.pendingUpdates', jsonUpdate.length, { - status: 'pop' + status: 'pop', }) } const updates = [] @@ -83,5 +83,5 @@ module.exports = RealTimeRedisManager = { } else { return pubsubClient.publish('applied-ops', blob) } - } + }, } diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js index 59eb10e332..11ff1f8fcc 100644 --- a/services/document-updater/app/js/RedisManager.js +++ b/services/document-updater/app/js/RedisManager.js @@ -92,7 +92,7 @@ module.exports = RedisManager = { return callback(error) } // update docsInProject set before writing doc contents - rclient.sadd(keys.docsInProject({ project_id }), doc_id, (error) => { + rclient.sadd(keys.docsInProject({ project_id }), doc_id, error => { if (error) return callback(error) rclient.mset( @@ -103,7 +103,7 @@ module.exports = RedisManager = { [keys.docHash({ doc_id })]: docHash, [keys.ranges({ doc_id })]: ranges, [keys.pathname({ doc_id })]: pathname, - [keys.projectHistoryId({ doc_id })]: projectHistoryId + [keys.projectHistoryId({ doc_id })]: projectHistoryId, }, callback ) @@ -203,7 +203,7 @@ module.exports = RedisManager = { keys.projectHistoryId({ doc_id }), keys.unflushedTime({ doc_id }), keys.lastUpdatedAt({ doc_id }), - keys.lastUpdatedBy({ doc_id }) + keys.lastUpdatedBy({ doc_id }), ] rclient.mget(...collectKeys, (error, ...rest) => { let [ @@ -216,7 +216,7 @@ module.exports = RedisManager = { projectHistoryId, unflushedTime, lastUpdatedAt, - lastUpdatedBy + lastUpdatedBy, ] = Array.from(rest[0]) const timeSpan = timer.done() if (error != null) { @@ -244,7 +244,7 @@ module.exports = RedisManager = { doc_project_id, computedHash, storedHash, - docLines + docLines, }, 'hash mismatch on retrieved document' ) @@ -325,62 +325,64 @@ module.exports = RedisManager = { if (error != null) { return callback(error) } - return rclient.get(keys.docVersion({ doc_id }), function ( - error, - version - ) { - if (error != null) { - return callback(error) - } - version = parseInt(version, 10) - const first_version_in_redis = version - length - - if (start < first_version_in_redis || end > version) { - error = new Errors.OpRangeNotAvailableError( - 'doc ops range is not loaded in redis' - ) - logger.warn( - { err: error, doc_id, length, version, start, end }, - 'doc ops range is not loaded in redis' - ) - return callback(error) - } - - start = start - first_version_in_redis - if (end > -1) { - end = end - first_version_in_redis - } - - if (isNaN(start) || isNaN(end)) { - error = new Error('inconsistent version or lengths') - 
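// Index arithmetic in outline (illustrative): the docOps list keeps the most
// recent `length` ops and `version` is the version after the newest op, so
// the oldest op held in Redis has version `version - length`. A requested
// [start, end] version window is shifted into list offsets relative to that,
// and anything outside the window is reported as not loaded.
function opsWindowOutline(version, length, start, end) {
  const firstVersionInRedis = version - length
  if (start < firstVersionInRedis || end > version) {
    return null // range no longer (or not yet) held in Redis
  }
  return {
    start: start - firstVersionInRedis,
    end: end > -1 ? end - firstVersionInRedis : end, // -1 means "to the newest op"
  }
}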
logger.error( - { err: error, doc_id, length, version, start, end }, - 'inconsistent version or length' - ) - return callback(error) - } - - return rclient.lrange(keys.docOps({ doc_id }), start, end, function ( - error, - jsonOps - ) { - let ops + return rclient.get( + keys.docVersion({ doc_id }), + function (error, version) { if (error != null) { return callback(error) } - try { - ops = jsonOps.map((jsonOp) => JSON.parse(jsonOp)) - } catch (e) { - return callback(e) - } - const timeSpan = timer.done() - if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { - error = new Error('redis getPreviousDocOps exceeded timeout') + version = parseInt(version, 10) + const first_version_in_redis = version - length + + if (start < first_version_in_redis || end > version) { + error = new Errors.OpRangeNotAvailableError( + 'doc ops range is not loaded in redis' + ) + logger.warn( + { err: error, doc_id, length, version, start, end }, + 'doc ops range is not loaded in redis' + ) return callback(error) } - return callback(null, ops) - }) - }) + + start = start - first_version_in_redis + if (end > -1) { + end = end - first_version_in_redis + } + + if (isNaN(start) || isNaN(end)) { + error = new Error('inconsistent version or lengths') + logger.error( + { err: error, doc_id, length, version, start, end }, + 'inconsistent version or length' + ) + return callback(error) + } + + return rclient.lrange( + keys.docOps({ doc_id }), + start, + end, + function (error, jsonOps) { + let ops + if (error != null) { + return callback(error) + } + try { + ops = jsonOps.map(jsonOp => JSON.parse(jsonOp)) + } catch (e) { + return callback(e) + } + const timeSpan = timer.done() + if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { + error = new Error('redis getPreviousDocOps exceeded timeout') + return callback(error) + } + return callback(null, ops) + } + ) + } + ) }) }, @@ -388,15 +390,15 @@ module.exports = RedisManager = { if (callback == null) { callback = function (error, projectHistoryType) {} } - return rclient.get(keys.projectHistoryType({ doc_id }), function ( - error, - projectHistoryType - ) { - if (error != null) { - return callback(error) + return rclient.get( + keys.projectHistoryType({ doc_id }), + function (error, projectHistoryType) { + if (error != null) { + return callback(error) + } + return callback(null, projectHistoryType) } - return callback(null, projectHistoryType) - }) + ) }, setHistoryType(doc_id, projectHistoryType, callback) { @@ -428,192 +430,198 @@ module.exports = RedisManager = { if (callback == null) { callback = function (error) {} } - return RedisManager.getDocVersion(doc_id, function ( - error, - currentVersion, - projectHistoryType - ) { - if (error != null) { - return callback(error) - } - if (currentVersion + appliedOps.length !== newVersion) { - error = new Error(`Version mismatch. 
'${doc_id}' is corrupted.`) - logger.error( - { - err: error, - doc_id, - currentVersion, - newVersion, - opsLength: appliedOps.length - }, - 'version mismatch' - ) - return callback(error) - } - - const jsonOps = appliedOps.map((op) => JSON.stringify(op)) - for (const op of Array.from(jsonOps)) { - if (op.indexOf('\u0000') !== -1) { - error = new Error('null bytes found in jsonOps') - // this check was added to catch memory corruption in JSON.stringify - logger.error({ err: error, doc_id, jsonOps }, error.message) - return callback(error) - } - } - - const newDocLines = JSON.stringify(docLines) - if (newDocLines.indexOf('\u0000') !== -1) { - error = new Error('null bytes found in doc lines') - // this check was added to catch memory corruption in JSON.stringify - logger.error({ err: error, doc_id, newDocLines }, error.message) - return callback(error) - } - // Do a cheap size check on the serialized blob. - if (newDocLines.length > Settings.max_doc_length) { - const err = new Error('blocking doc update: doc is too large') - const docSize = newDocLines.length - logger.error({ project_id, doc_id, err, docSize }, err.message) - return callback(err) - } - const newHash = RedisManager._computeHash(newDocLines) - - const opVersions = appliedOps.map((op) => (op != null ? op.v : undefined)) - logger.log( - { doc_id, version: newVersion, hash: newHash, op_versions: opVersions }, - 'updating doc in redis' - ) - // record bytes sent to redis in update - metrics.summary('redis.docLines', newDocLines.length, { - status: 'update' - }) - return RedisManager._serializeRanges(ranges, function (error, ranges) { + return RedisManager.getDocVersion( + doc_id, + function (error, currentVersion, projectHistoryType) { if (error != null) { - logger.error({ err: error, doc_id }, error.message) return callback(error) } - if (ranges != null && ranges.indexOf('\u0000') !== -1) { - error = new Error('null bytes found in ranges') - // this check was added to catch memory corruption in JSON.stringify - logger.error({ err: error, doc_id, ranges }, error.message) + if (currentVersion + appliedOps.length !== newVersion) { + error = new Error(`Version mismatch. 
'${doc_id}' is corrupted.`) + logger.error( + { + err: error, + doc_id, + currentVersion, + newVersion, + opsLength: appliedOps.length, + }, + 'version mismatch' + ) return callback(error) } - const multi = rclient.multi() - multi.mset({ - [keys.docLines({ doc_id })]: newDocLines, - [keys.docVersion({ doc_id })]: newVersion, - [keys.docHash({ doc_id })]: newHash, - [keys.ranges({ doc_id })]: ranges, - [keys.lastUpdatedAt({ doc_id })]: Date.now(), - [keys.lastUpdatedBy({ doc_id })]: updateMeta && updateMeta.user_id - }) - multi.ltrim( - keys.docOps({ doc_id }), - -RedisManager.DOC_OPS_MAX_LENGTH, - -1 - ) // index 3 - // push the ops last so we can get the lengths at fixed index position 7 - if (jsonOps.length > 0) { - multi.rpush(keys.docOps({ doc_id }), ...Array.from(jsonOps)) // index 5 - // expire must come after rpush since before it will be a no-op if the list is empty - multi.expire(keys.docOps({ doc_id }), RedisManager.DOC_OPS_TTL) // index 6 - if (projectHistoryType === 'project-history') { - metrics.inc('history-queue', 1, { status: 'skip-track-changes' }) - logger.log( - { doc_id }, - 'skipping push of uncompressed ops for project using project-history' - ) - } else { - // project is using old track-changes history service - metrics.inc('history-queue', 1, { status: 'track-changes' }) - multi.rpush( - historyKeys.uncompressedHistoryOps({ doc_id }), - ...Array.from(jsonOps) - ) // index 7 - } - // Set the unflushed timestamp to the current time if the doc - // hasn't been modified before (the content in mongo has been - // valid up to this point). Otherwise leave it alone ("NX" flag). - multi.set(keys.unflushedTime({ doc_id }), Date.now(), 'NX') - } - return multi.exec(function (error, result) { - let docUpdateCount - if (error != null) { + + const jsonOps = appliedOps.map(op => JSON.stringify(op)) + for (const op of Array.from(jsonOps)) { + if (op.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in jsonOps') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, doc_id, jsonOps }, error.message) return callback(error) } + } - if (projectHistoryType === 'project-history') { - docUpdateCount = undefined // only using project history, don't bother with track-changes - } else { - // project is using old track-changes history service - docUpdateCount = result[4] - } + const newDocLines = JSON.stringify(docLines) + if (newDocLines.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in doc lines') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, doc_id, newDocLines }, error.message) + return callback(error) + } + // Do a cheap size check on the serialized blob. + if (newDocLines.length > Settings.max_doc_length) { + const err = new Error('blocking doc update: doc is too large') + const docSize = newDocLines.length + logger.error({ project_id, doc_id, err, docSize }, err.message) + return callback(err) + } + const newHash = RedisManager._computeHash(newDocLines) - if ( - jsonOps.length > 0 && - __guard__( - Settings.apis != null ? Settings.apis.project_history : undefined, - (x) => x.enabled - ) - ) { - metrics.inc('history-queue', 1, { status: 'project-history' }) - return ProjectHistoryRedisManager.queueOps( - project_id, - ...Array.from(jsonOps), - (error, projectUpdateCount) => - callback(null, docUpdateCount, projectUpdateCount) - ) - } else { - return callback(null, docUpdateCount) - } + const opVersions = appliedOps.map(op => (op != null ? 
op.v : undefined)) + logger.log( + { + doc_id, + version: newVersion, + hash: newHash, + op_versions: opVersions, + }, + 'updating doc in redis' + ) + // record bytes sent to redis in update + metrics.summary('redis.docLines', newDocLines.length, { + status: 'update', }) - }) - }) + return RedisManager._serializeRanges(ranges, function (error, ranges) { + if (error != null) { + logger.error({ err: error, doc_id }, error.message) + return callback(error) + } + if (ranges != null && ranges.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in ranges') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, doc_id, ranges }, error.message) + return callback(error) + } + const multi = rclient.multi() + multi.mset({ + [keys.docLines({ doc_id })]: newDocLines, + [keys.docVersion({ doc_id })]: newVersion, + [keys.docHash({ doc_id })]: newHash, + [keys.ranges({ doc_id })]: ranges, + [keys.lastUpdatedAt({ doc_id })]: Date.now(), + [keys.lastUpdatedBy({ doc_id })]: updateMeta && updateMeta.user_id, + }) + multi.ltrim( + keys.docOps({ doc_id }), + -RedisManager.DOC_OPS_MAX_LENGTH, + -1 + ) // index 3 + // push the ops last so we can get the lengths at fixed index position 7 + if (jsonOps.length > 0) { + multi.rpush(keys.docOps({ doc_id }), ...Array.from(jsonOps)) // index 5 + // expire must come after rpush since before it will be a no-op if the list is empty + multi.expire(keys.docOps({ doc_id }), RedisManager.DOC_OPS_TTL) // index 6 + if (projectHistoryType === 'project-history') { + metrics.inc('history-queue', 1, { status: 'skip-track-changes' }) + logger.log( + { doc_id }, + 'skipping push of uncompressed ops for project using project-history' + ) + } else { + // project is using old track-changes history service + metrics.inc('history-queue', 1, { status: 'track-changes' }) + multi.rpush( + historyKeys.uncompressedHistoryOps({ doc_id }), + ...Array.from(jsonOps) + ) // index 7 + } + // Set the unflushed timestamp to the current time if the doc + // hasn't been modified before (the content in mongo has been + // valid up to this point). Otherwise leave it alone ("NX" flag). + multi.set(keys.unflushedTime({ doc_id }), Date.now(), 'NX') + } + return multi.exec(function (error, result) { + let docUpdateCount + if (error != null) { + return callback(error) + } + + if (projectHistoryType === 'project-history') { + docUpdateCount = undefined // only using project history, don't bother with track-changes + } else { + // project is using old track-changes history service + docUpdateCount = result[4] + } + + if ( + jsonOps.length > 0 && + __guard__( + Settings.apis != null + ? 
Settings.apis.project_history + : undefined, + x => x.enabled + ) + ) { + metrics.inc('history-queue', 1, { status: 'project-history' }) + return ProjectHistoryRedisManager.queueOps( + project_id, + ...Array.from(jsonOps), + (error, projectUpdateCount) => + callback(null, docUpdateCount, projectUpdateCount) + ) + } else { + return callback(null, docUpdateCount) + } + }) + }) + } + ) }, renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback) { if (callback == null) { callback = function (error) {} } - return RedisManager.getDoc(project_id, doc_id, function ( - error, - lines, - version - ) { - if (error != null) { - return callback(error) - } + return RedisManager.getDoc( + project_id, + doc_id, + function (error, lines, version) { + if (error != null) { + return callback(error) + } - if (lines != null && version != null) { - return rclient.set( - keys.pathname({ doc_id }), - update.newPathname, - function (error) { - if (error != null) { - return callback(error) + if (lines != null && version != null) { + return rclient.set( + keys.pathname({ doc_id }), + update.newPathname, + function (error) { + if (error != null) { + return callback(error) + } + return ProjectHistoryRedisManager.queueRenameEntity( + project_id, + projectHistoryId, + 'doc', + doc_id, + user_id, + update, + callback + ) } - return ProjectHistoryRedisManager.queueRenameEntity( - project_id, - projectHistoryId, - 'doc', - doc_id, - user_id, - update, - callback - ) - } - ) - } else { - return ProjectHistoryRedisManager.queueRenameEntity( - project_id, - projectHistoryId, - 'doc', - doc_id, - user_id, - update, - callback - ) + ) + } else { + return ProjectHistoryRedisManager.queueRenameEntity( + project_id, + projectHistoryId, + 'doc', + doc_id, + user_id, + update, + callback + ) + } } - }) + ) }, clearUnflushedTime(doc_id, callback) { @@ -726,7 +734,7 @@ module.exports = RedisManager = { // note: must specify 'utf8' encoding explicitly, as the default is // binary in node < v5 return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex') - } + }, } function __guard__(value, transform) { diff --git a/services/document-updater/app/js/ShareJsDB.js b/services/document-updater/app/js/ShareJsDB.js index 2339eefab6..65e234d085 100644 --- a/services/document-updater/app/js/ShareJsDB.js +++ b/services/document-updater/app/js/ShareJsDB.js @@ -70,7 +70,7 @@ module.exports = ShareJsDB = class ShareJsDB { return callback(null, { snapshot: this.lines.join('\n'), v: parseInt(this.version, 10), - type: 'text' + type: 'text', }) } } diff --git a/services/document-updater/app/js/ShareJsUpdateManager.js b/services/document-updater/app/js/ShareJsUpdateManager.js index 19bfee9ea4..79b92dfb79 100644 --- a/services/document-updater/app/js/ShareJsUpdateManager.js +++ b/services/document-updater/app/js/ShareJsUpdateManager.js @@ -35,7 +35,7 @@ module.exports = ShareJsUpdateManager = { const db = new ShareJsDB(project_id, doc_id, lines, version) const model = new ShareJsModel(db, { maxDocLength: Settings.max_doc_length, - maximumAge: MAX_AGE_OF_OP + maximumAge: MAX_AGE_OF_OP, }) model.db = db return model @@ -141,5 +141,5 @@ module.exports = ShareJsUpdateManager = { .update('blob ' + content.length + '\x00') .update(content, 'utf8') .digest('hex') - } + }, } diff --git a/services/document-updater/app/js/SnapshotManager.js b/services/document-updater/app/js/SnapshotManager.js index ec7b31315c..3fd87b4540 100644 --- a/services/document-updater/app/js/SnapshotManager.js +++ 
b/services/document-updater/app/js/SnapshotManager.js @@ -38,7 +38,7 @@ module.exports = SnapshotManager = { lines, pathname, ranges: SnapshotManager.jsonRangesToMongo(ranges), - ts: new Date() + ts: new Date(), }, callback ) @@ -83,5 +83,5 @@ module.exports = SnapshotManager = { } catch (error) { return data } - } + }, } diff --git a/services/document-updater/app/js/UpdateKeys.js b/services/document-updater/app/js/UpdateKeys.js index 8710272cfb..67e85e65c9 100644 --- a/services/document-updater/app/js/UpdateKeys.js +++ b/services/document-updater/app/js/UpdateKeys.js @@ -9,5 +9,5 @@ module.exports = { }, splitProjectIdAndDocId(project_and_doc_id) { return project_and_doc_id.split(':') - } + }, } diff --git a/services/document-updater/app/js/UpdateManager.js b/services/document-updater/app/js/UpdateManager.js index 999994d556..bf30db74b7 100644 --- a/services/document-updater/app/js/UpdateManager.js +++ b/services/document-updater/app/js/UpdateManager.js @@ -37,15 +37,17 @@ module.exports = UpdateManager = { callback = function (error) {} } const timer = new Metrics.Timer('updateManager.processOutstandingUpdates') - return UpdateManager.fetchAndApplyUpdates(project_id, doc_id, function ( - error - ) { - timer.done() - if (error != null) { - return callback(error) + return UpdateManager.fetchAndApplyUpdates( + project_id, + doc_id, + function (error) { + timer.done() + if (error != null) { + return callback(error) + } + return callback() } - return callback() - }) + ) }, processOutstandingUpdatesWithLock(project_id, doc_id, callback) { @@ -54,7 +56,7 @@ module.exports = UpdateManager = { } const profile = new Profiler('processOutstandingUpdatesWithLock', { project_id, - doc_id + doc_id, }) return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => { if (error != null) { @@ -77,7 +79,7 @@ module.exports = UpdateManager = { ) } profile.log('processOutstandingUpdates') - return LockManager.releaseLock(doc_id, lockValue, (error) => { + return LockManager.releaseLock(doc_id, lockValue, error => { if (error != null) { return callback(error) } @@ -155,7 +157,7 @@ module.exports = UpdateManager = { RealTimeRedisManager.sendData({ project_id, doc_id, - error: error.message || error + error: error.message || error, }) profile.log('sendData') } @@ -166,128 +168,125 @@ module.exports = UpdateManager = { var profile = new Profiler('applyUpdate', { project_id, doc_id }) UpdateManager._sanitizeUpdate(update) profile.log('sanitizeUpdate') - return DocumentManager.getDoc(project_id, doc_id, function ( - error, - lines, - version, - ranges, - pathname, - projectHistoryId - ) { - profile.log('getDoc') - if (error != null) { - return callback(error) - } - if (lines == null || version == null) { - return callback( - new Errors.NotFoundError(`document not found: ${doc_id}`) - ) - } - const previousVersion = version - return ShareJsUpdateManager.applyUpdate( - project_id, - doc_id, - update, - lines, - version, - function (error, updatedDocLines, version, appliedOps) { - profile.log('sharejs.applyUpdate') - if (error != null) { - return callback(error) - } - return RangesManager.applyUpdate( - project_id, - doc_id, - ranges, - appliedOps, - updatedDocLines, - function (error, new_ranges, ranges_were_collapsed) { - UpdateManager._addProjectHistoryMetadataToOps( - appliedOps, - pathname, - projectHistoryId, - lines - ) - profile.log('RangesManager.applyUpdate') - if (error != null) { - return callback(error) - } - return RedisManager.updateDocument( - project_id, - doc_id, - updatedDocLines, - version, - 
appliedOps, - new_ranges, - update.meta, - function (error, doc_ops_length, project_ops_length) { - profile.log('RedisManager.updateDocument') - if (error != null) { - return callback(error) - } - return HistoryManager.recordAndFlushHistoryOps( - project_id, - doc_id, - appliedOps, - doc_ops_length, - project_ops_length, - function (error) { - profile.log('recordAndFlushHistoryOps') - if (error != null) { - return callback(error) - } - if (ranges_were_collapsed) { - logger.log( - { + return DocumentManager.getDoc( + project_id, + doc_id, + function (error, lines, version, ranges, pathname, projectHistoryId) { + profile.log('getDoc') + if (error != null) { + return callback(error) + } + if (lines == null || version == null) { + return callback( + new Errors.NotFoundError(`document not found: ${doc_id}`) + ) + } + const previousVersion = version + return ShareJsUpdateManager.applyUpdate( + project_id, + doc_id, + update, + lines, + version, + function (error, updatedDocLines, version, appliedOps) { + profile.log('sharejs.applyUpdate') + if (error != null) { + return callback(error) + } + return RangesManager.applyUpdate( + project_id, + doc_id, + ranges, + appliedOps, + updatedDocLines, + function (error, new_ranges, ranges_were_collapsed) { + UpdateManager._addProjectHistoryMetadataToOps( + appliedOps, + pathname, + projectHistoryId, + lines + ) + profile.log('RangesManager.applyUpdate') + if (error != null) { + return callback(error) + } + return RedisManager.updateDocument( + project_id, + doc_id, + updatedDocLines, + version, + appliedOps, + new_ranges, + update.meta, + function (error, doc_ops_length, project_ops_length) { + profile.log('RedisManager.updateDocument') + if (error != null) { + return callback(error) + } + return HistoryManager.recordAndFlushHistoryOps( + project_id, + doc_id, + appliedOps, + doc_ops_length, + project_ops_length, + function (error) { + profile.log('recordAndFlushHistoryOps') + if (error != null) { + return callback(error) + } + if (ranges_were_collapsed) { + logger.log( + { + project_id, + doc_id, + previousVersion, + lines, + ranges, + update, + }, + 'update collapsed some ranges, snapshotting previous content' + ) + // Do this last, since it's a mongo call, and so potentially longest running + // If it overruns the lock, it's ok, since all of our redis work is done + return SnapshotManager.recordSnapshot( project_id, doc_id, previousVersion, + pathname, lines, ranges, - update - }, - 'update collapsed some ranges, snapshotting previous content' - ) - // Do this last, since it's a mongo call, and so potentially longest running - // If it overruns the lock, it's ok, since all of our redis work is done - return SnapshotManager.recordSnapshot( - project_id, - doc_id, - previousVersion, - pathname, - lines, - ranges, - function (error) { - if (error != null) { - logger.error( - { - err: error, - project_id, - doc_id, - version, - lines, - ranges - }, - 'error recording snapshot' - ) - return callback(error) - } else { - return callback() + function (error) { + if (error != null) { + logger.error( + { + err: error, + project_id, + doc_id, + version, + lines, + ranges, + }, + 'error recording snapshot' + ) + return callback(error) + } else { + return callback() + } } - } - ) - } else { - return callback() + ) + } else { + return callback() + } } - } - ) - } - ) - } - ) - } - ) - }) + ) + } + ) + } + ) + } + ) + } + ) }, lockUpdatesAndDo(method, project_id, doc_id, ...rest) { @@ -313,32 +312,38 @@ module.exports = UpdateManager = { ) } 
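
The hunk above only re-indents UpdateManager.applyUpdate, but it is worth seeing the shape of the pipeline it preserves. The following is a minimal sketch with invented stub names (docStore, shareJs, rangesManager, redisManager, historyManager are placeholders, not the real modules), showing the order the real code enforces: fetch the doc, transform the update through ShareJS, update ranges, write to Redis, then flush history.

// Hypothetical, simplified restatement of the callback pipeline above.
// Any error at any stage short-circuits straight back to the caller.
function applyUpdateSketch(deps, projectId, docId, update, callback) {
  const { docStore, shareJs, rangesManager, redisManager, historyManager } = deps
  docStore.getDoc(projectId, docId, (err, lines, version) => {
    if (err) return callback(err)
    shareJs.applyUpdate(projectId, docId, update, lines, version,
      (err, newLines, newVersion, appliedOps) => {
        if (err) return callback(err)
        rangesManager.applyUpdate(projectId, docId, appliedOps, newLines,
          (err, newRanges) => {
            if (err) return callback(err)
            // The Redis write comes before any history flushing, so the op
            // log and the document content are updated together.
            redisManager.updateDocument(projectId, docId, newLines, newVersion,
              appliedOps, newRanges, err => {
                if (err) return callback(err)
                historyManager.recordAndFlushHistoryOps(projectId, docId,
                  appliedOps, callback)
              })
          })
      })
  })
}
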
profile.log('processOutstandingUpdates') - return method(project_id, doc_id, ...Array.from(args), function ( - error, - ...response_args - ) { - if (error != null) { - return UpdateManager._handleErrorInsideLock( + return method( + project_id, + doc_id, + ...Array.from(args), + function (error, ...response_args) { + if (error != null) { + return UpdateManager._handleErrorInsideLock( + doc_id, + lockValue, + error, + callback + ) + } + profile.log('method') + return LockManager.releaseLock( doc_id, lockValue, - error, - callback + function (error) { + if (error != null) { + return callback(error) + } + profile.log('releaseLock').end() + callback(null, ...Array.from(response_args)) + // We held the lock for a while so updates might have queued up + return UpdateManager.continueProcessingUpdatesWithLock( + project_id, + doc_id + ) + } ) } - profile.log('method') - return LockManager.releaseLock(doc_id, lockValue, function (error) { - if (error != null) { - return callback(error) - } - profile.log('releaseLock').end() - callback(null, ...Array.from(response_args)) - // We held the lock for a while so updates might have queued up - return UpdateManager.continueProcessingUpdatesWithLock( - project_id, - doc_id - ) - }) - }) + ) } ) }) @@ -348,7 +353,7 @@ module.exports = UpdateManager = { if (callback == null) { callback = function (error) {} } - return LockManager.releaseLock(doc_id, lockValue, (lock_error) => + return LockManager.releaseLock(doc_id, lockValue, lock_error => callback(original_error) ) }, @@ -408,5 +413,5 @@ module.exports = UpdateManager = { return result })() }) - } + }, } diff --git a/services/document-updater/app/js/mongodb.js b/services/document-updater/app/js/mongodb.js index 115d0adcb7..b265e99426 100644 --- a/services/document-updater/app/js/mongodb.js +++ b/services/document-updater/app/js/mongodb.js @@ -33,5 +33,5 @@ module.exports = { db, ObjectId, healthCheck: require('util').callbackify(healthCheck), - waitForDb + waitForDb, } diff --git a/services/document-updater/app/js/sharejs/count.js b/services/document-updater/app/js/sharejs/count.js index 8d8477caf4..246f6b7985 100644 --- a/services/document-updater/app/js/sharejs/count.js +++ b/services/document-updater/app/js/sharejs/count.js @@ -34,4 +34,4 @@ exports.compose = function (op1, op2) { return [op1[0], op1[1] + op2[1]] } -exports.generateRandomOp = (doc) => [[doc, 1], doc + 1] +exports.generateRandomOp = doc => [[doc, 1], doc + 1] diff --git a/services/document-updater/app/js/sharejs/helpers.js b/services/document-updater/app/js/sharejs/helpers.js index 1d7b268e17..b1ab3c2a26 100644 --- a/services/document-updater/app/js/sharejs/helpers.js +++ b/services/document-updater/app/js/sharejs/helpers.js @@ -31,80 +31,84 @@ exports._bt = bootstrapTransform = function ( } // Transforms rightOp by leftOp. 
Returns ['rightOp', clientOp'] - type.transformX = type.transformX = transformX = function (leftOp, rightOp) { - checkValidOp(leftOp) - checkValidOp(rightOp) + type.transformX = + type.transformX = + transformX = + function (leftOp, rightOp) { + checkValidOp(leftOp) + checkValidOp(rightOp) - const newRightOp = [] + const newRightOp = [] - for (let rightComponent of Array.from(rightOp)) { - // Generate newLeftOp by composing leftOp by rightComponent - const newLeftOp = [] + for (let rightComponent of Array.from(rightOp)) { + // Generate newLeftOp by composing leftOp by rightComponent + const newLeftOp = [] - let k = 0 - while (k < leftOp.length) { - var l - const nextC = [] - transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC) - k++ + let k = 0 + while (k < leftOp.length) { + var l + const nextC = [] + transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC) + k++ - if (nextC.length === 1) { - rightComponent = nextC[0] - } else if (nextC.length === 0) { - for (l of Array.from(leftOp.slice(k))) { - append(newLeftOp, l) + if (nextC.length === 1) { + rightComponent = nextC[0] + } else if (nextC.length === 0) { + for (l of Array.from(leftOp.slice(k))) { + append(newLeftOp, l) + } + rightComponent = null + break + } else { + // Recurse. + const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)) + for (l of Array.from(l_)) { + append(newLeftOp, l) + } + for (const r of Array.from(r_)) { + append(newRightOp, r) + } + rightComponent = null + break + } } - rightComponent = null - break - } else { - // Recurse. - const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)) - for (l of Array.from(l_)) { - append(newLeftOp, l) + + if (rightComponent != null) { + append(newRightOp, rightComponent) } - for (const r of Array.from(r_)) { - append(newRightOp, r) - } - rightComponent = null - break + leftOp = newLeftOp } - } - if (rightComponent != null) { - append(newRightOp, rightComponent) + return [leftOp, newRightOp] } - leftOp = newLeftOp - } - - return [leftOp, newRightOp] - } // Transforms op with specified type ('left' or 'right') by otherOp. - return (type.transform = type.transform = function (op, otherOp, type) { - let _ - if (type !== 'left' && type !== 'right') { - throw new Error("type must be 'left' or 'right'") - } + return (type.transform = type.transform = + function (op, otherOp, type) { + let _ + if (type !== 'left' && type !== 'right') { + throw new Error("type must be 'left' or 'right'") + } - if (otherOp.length === 0) { - return op - } + if (otherOp.length === 0) { + return op + } - // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? - if (op.length === 1 && otherOp.length === 1) { - return transformComponent([], op[0], otherOp[0], type) - } + // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? 
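
The reflowed helpers above define type.transformX, which transforms two concurrent ops against each other, and derive type.transform from it by keeping one side of the result. A small usage sketch (not part of this patch), assuming the plain 'text' type from this directory with these helpers bootstrapped as in stock ShareJS:

const text = require('./text') // assumed relative path

// Two clients concurrently insert at position 0 of an empty snapshot.
const opA = [{ i: 'A', p: 0 }]
const opB = [{ i: 'B', p: 0 }]

// The 'left'/'right' argument breaks the tie between equal positions: the
// 'left' op keeps its position and the 'right' op is shifted past it, so
// both application orders converge on the same document.
const opBafterA = text.transform(opB, opA, 'right') // [{ i: 'B', p: 1 }]
const opAafterB = text.transform(opA, opB, 'left') // [{ i: 'A', p: 0 }]

console.log(text.apply(text.apply('', opA), opBafterA)) // 'AB'
console.log(text.apply(text.apply('', opB), opAafterB)) // 'AB'
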
+ if (op.length === 1 && otherOp.length === 1) { + return transformComponent([], op[0], otherOp[0], type) + } - if (type === 'left') { - let left - ;[left, _] = Array.from(transformX(op, otherOp)) - return left - } else { - let right - ;[_, right] = Array.from(transformX(otherOp, op)) - return right - } - }) + if (type === 'left') { + let left + ;[left, _] = Array.from(transformX(op, otherOp)) + return left + } else { + let right + ;[_, right] = Array.from(transformX(otherOp, op)) + return right + } + }) } if (typeof WEB === 'undefined') { diff --git a/services/document-updater/app/js/sharejs/json-api.js b/services/document-updater/app/js/sharejs/json-api.js index a8db564fdf..f429f2b397 100644 --- a/services/document-updater/app/js/sharejs/json-api.js +++ b/services/document-updater/app/js/sharejs/json-api.js @@ -353,5 +353,5 @@ json.api = { return result })() }) - } + }, } diff --git a/services/document-updater/app/js/sharejs/json.js b/services/document-updater/app/js/sharejs/json.js index fa9b030a82..14c3cbb519 100644 --- a/services/document-updater/app/js/sharejs/json.js +++ b/services/document-updater/app/js/sharejs/json.js @@ -59,12 +59,12 @@ json.invertComponent = function (c) { return c_ } -json.invert = (op) => - Array.from(op.slice().reverse()).map((c) => json.invertComponent(c)) +json.invert = op => + Array.from(op.slice().reverse()).map(c => json.invertComponent(c)) json.checkValidOp = function (op) {} -const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' +const isArray = o => Object.prototype.toString.call(o) === '[object Array]' json.checkList = function (elem) { if (!isArray(elem)) { throw new Error('Referenced element not a list') @@ -264,7 +264,7 @@ json.normalize = function (op) { // hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming // we have browser support for JSON. // http://jsperf.com/cloning-an-object/12 -var clone = (o) => JSON.parse(JSON.stringify(o)) +var clone = o => JSON.parse(JSON.stringify(o)) json.commonPath = function (p1, p2) { p1 = p1.slice() diff --git a/services/document-updater/app/js/sharejs/model.js b/services/document-updater/app/js/sharejs/model.js index 69d258738e..3b881c8d16 100644 --- a/services/document-updater/app/js/sharejs/model.js +++ b/services/document-updater/app/js/sharejs/model.js @@ -27,7 +27,7 @@ const { EventEmitter } = require('events') const queue = require('./syncqueue') const types = require('../types') -const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' +const isArray = o => Object.prototype.toString.call(o) === '[object Array]' // This constructor creates a new Model object. There will be one model object // per server context. @@ -225,7 +225,7 @@ module.exports = Model = function (db, options) { return callback(error) } - __guardMethod__(options.stats, 'writeOp', (o) => o.writeOp()) + __guardMethod__(options.stats, 'writeOp', o => o.writeOp()) // This is needed when we emit the 'change' event, below. const oldSnapshot = doc.snapshot @@ -303,7 +303,7 @@ module.exports = Model = function (db, options) { // Version of the snapshot thats in the database committedVersion: committedVersion != null ? committedVersion : data.v, snapshotWriteLock: false, - dbMeta + dbMeta, } doc.opQueue = makeOpQueue(docName, doc) @@ -352,9 +352,7 @@ module.exports = Model = function (db, options) { const load = function (docName, callback) { if (docs[docName]) { // The document is already loaded. Return immediately. 
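
The cacheHit/cacheMiss hooks in the model.js hunks sit around a load-coalescing cache: while a snapshot fetch is in flight, later callers for the same doc are queued rather than triggering a second database read. A compact sketch of that pattern, with invented names (docs, awaitingDb, and fetchFromDb are placeholders for the model's internals):

// Concurrent loads of the same doc share a single database fetch.
const docs = {} // docName -> loaded doc
const awaitingDb = {} // docName -> [callbacks] while a fetch is in flight

function load(docName, fetchFromDb, callback) {
  if (docs[docName]) return callback(null, docs[docName]) // cache hit
  if (awaitingDb[docName]) return awaitingDb[docName].push(callback) // coalesce
  awaitingDb[docName] = [callback] // cache miss: first caller starts the fetch
  fetchFromDb(docName, (error, doc) => {
    if (!error) docs[docName] = doc
    const callbacks = awaitingDb[docName]
    delete awaitingDb[docName]
    for (const cb of callbacks) cb(error, doc)
  })
}
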
- __guardMethod__(options.stats, 'cacheHit', (o) => - o.cacheHit('getSnapshot') - ) + __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')) return callback(null, docs[docName]) } @@ -370,7 +368,7 @@ module.exports = Model = function (db, options) { return callbacks.push(callback) } - __guardMethod__(options.stats, 'cacheMiss', (o1) => + __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot') ) @@ -447,20 +445,21 @@ module.exports = Model = function (db, options) { ) { let reapTimer clearTimeout(doc.reapTimer) - return (doc.reapTimer = reapTimer = setTimeout( - () => - tryWriteSnapshot(docName, function () { - // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - // in the middle of applying an operation, don't reap. - if ( - docs[docName].reapTimer === reapTimer && - doc.opQueue.busy === false - ) { - return delete docs[docName] - } - }), - options.reapTime - )) + return (doc.reapTimer = reapTimer = + setTimeout( + () => + tryWriteSnapshot(docName, function () { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ( + docs[docName].reapTimer === reapTimer && + doc.opQueue.busy === false + ) { + return delete docs[docName] + } + }), + options.reapTime + )) } }) } @@ -490,7 +489,7 @@ module.exports = Model = function (db, options) { doc.snapshotWriteLock = true - __guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot()) + __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()) const writeSnapshot = (db != null ? db.writeSnapshot : undefined) || @@ -501,7 +500,7 @@ module.exports = Model = function (db, options) { meta: doc.meta, snapshot: doc.snapshot, // The database doesn't know about object types. - type: doc.type.name + type: doc.type.name, } // Commit snapshot. @@ -551,7 +550,7 @@ module.exports = Model = function (db, options) { snapshot: type.create(), type: type.name, meta: meta || {}, - v: 0 + v: 0, } const done = function (error, dbMeta) { @@ -864,7 +863,7 @@ module.exports = Model = function (db, options) { // Close the database connection. This is needed so nodejs can shut down cleanly. this.closeDb = function () { - __guardMethod__(db, 'close', (o) => o.close()) + __guardMethod__(db, 'close', o => o.close()) return (db = null) } } diff --git a/services/document-updater/app/js/sharejs/server/model.js b/services/document-updater/app/js/sharejs/server/model.js index 5bed90faa3..7b065ea20a 100644 --- a/services/document-updater/app/js/sharejs/server/model.js +++ b/services/document-updater/app/js/sharejs/server/model.js @@ -27,7 +27,7 @@ const { EventEmitter } = require('events') const queue = require('./syncqueue') const types = require('../types') -const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' +const isArray = o => Object.prototype.toString.call(o) === '[object Array]' // This constructor creates a new Model object. There will be one model object // per server context. @@ -232,7 +232,7 @@ module.exports = Model = function (db, options) { return callback(error) } - __guardMethod__(options.stats, 'writeOp', (o) => o.writeOp()) + __guardMethod__(options.stats, 'writeOp', o => o.writeOp()) // This is needed when we emit the 'change' event, below. const oldSnapshot = doc.snapshot @@ -310,7 +310,7 @@ module.exports = Model = function (db, options) { // Version of the snapshot thats in the database committedVersion: committedVersion != null ? 
committedVersion : data.v, snapshotWriteLock: false, - dbMeta + dbMeta, } doc.opQueue = makeOpQueue(docName, doc) @@ -359,9 +359,7 @@ module.exports = Model = function (db, options) { const load = function (docName, callback) { if (docs[docName]) { // The document is already loaded. Return immediately. - __guardMethod__(options.stats, 'cacheHit', (o) => - o.cacheHit('getSnapshot') - ) + __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')) return callback(null, docs[docName]) } @@ -377,7 +375,7 @@ module.exports = Model = function (db, options) { return callbacks.push(callback) } - __guardMethod__(options.stats, 'cacheMiss', (o1) => + __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot') ) @@ -454,20 +452,21 @@ module.exports = Model = function (db, options) { ) { let reapTimer clearTimeout(doc.reapTimer) - return (doc.reapTimer = reapTimer = setTimeout( - () => - tryWriteSnapshot(docName, function () { - // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - // in the middle of applying an operation, don't reap. - if ( - docs[docName].reapTimer === reapTimer && - doc.opQueue.busy === false - ) { - return delete docs[docName] - } - }), - options.reapTime - )) + return (doc.reapTimer = reapTimer = + setTimeout( + () => + tryWriteSnapshot(docName, function () { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ( + docs[docName].reapTimer === reapTimer && + doc.opQueue.busy === false + ) { + return delete docs[docName] + } + }), + options.reapTime + )) } }) } @@ -497,7 +496,7 @@ module.exports = Model = function (db, options) { doc.snapshotWriteLock = true - __guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot()) + __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()) const writeSnapshot = (db != null ? db.writeSnapshot : undefined) || @@ -508,7 +507,7 @@ module.exports = Model = function (db, options) { meta: doc.meta, snapshot: doc.snapshot, // The database doesn't know about object types. - type: doc.type.name + type: doc.type.name, } // Commit snapshot. @@ -558,7 +557,7 @@ module.exports = Model = function (db, options) { snapshot: type.create(), type: type.name, meta: meta || {}, - v: 0 + v: 0, } const done = function (error, dbMeta) { @@ -871,7 +870,7 @@ module.exports = Model = function (db, options) { // Close the database connection. This is needed so nodejs can shut down cleanly. this.closeDb = function () { - __guardMethod__(db, 'close', (o) => o.close()) + __guardMethod__(db, 'close', o => o.close()) return (db = null) } } diff --git a/services/document-updater/app/js/sharejs/simple.js b/services/document-updater/app/js/sharejs/simple.js index 781cdc0293..41f7eed285 100644 --- a/services/document-updater/app/js/sharejs/simple.js +++ b/services/document-updater/app/js/sharejs/simple.js @@ -50,5 +50,5 @@ module.exports = { } return { position: pos, text: op1.text } - } + }, } diff --git a/services/document-updater/app/js/sharejs/text-api.js b/services/document-updater/app/js/sharejs/text-api.js index d30f009cdb..aa2beef446 100644 --- a/services/document-updater/app/js/sharejs/text-api.js +++ b/services/document-updater/app/js/sharejs/text-api.js @@ -42,11 +42,11 @@ text.api = { _register() { return this.on('remoteop', function (op) { - return Array.from(op).map((component) => + return Array.from(op).map(component => component.i !== undefined ? 
this.emit('insert', component.p, component.i) : this.emit('delete', component.p, component.d) ) }) - } + }, } diff --git a/services/document-updater/app/js/sharejs/text-composable-api.js b/services/document-updater/app/js/sharejs/text-composable-api.js index 9b237ce91b..122e119ae4 100644 --- a/services/document-updater/app/js/sharejs/text-composable-api.js +++ b/services/document-updater/app/js/sharejs/text-composable-api.js @@ -44,7 +44,7 @@ type.api = { const op = type.normalize([ pos, { d: this.snapshot.slice(pos, pos + length) }, - this.snapshot.length - pos - length + this.snapshot.length - pos - length, ]) this.submitOp(op, callback) @@ -70,7 +70,7 @@ type.api = { return result })() }) - } + }, } // We don't increment pos, because the position // specified is after the delete has happened. diff --git a/services/document-updater/app/js/sharejs/text-composable.js b/services/document-updater/app/js/sharejs/text-composable.js index 6898589908..58c4df2b28 100644 --- a/services/document-updater/app/js/sharejs/text-composable.js +++ b/services/document-updater/app/js/sharejs/text-composable.js @@ -74,7 +74,7 @@ const checkOp = function (op) { // Makes a function for appending components to a given op. // Exported for the randomOpGenerator. -exports._makeAppend = makeAppend = (op) => +exports._makeAppend = makeAppend = op => function (component) { if (component === 0 || component.i === '' || component.d === '') { } else if (op.length === 0) { diff --git a/services/document-updater/app/js/sharejs/text-tp2-api.js b/services/document-updater/app/js/sharejs/text-tp2-api.js index 3ab7ef1cb5..8ffbd60c50 100644 --- a/services/document-updater/app/js/sharejs/text-tp2-api.js +++ b/services/document-updater/app/js/sharejs/text-tp2-api.js @@ -49,7 +49,7 @@ type.api = { // Flatten a document into a string getText() { const strings = Array.from(this.snapshot.data).filter( - (elem) => typeof elem === 'string' + elem => typeof elem === 'string' ) return strings.join('') }, @@ -129,5 +129,5 @@ type.api = { } } }) - } + }, } diff --git a/services/document-updater/app/js/sharejs/text-tp2.js b/services/document-updater/app/js/sharejs/text-tp2.js index f1e3c97dd6..67a70439c7 100644 --- a/services/document-updater/app/js/sharejs/text-tp2.js +++ b/services/document-updater/app/js/sharejs/text-tp2.js @@ -65,7 +65,7 @@ var type = { } return doc - } + }, } const checkOp = function (op) { @@ -346,7 +346,7 @@ const transformer = function (op, otherOp, goForwards, side) { // transform - insert skips over inserted parts if (side === 'left') { // The left insert should go first. - while (__guard__(peek(), (x) => x.i) !== undefined) { + while (__guard__(peek(), x => x.i) !== undefined) { append(newOp, take()) } } diff --git a/services/document-updater/app/js/sharejs/text.js b/services/document-updater/app/js/sharejs/text.js index 34886b597d..54f7094c45 100644 --- a/services/document-updater/app/js/sharejs/text.js +++ b/services/document-updater/app/js/sharejs/text.js @@ -110,7 +110,7 @@ text._append = append = function (newOp, c) { ) { return (newOp[newOp.length - 1] = { i: strInject(last.i, c.p - last.p, c.i), - p: last.p + p: last.p, }) } else if ( last.d != null && @@ -120,7 +120,7 @@ text._append = append = function (newOp, c) { ) { return (newOp[newOp.length - 1] = { d: strInject(c.d, last.p - c.p, last.d), - p: c.p + p: c.p, }) } else { return newOp.push(c) @@ -142,7 +142,7 @@ text.compose = function (op1, op2) { // Attempt to compress the op components together 'as much as possible'. 
// This implementation preserves order and preserves create/delete pairs. -text.compress = (op) => text.compose([], op) +text.compress = op => text.compose([], op) text.normalize = function (op) { const newOp = [] @@ -216,7 +216,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) { if (c.i != null) { append(dest, { i: c.i, - p: transformPosition(c.p, otherC, side === 'right') + p: transformPosition(c.p, otherC, side === 'right'), }) } else { // Delete @@ -286,8 +286,8 @@ const invertComponent = function (c) { // No need to use append for invert, because the components won't be able to // cancel with one another. -text.invert = (op) => - Array.from(op.slice().reverse()).map((c) => invertComponent(c)) +text.invert = op => + Array.from(op.slice().reverse()).map(c => invertComponent(c)) if (typeof WEB !== 'undefined' && WEB !== null) { if (!exports.types) { diff --git a/services/document-updater/app/js/sharejs/types/count.js b/services/document-updater/app/js/sharejs/types/count.js index 8d8477caf4..246f6b7985 100644 --- a/services/document-updater/app/js/sharejs/types/count.js +++ b/services/document-updater/app/js/sharejs/types/count.js @@ -34,4 +34,4 @@ exports.compose = function (op1, op2) { return [op1[0], op1[1] + op2[1]] } -exports.generateRandomOp = (doc) => [[doc, 1], doc + 1] +exports.generateRandomOp = doc => [[doc, 1], doc + 1] diff --git a/services/document-updater/app/js/sharejs/types/helpers.js b/services/document-updater/app/js/sharejs/types/helpers.js index 1d7b268e17..b1ab3c2a26 100644 --- a/services/document-updater/app/js/sharejs/types/helpers.js +++ b/services/document-updater/app/js/sharejs/types/helpers.js @@ -31,80 +31,84 @@ exports._bt = bootstrapTransform = function ( } // Transforms rightOp by leftOp. Returns ['rightOp', clientOp'] - type.transformX = type.transformX = transformX = function (leftOp, rightOp) { - checkValidOp(leftOp) - checkValidOp(rightOp) + type.transformX = + type.transformX = + transformX = + function (leftOp, rightOp) { + checkValidOp(leftOp) + checkValidOp(rightOp) - const newRightOp = [] + const newRightOp = [] - for (let rightComponent of Array.from(rightOp)) { - // Generate newLeftOp by composing leftOp by rightComponent - const newLeftOp = [] + for (let rightComponent of Array.from(rightOp)) { + // Generate newLeftOp by composing leftOp by rightComponent + const newLeftOp = [] - let k = 0 - while (k < leftOp.length) { - var l - const nextC = [] - transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC) - k++ + let k = 0 + while (k < leftOp.length) { + var l + const nextC = [] + transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC) + k++ - if (nextC.length === 1) { - rightComponent = nextC[0] - } else if (nextC.length === 0) { - for (l of Array.from(leftOp.slice(k))) { - append(newLeftOp, l) + if (nextC.length === 1) { + rightComponent = nextC[0] + } else if (nextC.length === 0) { + for (l of Array.from(leftOp.slice(k))) { + append(newLeftOp, l) + } + rightComponent = null + break + } else { + // Recurse. + const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)) + for (l of Array.from(l_)) { + append(newLeftOp, l) + } + for (const r of Array.from(r_)) { + append(newRightOp, r) + } + rightComponent = null + break + } } - rightComponent = null - break - } else { - // Recurse. 
- const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)) - for (l of Array.from(l_)) { - append(newLeftOp, l) + + if (rightComponent != null) { + append(newRightOp, rightComponent) } - for (const r of Array.from(r_)) { - append(newRightOp, r) - } - rightComponent = null - break + leftOp = newLeftOp } - } - if (rightComponent != null) { - append(newRightOp, rightComponent) + return [leftOp, newRightOp] } - leftOp = newLeftOp - } - - return [leftOp, newRightOp] - } // Transforms op with specified type ('left' or 'right') by otherOp. - return (type.transform = type.transform = function (op, otherOp, type) { - let _ - if (type !== 'left' && type !== 'right') { - throw new Error("type must be 'left' or 'right'") - } + return (type.transform = type.transform = + function (op, otherOp, type) { + let _ + if (type !== 'left' && type !== 'right') { + throw new Error("type must be 'left' or 'right'") + } - if (otherOp.length === 0) { - return op - } + if (otherOp.length === 0) { + return op + } - // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? - if (op.length === 1 && otherOp.length === 1) { - return transformComponent([], op[0], otherOp[0], type) - } + // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? + if (op.length === 1 && otherOp.length === 1) { + return transformComponent([], op[0], otherOp[0], type) + } - if (type === 'left') { - let left - ;[left, _] = Array.from(transformX(op, otherOp)) - return left - } else { - let right - ;[_, right] = Array.from(transformX(otherOp, op)) - return right - } - }) + if (type === 'left') { + let left + ;[left, _] = Array.from(transformX(op, otherOp)) + return left + } else { + let right + ;[_, right] = Array.from(transformX(otherOp, op)) + return right + } + }) } if (typeof WEB === 'undefined') { diff --git a/services/document-updater/app/js/sharejs/types/json-api.js b/services/document-updater/app/js/sharejs/types/json-api.js index a8db564fdf..f429f2b397 100644 --- a/services/document-updater/app/js/sharejs/types/json-api.js +++ b/services/document-updater/app/js/sharejs/types/json-api.js @@ -353,5 +353,5 @@ json.api = { return result })() }) - } + }, } diff --git a/services/document-updater/app/js/sharejs/types/json.js b/services/document-updater/app/js/sharejs/types/json.js index fa9b030a82..14c3cbb519 100644 --- a/services/document-updater/app/js/sharejs/types/json.js +++ b/services/document-updater/app/js/sharejs/types/json.js @@ -59,12 +59,12 @@ json.invertComponent = function (c) { return c_ } -json.invert = (op) => - Array.from(op.slice().reverse()).map((c) => json.invertComponent(c)) +json.invert = op => + Array.from(op.slice().reverse()).map(c => json.invertComponent(c)) json.checkValidOp = function (op) {} -const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' +const isArray = o => Object.prototype.toString.call(o) === '[object Array]' json.checkList = function (elem) { if (!isArray(elem)) { throw new Error('Referenced element not a list') @@ -264,7 +264,7 @@ json.normalize = function (op) { // hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming // we have browser support for JSON. 
// http://jsperf.com/cloning-an-object/12 -var clone = (o) => JSON.parse(JSON.stringify(o)) +var clone = o => JSON.parse(JSON.stringify(o)) json.commonPath = function (p1, p2) { p1 = p1.slice() diff --git a/services/document-updater/app/js/sharejs/types/model.js b/services/document-updater/app/js/sharejs/types/model.js index 69d258738e..3b881c8d16 100644 --- a/services/document-updater/app/js/sharejs/types/model.js +++ b/services/document-updater/app/js/sharejs/types/model.js @@ -27,7 +27,7 @@ const { EventEmitter } = require('events') const queue = require('./syncqueue') const types = require('../types') -const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]' +const isArray = o => Object.prototype.toString.call(o) === '[object Array]' // This constructor creates a new Model object. There will be one model object // per server context. @@ -225,7 +225,7 @@ module.exports = Model = function (db, options) { return callback(error) } - __guardMethod__(options.stats, 'writeOp', (o) => o.writeOp()) + __guardMethod__(options.stats, 'writeOp', o => o.writeOp()) // This is needed when we emit the 'change' event, below. const oldSnapshot = doc.snapshot @@ -303,7 +303,7 @@ module.exports = Model = function (db, options) { // Version of the snapshot thats in the database committedVersion: committedVersion != null ? committedVersion : data.v, snapshotWriteLock: false, - dbMeta + dbMeta, } doc.opQueue = makeOpQueue(docName, doc) @@ -352,9 +352,7 @@ module.exports = Model = function (db, options) { const load = function (docName, callback) { if (docs[docName]) { // The document is already loaded. Return immediately. - __guardMethod__(options.stats, 'cacheHit', (o) => - o.cacheHit('getSnapshot') - ) + __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')) return callback(null, docs[docName]) } @@ -370,7 +368,7 @@ module.exports = Model = function (db, options) { return callbacks.push(callback) } - __guardMethod__(options.stats, 'cacheMiss', (o1) => + __guardMethod__(options.stats, 'cacheMiss', o1 => o1.cacheMiss('getSnapshot') ) @@ -447,20 +445,21 @@ module.exports = Model = function (db, options) { ) { let reapTimer clearTimeout(doc.reapTimer) - return (doc.reapTimer = reapTimer = setTimeout( - () => - tryWriteSnapshot(docName, function () { - // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're - // in the middle of applying an operation, don't reap. - if ( - docs[docName].reapTimer === reapTimer && - doc.opQueue.busy === false - ) { - return delete docs[docName] - } - }), - options.reapTime - )) + return (doc.reapTimer = reapTimer = + setTimeout( + () => + tryWriteSnapshot(docName, function () { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ( + docs[docName].reapTimer === reapTimer && + doc.opQueue.busy === false + ) { + return delete docs[docName] + } + }), + options.reapTime + )) } }) } @@ -490,7 +489,7 @@ module.exports = Model = function (db, options) { doc.snapshotWriteLock = true - __guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot()) + __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()) const writeSnapshot = (db != null ? db.writeSnapshot : undefined) || @@ -501,7 +500,7 @@ module.exports = Model = function (db, options) { meta: doc.meta, snapshot: doc.snapshot, // The database doesn't know about object types. 
- type: doc.type.name + type: doc.type.name, } // Commit snapshot. @@ -551,7 +550,7 @@ module.exports = Model = function (db, options) { snapshot: type.create(), type: type.name, meta: meta || {}, - v: 0 + v: 0, } const done = function (error, dbMeta) { @@ -864,7 +863,7 @@ module.exports = Model = function (db, options) { // Close the database connection. This is needed so nodejs can shut down cleanly. this.closeDb = function () { - __guardMethod__(db, 'close', (o) => o.close()) + __guardMethod__(db, 'close', o => o.close()) return (db = null) } } diff --git a/services/document-updater/app/js/sharejs/types/simple.js b/services/document-updater/app/js/sharejs/types/simple.js index 781cdc0293..41f7eed285 100644 --- a/services/document-updater/app/js/sharejs/types/simple.js +++ b/services/document-updater/app/js/sharejs/types/simple.js @@ -50,5 +50,5 @@ module.exports = { } return { position: pos, text: op1.text } - } + }, } diff --git a/services/document-updater/app/js/sharejs/types/text-api.js b/services/document-updater/app/js/sharejs/types/text-api.js index d30f009cdb..aa2beef446 100644 --- a/services/document-updater/app/js/sharejs/types/text-api.js +++ b/services/document-updater/app/js/sharejs/types/text-api.js @@ -42,11 +42,11 @@ text.api = { _register() { return this.on('remoteop', function (op) { - return Array.from(op).map((component) => + return Array.from(op).map(component => component.i !== undefined ? this.emit('insert', component.p, component.i) : this.emit('delete', component.p, component.d) ) }) - } + }, } diff --git a/services/document-updater/app/js/sharejs/types/text-composable-api.js b/services/document-updater/app/js/sharejs/types/text-composable-api.js index 9b237ce91b..122e119ae4 100644 --- a/services/document-updater/app/js/sharejs/types/text-composable-api.js +++ b/services/document-updater/app/js/sharejs/types/text-composable-api.js @@ -44,7 +44,7 @@ type.api = { const op = type.normalize([ pos, { d: this.snapshot.slice(pos, pos + length) }, - this.snapshot.length - pos - length + this.snapshot.length - pos - length, ]) this.submitOp(op, callback) @@ -70,7 +70,7 @@ type.api = { return result })() }) - } + }, } // We don't increment pos, because the position // specified is after the delete has happened. diff --git a/services/document-updater/app/js/sharejs/types/text-composable.js b/services/document-updater/app/js/sharejs/types/text-composable.js index 34246a5f51..ce0f3ac226 100644 --- a/services/document-updater/app/js/sharejs/types/text-composable.js +++ b/services/document-updater/app/js/sharejs/types/text-composable.js @@ -75,7 +75,7 @@ const checkOp = function (op) { // Makes a function for appending components to a given op. // Exported for the randomOpGenerator. 
-moduleExport._makeAppend = makeAppend = (op) => +moduleExport._makeAppend = makeAppend = op => function (component) { if (component === 0 || component.i === '' || component.d === '') { } else if (op.length === 0) { diff --git a/services/document-updater/app/js/sharejs/types/text-tp2-api.js b/services/document-updater/app/js/sharejs/types/text-tp2-api.js index 3ab7ef1cb5..8ffbd60c50 100644 --- a/services/document-updater/app/js/sharejs/types/text-tp2-api.js +++ b/services/document-updater/app/js/sharejs/types/text-tp2-api.js @@ -49,7 +49,7 @@ type.api = { // Flatten a document into a string getText() { const strings = Array.from(this.snapshot.data).filter( - (elem) => typeof elem === 'string' + elem => typeof elem === 'string' ) return strings.join('') }, @@ -129,5 +129,5 @@ type.api = { } } }) - } + }, } diff --git a/services/document-updater/app/js/sharejs/types/text-tp2.js b/services/document-updater/app/js/sharejs/types/text-tp2.js index f1e3c97dd6..67a70439c7 100644 --- a/services/document-updater/app/js/sharejs/types/text-tp2.js +++ b/services/document-updater/app/js/sharejs/types/text-tp2.js @@ -65,7 +65,7 @@ var type = { } return doc - } + }, } const checkOp = function (op) { @@ -346,7 +346,7 @@ const transformer = function (op, otherOp, goForwards, side) { // transform - insert skips over inserted parts if (side === 'left') { // The left insert should go first. - while (__guard__(peek(), (x) => x.i) !== undefined) { + while (__guard__(peek(), x => x.i) !== undefined) { append(newOp, take()) } } diff --git a/services/document-updater/app/js/sharejs/types/text.js b/services/document-updater/app/js/sharejs/types/text.js index 530d4c4987..bbbe36e0f6 100644 --- a/services/document-updater/app/js/sharejs/types/text.js +++ b/services/document-updater/app/js/sharejs/types/text.js @@ -124,7 +124,7 @@ text._append = append = function (newOp, c) { ) { return (newOp[newOp.length - 1] = { i: strInject(last.i, c.p - last.p, c.i), - p: last.p + p: last.p, }) } else if ( last.d != null && @@ -134,7 +134,7 @@ text._append = append = function (newOp, c) { ) { return (newOp[newOp.length - 1] = { d: strInject(c.d, last.p - c.p, last.d), - p: c.p + p: c.p, }) } else { return newOp.push(c) @@ -156,7 +156,7 @@ text.compose = function (op1, op2) { // Attempt to compress the op components together 'as much as possible'. // This implementation preserves order and preserves create/delete pairs. -text.compress = (op) => text.compose([], op) +text.compress = op => text.compose([], op) text.normalize = function (op) { const newOp = [] @@ -235,7 +235,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) { if (c.i != null) { append(dest, { i: c.i, - p: transformPosition(c.p, otherC, side === 'right') + p: transformPosition(c.p, otherC, side === 'right'), }) } else if (c.d != null) { // Delete @@ -305,7 +305,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) { append(dest, { c: c.c, p: transformPosition(c.p, otherC, true), - t: c.t + t: c.t, }) } } else if (otherC.d != null) { @@ -362,8 +362,8 @@ const invertComponent = function (c) { // No need to use append for invert, because the components won't be able to // cancel with one another. 
-text.invert = (op) => - Array.from(op.slice().reverse()).map((c) => invertComponent(c)) +text.invert = op => + Array.from(op.slice().reverse()).map(c => invertComponent(c)) if (typeof WEB !== 'undefined' && WEB !== null) { if (!exports.types) { diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js index f9c5f54921..d98eb9e265 100755 --- a/services/document-updater/config/settings.defaults.js +++ b/services/document-updater/config/settings.defaults.js @@ -2,8 +2,8 @@ module.exports = { internal: { documentupdater: { host: process.env.LISTEN_ADDRESS || 'localhost', - port: 3003 - } + port: 3003, + }, }, apis: { @@ -12,15 +12,15 @@ module.exports = { process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost' }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`, user: process.env.WEB_API_USER || 'sharelatex', - pass: process.env.WEB_API_PASSWORD || 'password' + pass: process.env.WEB_API_PASSWORD || 'password', }, trackchanges: { - url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015` + url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`, }, project_history: { enabled: true, - url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054` - } + url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`, + }, }, redis: { @@ -32,7 +32,7 @@ module.exports = { process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', maxRetriesPerRequest: parseInt( process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' - ) + ), }, history: { @@ -50,8 +50,8 @@ module.exports = { }, docsWithHistoryOps({ project_id: projectId }) { return `DocsWithHistoryOps:{${projectId}}` - } - } + }, + }, }, project_history: { @@ -74,8 +74,8 @@ module.exports = { }, projectHistoryFirstOpTimestamp({ project_id: projectId }) { return `ProjectHistory:FirstOpTimestamp:{${projectId}}` - } - } + }, + }, }, lock: { @@ -90,8 +90,8 @@ module.exports = { key_schema: { blockingKey({ doc_id: docId }) { return `Blocking:{${docId}}` - } - } + }, + }, }, documentupdater: { @@ -159,9 +159,9 @@ module.exports = { }, flushAndDeleteQueue() { return 'DocUpdaterFlushAndDeleteQueue' - } - } - } + }, + }, + }, }, max_doc_length: 2 * 1024 * 1024, // 2mb @@ -173,15 +173,15 @@ module.exports = { mongo: { options: { useUnifiedTopology: - (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true' + (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true', }, url: process.env.MONGO_CONNECTION_STRING || - `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex` + `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`, }, sentry: { - dsn: process.env.SENTRY_DSN + dsn: process.env.SENTRY_DSN, }, publishOnIndividualChannels: @@ -191,5 +191,5 @@ module.exports = { smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds - disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false // don't flush track-changes for projects using project-history + disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false, // don't flush track-changes for projects using project-history } diff --git a/services/document-updater/expire_docops.js b/services/document-updater/expire_docops.js index b9fafb072d..c3b2b80706 100644 --- a/services/document-updater/expire_docops.js +++ b/services/document-updater/expire_docops.js @@ -39,7 +39,7 @@ const getKeys = function (pattern, callback) { return async.concatSeries(nodes, doKeyLookupForNode, callback) } -const expireDocOps = (callback) => +const expireDocOps = 
callback => // eslint-disable-next-line handle-callback-err getKeys(keys.docOps({ doc_id: '*' }), (error, keys) => async.mapSeries( diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index 4c069008bc..ec14300d21 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -42,10 +42,10 @@ describe('Applying updates to a doc', function () { op: [ { i: 'one and a half\n', - p: 4 - } + p: 4, + }, ], - v: this.version + v: this.version, } this.result = ['one', 'one and a half', 'two', 'three'] return DocUpdaterApp.ensureRunning(done) @@ -55,19 +55,19 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) sinon.spy(MockWebApi, 'getDocument') this.startTime = Date.now() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, this.update, - (error) => { + error => { if (error != null) { throw error } @@ -144,7 +144,7 @@ describe('Applying updates to a doc', function () { it('should set the first op timestamp', function (done) { rclient_project_history.get( ProjectHistoryKeys.projectHistoryFirstOpTimestamp({ - project_id: this.project_id + project_id: this.project_id, }), (error, result) => { if (error != null) { @@ -167,7 +167,7 @@ describe('Applying updates to a doc', function () { this.project_id, this.doc_id, this.second_update, - (error) => { + error => { if (error != null) { throw error } @@ -180,7 +180,7 @@ describe('Applying updates to a doc', function () { return it('should not change the first op timestamp', function (done) { rclient_project_history.get( ProjectHistoryKeys.projectHistoryFirstOpTimestamp({ - project_id: this.project_id + project_id: this.project_id, }), (error, result) => { if (error != null) { @@ -199,14 +199,14 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { if (error != null) { throw error } @@ -215,7 +215,7 @@ describe('Applying updates to a doc', function () { this.project_id, this.doc_id, this.update, - (error) => { + error => { if (error != null) { throw error } @@ -284,15 +284,15 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, version: this.version, - projectHistoryType: 'project-history' + projectHistoryType: 'project-history', }) - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { if (error != null) { throw error } @@ -301,7 +301,7 @@ describe('Applying updates to a doc', function () { 
this.project_id, this.doc_id, this.update, - (error) => { + error => { if (error != null) { throw error } @@ -360,12 +360,12 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) const lines = ['', '', ''] MockWebApi.insertDoc(this.project_id, this.doc_id, { lines, - version: 0 + version: 0, }) this.updates = [ { doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] }, @@ -378,7 +378,7 @@ describe('Applying updates to a doc', function () { { doc_id: this.doc_id, v: 7, op: [{ i: 'o', p: 7 }] }, { doc_id: this.doc_id, v: 8, op: [{ i: 'r', p: 8 }] }, { doc_id: this.doc_id, v: 9, op: [{ i: 'l', p: 9 }] }, - { doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] } + { doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] }, ] this.my_result = ['hello world', '', ''] return done() @@ -388,8 +388,8 @@ describe('Applying updates to a doc', function () { let update const actions = [] for (update of Array.from(this.updates.slice(0, 6))) { - ;((update) => { - return actions.push((callback) => + ;(update => { + return actions.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, @@ -399,12 +399,12 @@ describe('Applying updates to a doc', function () { ) })(update) } - actions.push((callback) => + actions.push(callback => DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback) ) for (update of Array.from(this.updates.slice(6))) { - ;((update) => { - return actions.push((callback) => + ;(update => { + return actions.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, @@ -415,7 +415,7 @@ describe('Applying updates to a doc', function () { })(update) } - async.series(actions, (error) => { + async.series(actions, error => { if (error != null) { throw error } @@ -437,7 +437,7 @@ describe('Applying updates to a doc', function () { 0, -1, (error, updates) => { - updates = Array.from(updates).map((u) => JSON.parse(u)) + updates = Array.from(updates).map(u => JSON.parse(u)) for (let i = 0; i < this.updates.length; i++) { const appliedUpdate = this.updates[i] appliedUpdate.op.should.deep.equal(updates[i].op) @@ -462,7 +462,7 @@ describe('Applying updates to a doc', function () { 0, -1, (error, updates) => { - updates = Array.from(updates).map((u) => JSON.parse(u)) + updates = Array.from(updates).map(u => JSON.parse(u)) for (let i = 0; i < this.updates.length; i++) { const appliedUpdate = this.updates[i] appliedUpdate.op.should.deep.equal(updates[i].op) @@ -478,12 +478,12 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) const lines = ['', '', ''] MockWebApi.insertDoc(this.project_id, this.doc_id, { lines, - version: 0 + version: 0, }) this.updates = [ { doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] }, @@ -491,7 +491,7 @@ describe('Applying updates to a doc', function () { { doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] }, { doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] }, { doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] }, - { doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] } + { doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] }, ] this.my_result = ['hello', 'world', ''] return done() @@ -501,8 +501,8 @@ describe('Applying updates to a doc', function () { let update const actions = [] for (update of 
Array.from(this.updates.slice(0, 5))) { - ;((update) => { - return actions.push((callback) => + ;(update => { + return actions.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, @@ -512,12 +512,12 @@ describe('Applying updates to a doc', function () { ) })(update) } - actions.push((callback) => + actions.push(callback => DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback) ) for (update of Array.from(this.updates.slice(5))) { - ;((update) => { - return actions.push((callback) => + ;(update => { + return actions.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, @@ -528,7 +528,7 @@ describe('Applying updates to a doc', function () { })(update) } - async.series(actions, (error) => { + async.series(actions, error => { if (error != null) { throw error } @@ -550,16 +550,16 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) this.broken_update = { doc_id: this.doc_id, v: this.version, - op: [{ d: 'not the correct content', p: 0 }] + op: [{ d: 'not the correct content', p: 0 }], } MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) DocUpdaterClient.subscribeToAppliedOps( @@ -570,7 +570,7 @@ describe('Applying updates to a doc', function () { this.project_id, this.doc_id, this.broken_update, - (error) => { + error => { if (error != null) { throw error } @@ -599,7 +599,7 @@ describe('Applying updates to a doc', function () { return JSON.parse(message).should.deep.include({ project_id: this.project_id, doc_id: this.doc_id, - error: 'Delete component does not match' + error: 'Delete component does not match', }) }) }) @@ -608,7 +608,7 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) const updates = [] for (let v = 0; v <= 199; v++) { @@ -616,7 +616,7 @@ describe('Applying updates to a doc', function () { updates.push({ doc_id: this.doc_id, op: [{ i: v.toString(), p: 0 }], - v + v, }) } @@ -624,14 +624,14 @@ describe('Applying updates to a doc', function () { MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: 0 + version: 0, }) // Send updates in chunks to causes multiple flushes const actions = [] for (let i = 0; i <= 19; i++) { - ;((i) => { - return actions.push((cb) => { + ;(i => { + return actions.push(cb => { return DocUpdaterClient.sendUpdates( this.project_id, this.doc_id, @@ -641,7 +641,7 @@ describe('Applying updates to a doc', function () { }) })(i) } - async.series(actions, (error) => { + async.series(actions, error => { if (error != null) { throw error } @@ -663,22 +663,22 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: this.lines + lines: this.lines, }) const update = { doc: this.doc_id, op: this.update.op, - v: 0 + v: 0, } DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, update, - (error) => { + error => { if (error != null) { throw error } @@ -705,11 +705,11 @@ describe('Applying updates to a doc', function () { before(function (done) { 
;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) DocUpdaterClient.subscribeToAppliedOps( @@ -725,15 +725,15 @@ describe('Applying updates to a doc', function () { op: [ { i: 'one and a half\n', - p: 4 - } + p: 4, + }, ], v: this.version, meta: { - source: 'ikHceq3yfAdQYzBo4-xZ' - } + source: 'ikHceq3yfAdQYzBo4-xZ', + }, }, - (error) => { + error => { if (error != null) { throw error } @@ -746,16 +746,16 @@ describe('Applying updates to a doc', function () { op: [ { i: 'one and a half\n', - p: 4 - } + p: 4, + }, ], v: this.version, dupIfSource: ['ikHceq3yfAdQYzBo4-xZ'], meta: { - source: 'ikHceq3yfAdQYzBo4-xZ' - } + source: 'ikHceq3yfAdQYzBo4-xZ', + }, }, - (error) => { + error => { if (error != null) { throw error } @@ -795,12 +795,12 @@ describe('Applying updates to a doc', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) this.non_existing = { doc_id: this.doc_id, v: this.version, - op: [{ d: 'content', p: 0 }] + op: [{ d: 'content', p: 0 }], } DocUpdaterClient.subscribeToAppliedOps( @@ -811,7 +811,7 @@ describe('Applying updates to a doc', function () { this.project_id, this.doc_id, this.non_existing, - (error) => { + error => { if (error != null) { throw error } @@ -840,7 +840,7 @@ describe('Applying updates to a doc', function () { return JSON.parse(message).should.deep.include({ project_id: this.project_id, doc_id: this.doc_id, - error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}` + error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}`, }) }) }) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js index ea4cc7b0ee..6d4fc3c019 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -23,10 +23,10 @@ describe("Applying updates to a project's structure", function () { type: 'rename-file', id: DocUpdaterClient.randomId(), pathname: '/file-path', - newPathname: '/new-file-path' + newPathname: '/new-file-path', } this.updates = [this.fileUpdate] - DocUpdaterApp.ensureRunning((error) => { + DocUpdaterApp.ensureRunning(error => { if (error) { return done(error) } @@ -35,7 +35,7 @@ describe("Applying updates to a project's structure", function () { this.user_id, this.updates, this.version, - (error) => { + error => { if (error) { return done(error) } @@ -75,7 +75,7 @@ describe("Applying updates to a project's structure", function () { type: 'rename-doc', id: DocUpdaterClient.randomId(), pathname: '/doc-path', - newPathname: '/new-doc-path' + newPathname: '/new-doc-path', } this.updates = [this.update] }) @@ -88,7 +88,7 @@ describe("Applying updates to a project's structure", function () { this.user_id, this.updates, this.version, - (error) => { + error => { if (error) { return done(error) } @@ -125,28 +125,24 @@ describe("Applying updates to a project's structure", function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.update.id, {}) - DocUpdaterClient.preloadDoc( - 
this.project_id, - this.update.id, - (error) => { - if (error) { - return done(error) - } - sinon.spy(MockWebApi, 'getDocument') - DocUpdaterClient.sendProjectUpdate( - this.project_id, - this.user_id, - this.updates, - this.version, - (error) => { - if (error) { - return done(error) - } - setTimeout(done, 200) - } - ) + DocUpdaterClient.preloadDoc(this.project_id, this.update.id, error => { + if (error) { + return done(error) } - ) + sinon.spy(MockWebApi, 'getDocument') + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.updates, + this.version, + error => { + if (error) { + return done(error) + } + setTimeout(done, 200) + } + ) + }) }) after(function () { @@ -198,31 +194,31 @@ describe("Applying updates to a project's structure", function () { type: 'rename-doc', id: DocUpdaterClient.randomId(), pathname: '/doc-path0', - newPathname: '/new-doc-path0' + newPathname: '/new-doc-path0', } this.docUpdate1 = { type: 'rename-doc', id: DocUpdaterClient.randomId(), pathname: '/doc-path1', - newPathname: '/new-doc-path1' + newPathname: '/new-doc-path1', } this.fileUpdate0 = { type: 'rename-file', id: DocUpdaterClient.randomId(), pathname: '/file-path0', - newPathname: '/new-file-path0' + newPathname: '/new-file-path0', } this.fileUpdate1 = { type: 'rename-file', id: DocUpdaterClient.randomId(), pathname: '/file-path1', - newPathname: '/new-file-path1' + newPathname: '/new-file-path1', } this.updates = [ this.docUpdate0, this.docUpdate1, this.fileUpdate0, - this.fileUpdate1 + this.fileUpdate1, ] }) @@ -234,7 +230,7 @@ describe("Applying updates to a project's structure", function () { this.user_id, this.updates, this.version, - (error) => { + error => { if (error) { return done(error) } @@ -299,7 +295,7 @@ describe("Applying updates to a project's structure", function () { type: 'add-file', id: DocUpdaterClient.randomId(), pathname: '/file-path', - url: 'filestore.example.com' + url: 'filestore.example.com', } this.updates = [this.fileUpdate] DocUpdaterClient.sendProjectUpdate( @@ -307,7 +303,7 @@ describe("Applying updates to a project's structure", function () { this.user_id, this.updates, this.version, - (error) => { + error => { if (error) { return done(error) } @@ -347,7 +343,7 @@ describe("Applying updates to a project's structure", function () { type: 'add-doc', id: DocUpdaterClient.randomId(), pathname: '/file-path', - docLines: 'a\nb' + docLines: 'a\nb', } this.updates = [this.docUpdate] DocUpdaterClient.sendProjectUpdate( @@ -355,7 +351,7 @@ describe("Applying updates to a project's structure", function () { this.user_id, this.updates, this.version, - (error) => { + error => { if (error) { return done(error) } @@ -401,7 +397,7 @@ describe("Applying updates to a project's structure", function () { type: 'add-doc', id: DocUpdaterClient.randomId(), pathname: '/file-' + v, - docLines: 'a\nb' + docLines: 'a\nb', }) } @@ -424,7 +420,7 @@ describe("Applying updates to a project's structure", function () { userId, updates.slice(250), this.version1, - (error) => { + error => { if (error) { return done(error) } @@ -460,7 +456,7 @@ describe("Applying updates to a project's structure", function () { type: 'add-doc', id: DocUpdaterClient.randomId(), pathname: '/file-' + v, - docLines: 'a\nb' + docLines: 'a\nb', }) } @@ -483,7 +479,7 @@ describe("Applying updates to a project's structure", function () { userId, updates.slice(10), this.version1, - (error) => { + error => { if (error) { return done(error) } diff --git 
a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js index 4051d4f5a4..5647c55d83 100644 --- a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js @@ -26,10 +26,10 @@ describe('Deleting a document', function () { op: [ { i: 'one and a half\n', - p: 4 - } + p: 4, + }, ], - v: this.version + v: this.version, } this.result = ['one', 'one and a half', 'two', 'three'] @@ -47,19 +47,19 @@ describe('Deleting a document', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) sinon.spy(MockWebApi, 'setDocument') sinon.spy(MockWebApi, 'getDocument') MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) return DocUpdaterClient.preloadDoc( this.project_id, this.doc_id, - (error) => { + error => { if (error != null) { throw error } @@ -67,7 +67,7 @@ describe('Deleting a document', function () { this.project_id, this.doc_id, this.update, - (error) => { + error => { if (error != null) { throw error } @@ -133,10 +133,10 @@ describe('Deleting a document', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: this.lines + lines: this.lines, }) sinon.spy(MockWebApi, 'setDocument') sinon.spy(MockWebApi, 'getDocument') diff --git a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js index b07ffae0f5..e9ffeb8806 100644 --- a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js @@ -33,12 +33,12 @@ describe('Deleting a project', function () { op: [ { i: 'one and a half\n', - p: 4 - } + p: 4, + }, ], - v: 0 + v: 0, }, - updatedLines: ['one', 'one and a half', 'two', 'three'] + updatedLines: ['one', 'one and a half', 'two', 'three'], }, { id: (doc_id1 = DocUpdaterClient.randomId()), @@ -48,18 +48,18 @@ describe('Deleting a project', function () { op: [ { i: 'four and a half\n', - p: 5 - } + p: 5, + }, ], - v: 0 + v: 0, }, - updatedLines: ['four', 'four and a half', 'five', 'six'] - } + updatedLines: ['four', 'four and a half', 'five', 'six'], + }, ] for (const doc of Array.from(this.docs)) { MockWebApi.insertDoc(this.project_id, doc.id, { lines: doc.lines, - version: doc.update.v + version: doc.update.v, }) } @@ -73,12 +73,12 @@ describe('Deleting a project', function () { sinon.spy(MockProjectHistoryApi, 'flushProject') return async.series( - this.docs.map((doc) => { - return (callback) => { + this.docs.map(doc => { + return callback => { return DocUpdaterClient.preloadDoc( this.project_id, doc.id, - (error) => { + error => { if (error != null) { return callback(error) } @@ -86,7 +86,7 @@ describe('Deleting a project', function () { this.project_id, doc.id, doc.update, - (error) => { + error => { return callback(error) } ) @@ -94,7 +94,7 @@ describe('Deleting a project', function () { ) } }), - (error) => { + error => { if (error != null) { throw error } @@ -122,7 +122,7 @@ describe('Deleting a project', function () { }) it('should send each document to the web api', 
function () { - return Array.from(this.docs).map((doc) => + return Array.from(this.docs).map(doc => MockWebApi.setDocument .calledWith(this.project_id, doc.id, doc.updatedLines) .should.equal(true) @@ -132,8 +132,8 @@ describe('Deleting a project', function () { it('should need to reload the docs if read again', function (done) { sinon.spy(MockWebApi, 'getDocument') return async.series( - this.docs.map((doc) => { - return (callback) => { + this.docs.map(doc => { + return callback => { MockWebApi.getDocument .calledWith(this.project_id, doc.id) .should.equal(false) @@ -157,7 +157,7 @@ describe('Deleting a project', function () { }) it('should flush each doc in track changes', function () { - return Array.from(this.docs).map((doc) => + return Array.from(this.docs).map(doc => MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true) ) }) @@ -176,8 +176,8 @@ describe('Deleting a project', function () { sinon.spy(MockProjectHistoryApi, 'flushProject') return async.series( - this.docs.map((doc) => { - return (callback) => { + this.docs.map(doc => { + return callback => { return DocUpdaterClient.preloadDoc( this.project_id, doc.id, @@ -185,7 +185,7 @@ describe('Deleting a project', function () { ) } }), - (error) => { + error => { if (error != null) { throw error } @@ -232,8 +232,8 @@ describe('Deleting a project', function () { sinon.spy(MockProjectHistoryApi, 'flushProject') return async.series( - this.docs.map((doc) => { - return (callback) => { + this.docs.map(doc => { + return callback => { return DocUpdaterClient.preloadDoc( this.project_id, doc.id, @@ -241,7 +241,7 @@ describe('Deleting a project', function () { ) } }), - (error) => { + error => { if (error != null) { throw error } @@ -273,7 +273,7 @@ describe('Deleting a project', function () { }) it('should send each document to the web api', function () { - return Array.from(this.docs).map((doc) => + return Array.from(this.docs).map(doc => MockWebApi.setDocument .calledWith(this.project_id, doc.id, doc.updatedLines) .should.equal(true) @@ -281,7 +281,7 @@ describe('Deleting a project', function () { }) it('should flush each doc in track changes', function () { - return Array.from(this.docs).map((doc) => + return Array.from(this.docs).map(doc => MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true) ) }) diff --git a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js index 4f4abc2730..0d7a46a990 100644 --- a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js +++ b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js @@ -31,12 +31,12 @@ describe('Flushing a project', function () { op: [ { i: 'one and a half\n', - p: 4 - } + p: 4, + }, ], - v: 0 + v: 0, }, - updatedLines: ['one', 'one and a half', 'two', 'three'] + updatedLines: ['one', 'one and a half', 'two', 'three'], }, { id: (doc_id1 = DocUpdaterClient.randomId()), @@ -46,18 +46,18 @@ describe('Flushing a project', function () { op: [ { i: 'four and a half\n', - p: 5 - } + p: 5, + }, ], - v: 0 + v: 0, }, - updatedLines: ['four', 'four and a half', 'five', 'six'] - } + updatedLines: ['four', 'four and a half', 'five', 'six'], + }, ] for (const doc of Array.from(this.docs)) { MockWebApi.insertDoc(this.project_id, doc.id, { lines: doc.lines, - version: doc.update.v + version: doc.update.v, }) } return DocUpdaterApp.ensureRunning(done) @@ -68,12 +68,12 @@ describe('Flushing a project', function () { sinon.spy(MockWebApi, 'setDocument') return 
async.series( - this.docs.map((doc) => { - return (callback) => { + this.docs.map(doc => { + return callback => { return DocUpdaterClient.preloadDoc( this.project_id, doc.id, - (error) => { + error => { if (error != null) { return callback(error) } @@ -81,7 +81,7 @@ describe('Flushing a project', function () { this.project_id, doc.id, doc.update, - (error) => { + error => { return callback(error) } ) @@ -89,7 +89,7 @@ describe('Flushing a project', function () { ) } }), - (error) => { + error => { if (error != null) { throw error } @@ -115,7 +115,7 @@ describe('Flushing a project', function () { }) it('should send each document to the web api', function () { - return Array.from(this.docs).map((doc) => + return Array.from(this.docs).map(doc => MockWebApi.setDocument .calledWith(this.project_id, doc.id, doc.updatedLines) .should.equal(true) @@ -124,8 +124,8 @@ describe('Flushing a project', function () { return it('should update the lines in the doc updater', function (done) { return async.series( - this.docs.map((doc) => { - return (callback) => { + this.docs.map(doc => { + return callback => { return DocUpdaterClient.getDoc( this.project_id, doc.id, diff --git a/services/document-updater/test/acceptance/js/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js index 5eac9fa2f3..fdb82dc40d 100644 --- a/services/document-updater/test/acceptance/js/FlushingDocsTests.js +++ b/services/document-updater/test/acceptance/js/FlushingDocsTests.js @@ -31,10 +31,10 @@ describe('Flushing a doc to Mongo', function () { op: [ { i: 'one and a half\n', - p: 4 - } + p: 4, + }, ], - v: this.version + v: this.version, } this.result = ['one', 'one and a half', 'two', 'three'] return DocUpdaterApp.ensureRunning(done) @@ -44,19 +44,19 @@ describe('Flushing a doc to Mongo', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) sinon.spy(MockWebApi, 'setDocument') MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) return DocUpdaterClient.sendUpdates( this.project_id, this.doc_id, [this.update], - (error) => { + error => { if (error != null) { throw error } @@ -90,10 +90,10 @@ describe('Flushing a doc to Mongo', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: this.lines + lines: this.lines, }) sinon.spy(MockWebApi, 'setDocument') return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done) @@ -112,11 +112,11 @@ describe('Flushing a doc to Mongo', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) let t = 30000 sinon diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js index 50dc35059c..b6acd9ebfb 100644 --- a/services/document-updater/test/acceptance/js/GettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js @@ -29,13 +29,13 @@ describe('Getting a document', function () { before(function (done) { 
;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) sinon.spy(MockWebApi, 'getDocument') MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) return DocUpdaterClient.getDoc( @@ -71,17 +71,17 @@ describe('Getting a document', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) return DocUpdaterClient.preloadDoc( this.project_id, this.doc_id, - (error) => { + error => { if (error != null) { throw error } @@ -115,23 +115,23 @@ describe('Getting a document', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { - lines: (this.lines = ['one', 'two', 'three']) + lines: (this.lines = ['one', 'two', 'three']), }) - this.updates = __range__(0, 199, true).map((v) => ({ + this.updates = __range__(0, 199, true).map(v => ({ doc_id: this.doc_id, op: [{ i: v.toString(), p: 0 }], - v + v, })) return DocUpdaterClient.sendUpdates( this.project_id, this.doc_id, this.updates, - (error) => { + error => { if (error != null) { throw error } @@ -191,7 +191,7 @@ describe('Getting a document', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) return DocUpdaterClient.getDoc( this.project_id, @@ -212,7 +212,7 @@ describe('Getting a document', function () { before(function (done) { ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) sinon .stub(MockWebApi, 'getDocument') @@ -246,7 +246,7 @@ describe('Getting a document', function () { this.timeout = 10000 ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) sinon .stub(MockWebApi, 'getDocument') diff --git a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js index b32ccb0837..7d72d8161c 100644 --- a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js +++ b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js @@ -30,17 +30,17 @@ describe('Getting documents for project', function () { this.projectStateHash = DocUpdaterClient.randomId() ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) return DocUpdaterClient.preloadDoc( this.project_id, this.doc_id, - (error) => { + error => { if (error != null) { throw error } @@ -67,17 +67,17 @@ describe('Getting documents for project', function () { this.projectStateHash = DocUpdaterClient.randomId() ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - 
version: this.version + version: this.version, }) return DocUpdaterClient.preloadDoc( this.project_id, this.doc_id, - (error) => { + error => { if (error != null) { throw error } @@ -110,7 +110,7 @@ describe('Getting documents for project', function () { return it('should return the documents', function () { return this.returnedDocs.should.deep.equal([ - { _id: this.doc_id, lines: this.lines, v: this.version } + { _id: this.doc_id, lines: this.lines, v: this.version }, ]) }) }) @@ -120,17 +120,17 @@ describe('Getting documents for project', function () { this.projectStateHash = DocUpdaterClient.randomId() ;[this.project_id, this.doc_id] = Array.from([ DocUpdaterClient.randomId(), - DocUpdaterClient.randomId() + DocUpdaterClient.randomId(), ]) MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) return DocUpdaterClient.preloadDoc( this.project_id, this.doc_id, - (error) => { + error => { if (error != null) { throw error } diff --git a/services/document-updater/test/acceptance/js/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js index 7034436440..0275bab1fb 100644 --- a/services/document-updater/test/acceptance/js/RangesTests.js +++ b/services/document-updater/test/acceptance/js/RangesTests.js @@ -32,36 +32,36 @@ describe('Ranges', function () { this.id_seed = '587357bd35e64f6157' this.doc = { id: DocUpdaterClient.randomId(), - lines: ['aaa'] + lines: ['aaa'], } this.updates = [ { doc: this.doc.id, op: [{ i: '123', p: 1 }], v: 0, - meta: { user_id: this.user_id } + meta: { user_id: this.user_id }, }, { doc: this.doc.id, op: [{ i: '456', p: 5 }], v: 1, - meta: { user_id: this.user_id, tc: this.id_seed } + meta: { user_id: this.user_id, tc: this.id_seed }, }, { doc: this.doc.id, op: [{ d: '12', p: 1 }], v: 2, - meta: { user_id: this.user_id } - } + meta: { user_id: this.user_id }, + }, ] MockWebApi.insertDoc(this.project_id, this.doc.id, { lines: this.doc.lines, - version: 0 + version: 0, }) const jobs = [] for (const update of Array.from(this.updates)) { - ;((update) => { - return jobs.push((callback) => + ;(update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc.id, @@ -72,18 +72,18 @@ describe('Ranges', function () { })(update) } - return DocUpdaterApp.ensureRunning((error) => { + return DocUpdaterApp.ensureRunning(error => { if (error != null) { throw error } return DocUpdaterClient.preloadDoc( this.project_id, this.doc.id, - (error) => { + error => { if (error != null) { throw error } - return async.series(jobs, (error) => { + return async.series(jobs, error => { if (error != null) { throw error } @@ -119,25 +119,25 @@ describe('Ranges', function () { this.user_id = DocUpdaterClient.randomId() this.doc = { id: DocUpdaterClient.randomId(), - lines: ['foo bar baz'] + lines: ['foo bar baz'], } this.updates = [ { doc: this.doc.id, op: [ - { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) } + { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }, ], - v: 0 - } + v: 0, + }, ] MockWebApi.insertDoc(this.project_id, this.doc.id, { lines: this.doc.lines, - version: 0 + version: 0, }) const jobs = [] for (const update of Array.from(this.updates)) { - ;((update) => { - return jobs.push((callback) => + ;(update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc.id, @@ -150,11 +150,11 @@ describe('Ranges', function () { return DocUpdaterClient.preloadDoc( this.project_id, this.doc.id, - (error) => { + 
error => { if (error != null) { throw error } - return async.series(jobs, (error) => { + return async.series(jobs, error => { if (error != null) { throw error } @@ -188,31 +188,31 @@ describe('Ranges', function () { this.user_id = DocUpdaterClient.randomId() this.doc = { id: DocUpdaterClient.randomId(), - lines: ['foo bar baz'] + lines: ['foo bar baz'], } this.updates = [ { doc: this.doc.id, op: [{ i: 'ABC', p: 3 }], v: 0, - meta: { user_id: this.user_id } + meta: { user_id: this.user_id }, }, { doc: this.doc.id, op: [ - { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) } + { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }, ], - v: 0 - } + v: 0, + }, ] MockWebApi.insertDoc(this.project_id, this.doc.id, { lines: this.doc.lines, - version: 0 + version: 0, }) const jobs = [] for (const update of Array.from(this.updates)) { - ;((update) => { - return jobs.push((callback) => + ;(update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc.id, @@ -225,11 +225,11 @@ describe('Ranges', function () { return DocUpdaterClient.preloadDoc( this.project_id, this.doc.id, - (error) => { + error => { if (error != null) { throw error } - return async.series(jobs, (error) => { + return async.series(jobs, error => { if (error != null) { throw error } @@ -265,13 +265,13 @@ describe('Ranges', function () { this.id_seed = '587357bd35e64f6157' this.doc = { id: DocUpdaterClient.randomId(), - lines: ['a123aa'] + lines: ['a123aa'], } this.update = { doc: this.doc.id, op: [{ i: '456', p: 5 }], v: 0, - meta: { user_id: this.user_id, tc: this.id_seed } + meta: { user_id: this.user_id, tc: this.id_seed }, } MockWebApi.insertDoc(this.project_id, this.doc.id, { lines: this.doc.lines, @@ -282,16 +282,16 @@ describe('Ranges', function () { op: { i: '123', p: 1 }, metadata: { user_id: this.user_id, - ts: new Date() - } - } - ] - } + ts: new Date(), + }, + }, + ], + }, }) return DocUpdaterClient.preloadDoc( this.project_id, this.doc.id, - (error) => { + error => { if (error != null) { throw error } @@ -299,7 +299,7 @@ describe('Ranges', function () { this.project_id, this.doc.id, this.update, - (error) => { + error => { if (error != null) { throw error } @@ -327,25 +327,21 @@ describe('Ranges', function () { }) return it('should flush the ranges to the persistence layer again', function (done) { - return DocUpdaterClient.flushDoc( - this.project_id, - this.doc.id, - (error) => { - if (error != null) { - throw error - } - return MockWebApi.getDocument( - this.project_id, - this.doc.id, - (error, doc) => { - const { changes } = doc.ranges - changes[0].op.should.deep.equal({ i: '123', p: 1 }) - changes[1].op.should.deep.equal({ i: '456', p: 5 }) - return done() - } - ) + return DocUpdaterClient.flushDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error } - ) + return MockWebApi.getDocument( + this.project_id, + this.doc.id, + (error, doc) => { + const { changes } = doc.ranges + changes[0].op.should.deep.equal({ i: '123', p: 1 }) + changes[1].op.should.deep.equal({ i: '456', p: 5 }) + return done() + } + ) + }) }) }) @@ -356,22 +352,22 @@ describe('Ranges', function () { this.id_seed = '587357bd35e64f6157' this.doc = { id: DocUpdaterClient.randomId(), - lines: ['aaa'] + lines: ['aaa'], } this.update = { doc: this.doc.id, op: [{ i: '456', p: 1 }], v: 0, - meta: { user_id: this.user_id, tc: this.id_seed } + meta: { user_id: this.user_id, tc: this.id_seed }, } MockWebApi.insertDoc(this.project_id, this.doc.id, { lines: this.doc.lines, - 
version: 0 + version: 0, }) return DocUpdaterClient.preloadDoc( this.project_id, this.doc.id, - (error) => { + error => { if (error != null) { throw error } @@ -379,7 +375,7 @@ describe('Ranges', function () { this.project_id, this.doc.id, this.update, - (error) => { + error => { if (error != null) { throw error } @@ -411,7 +407,7 @@ describe('Ranges', function () { this.project_id, this.doc.id, this.id_seed + '000001', - (error) => { + error => { if (error != null) { throw error } @@ -437,21 +433,21 @@ describe('Ranges', function () { this.user_id = DocUpdaterClient.randomId() this.doc = { id: DocUpdaterClient.randomId(), - lines: ['foo bar'] + lines: ['foo bar'], } this.update = { doc: this.doc.id, op: [{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], - v: 0 + v: 0, } MockWebApi.insertDoc(this.project_id, this.doc.id, { lines: this.doc.lines, - version: 0 + version: 0, }) return DocUpdaterClient.preloadDoc( this.project_id, this.doc.id, - (error) => { + error => { if (error != null) { throw error } @@ -459,7 +455,7 @@ describe('Ranges', function () { this.project_id, this.doc.id, this.update, - (error) => { + error => { if (error != null) { throw error } @@ -518,7 +514,7 @@ describe('Ranges', function () { this.id_seed = DocUpdaterClient.randomId() this.doc = { id: DocUpdaterClient.randomId(), - lines: ['aaa'] + lines: ['aaa'], } this.i = new Array(3 * 1024 * 1024).join('a') this.updates = [ @@ -526,17 +522,17 @@ describe('Ranges', function () { doc: this.doc.id, op: [{ i: this.i, p: 1 }], v: 0, - meta: { user_id: this.user_id, tc: this.id_seed } - } + meta: { user_id: this.user_id, tc: this.id_seed }, + }, ] MockWebApi.insertDoc(this.project_id, this.doc.id, { lines: this.doc.lines, - version: 0 + version: 0, }) const jobs = [] for (const update of Array.from(this.updates)) { - ;((update) => { - return jobs.push((callback) => + ;(update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc.id, @@ -549,11 +545,11 @@ describe('Ranges', function () { return DocUpdaterClient.preloadDoc( this.project_id, this.doc.id, - (error) => { + error => { if (error != null) { throw error } - return async.series(jobs, (error) => { + return async.series(jobs, error => { if (error != null) { throw error } @@ -593,34 +589,34 @@ describe('Ranges', function () { op: { c: 'a', p: 5, - tid: (this.tid = DocUpdaterClient.randomId()) + tid: (this.tid = DocUpdaterClient.randomId()), }, metadata: { user_id: this.user_id, - ts: new Date() - } - } - ] - } + ts: new Date(), + }, + }, + ], + }, }) this.updates = [ { doc: this.doc_id, op: [{ d: 'foo ', p: 0 }], v: 0, - meta: { user_id: this.user_id } + meta: { user_id: this.user_id }, }, { doc: this.doc_id, op: [{ d: 'bar ', p: 0 }], v: 1, - meta: { user_id: this.user_id } - } + meta: { user_id: this.user_id }, + }, ] const jobs = [] for (const update of Array.from(this.updates)) { - ;((update) => { - return jobs.push((callback) => + ;(update => { + return jobs.push(callback => DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, @@ -633,7 +629,7 @@ describe('Ranges', function () { return DocUpdaterClient.preloadDoc( this.project_id, this.doc_id, - (error) => { + error => { if (error != null) { throw error } @@ -669,7 +665,7 @@ describe('Ranges', function () { db.docSnapshots .find({ project_id: ObjectId(this.project_id), - doc_id: ObjectId(this.doc_id) + doc_id: ObjectId(this.doc_id), }) .toArray((error, docSnapshots) => { if (error != null) { @@ -681,7 +677,7 @@ describe('Ranges', function () { 
expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal({ c: 'a', p: 1, - tid: this.tid + tid: this.tid, }) return done() }) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js index 60d46dcdec..05955df5b0 100644 --- a/services/document-updater/test/acceptance/js/SettingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -21,10 +21,10 @@ describe('Setting a document', function () { op: [ { i: 'one and a half\n', - p: 4 - } + p: 4, + }, ], - v: this.version + v: this.version, } this.result = ['one', 'one and a half', 'two', 'three'] this.newLines = ['these', 'are', 'the', 'new', 'lines'] @@ -49,9 +49,9 @@ describe('Setting a document', function () { this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { if (error) { throw error } @@ -59,7 +59,7 @@ describe('Setting a document', function () { this.project_id, this.doc_id, this.update, - (error) => { + error => { if (error) { throw error } @@ -149,7 +149,7 @@ describe('Setting a document', function () { this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) DocUpdaterClient.setDocLines( this.project_id, @@ -212,23 +212,23 @@ describe('Setting a document', function () { { desc: 'when the updated doc is too large for the body parser', size: Settings.maxJsonRequestSize, - expectedStatusCode: 413 + expectedStatusCode: 413, }, { desc: 'when the updated doc is larger than the HTTP controller limit', size: Settings.max_doc_length, - expectedStatusCode: 406 - } + expectedStatusCode: 406, + }, ] - DOC_TOO_LARGE_TEST_CASES.forEach((testCase) => { + DOC_TOO_LARGE_TEST_CASES.forEach(testCase => { describe(testCase.desc, function () { before(function (done) { this.project_id = DocUpdaterClient.randomId() this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) this.newLines = [] while (JSON.stringify(this.newLines).length <= testCase.size) { @@ -281,7 +281,7 @@ describe('Setting a document', function () { this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) this.newLines = [] @@ -333,14 +333,14 @@ describe('Setting a document', function () { op: [ { d: 'one and a half\n', - p: 4 - } + p: 4, + }, ], meta: { tc: this.id_seed, - user_id: this.user_id + user_id: this.user_id, }, - v: this.version + v: this.version, } }) @@ -350,9 +350,9 @@ describe('Setting a document', function () { this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { if (error) { throw error } @@ -360,7 +360,7 @@ describe('Setting a document', function () { this.project_id, this.doc_id, this.update, - (error) => { + error => { if (error) { throw error } @@ -413,9 +413,9 @@ describe('Setting a document', function 
() { this.doc_id = DocUpdaterClient.randomId() MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - version: this.version + version: this.version, }) - DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => { + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { if (error) { throw error } @@ -423,7 +423,7 @@ describe('Setting a document', function () { this.project_id, this.doc_id, this.update, - (error) => { + error => { if (error) { throw error } diff --git a/services/document-updater/test/acceptance/js/SizeCheckTests.js b/services/document-updater/test/acceptance/js/SizeCheckTests.js index ed893884be..6267df3b1a 100644 --- a/services/document-updater/test/acceptance/js/SizeCheckTests.js +++ b/services/document-updater/test/acceptance/js/SizeCheckTests.js @@ -16,10 +16,10 @@ describe('SizeChecks', function () { op: [ { i: 'insert some more lines that will bring it above the limit\n', - p: 42 - } + p: 42, + }, ], - v: this.version + v: this.version, } this.project_id = DocUpdaterClient.randomId() this.doc_id = DocUpdaterClient.randomId() @@ -30,7 +30,7 @@ describe('SizeChecks', function () { this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 + 1)] MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - v: this.version + v: this.version, }) }) @@ -47,13 +47,13 @@ describe('SizeChecks', function () { const update = { doc: this.doc_id, op: this.update.op, - v: this.version + v: this.version, } DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, update, - (error) => { + error => { if (error != null) { throw error } @@ -77,7 +77,7 @@ describe('SizeChecks', function () { this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 - 1)] MockWebApi.insertDoc(this.project_id, this.doc_id, { lines: this.lines, - v: this.version + v: this.version, }) }) @@ -98,13 +98,13 @@ describe('SizeChecks', function () { const update = { doc: this.doc_id, op: this.update.op, - v: this.version + v: this.version, } DocUpdaterClient.sendUpdate( this.project_id, this.doc_id, update, - (error) => { + error => { if (error != null) { throw error } diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js index 8d53e69118..270f4ca0c6 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js @@ -31,7 +31,7 @@ module.exports = { this.initing = true this.callbacks.push(callback) waitForDb().then(() => { - return app.listen(3003, 'localhost', (error) => { + return app.listen(3003, 'localhost', error => { if (error != null) { throw error } @@ -45,5 +45,5 @@ module.exports = { })() }) }) - } + }, } diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js index ee356d3bf7..71e7915c0f 100644 --- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -40,12 +40,12 @@ module.exports = DocUpdaterClient = { rclient.rpush( keys.pendingUpdates({ doc_id: docId }), JSON.stringify(update), - (error) => { + error => { if (error) { return callback(error) } const docKey = `${projectId}:${docId}` - rclient.sadd('DocsWithPendingUpdates', docKey, (error) => { + rclient.sadd('DocsWithPendingUpdates', docKey, error => { if (error) { return callback(error) } 
@@ -61,14 +61,14 @@ module.exports = DocUpdaterClient = { }, sendUpdates(projectId, docId, updates, callback) { - DocUpdaterClient.preloadDoc(projectId, docId, (error) => { + DocUpdaterClient.preloadDoc(projectId, docId, error => { if (error) { return callback(error) } - const jobs = updates.map((update) => (callback) => { + const jobs = updates.map(update => callback => { DocUpdaterClient.sendUpdate(projectId, docId, update, callback) }) - async.series(jobs, (err) => { + async.series(jobs, err => { if (err) { return callback(err) } @@ -80,7 +80,7 @@ module.exports = DocUpdaterClient = { waitForPendingUpdates(projectId, docId, callback) { async.retry( { times: 30, interval: 100 }, - (cb) => + cb => rclient.llen(keys.pendingUpdates({ doc_id: docId }), (err, length) => { if (err) { return cb(err) @@ -138,8 +138,8 @@ module.exports = DocUpdaterClient = { lines, source, user_id: userId, - undoing - } + undoing, + }, }, (error, res, body) => callback(error, res, body) ) @@ -204,9 +204,9 @@ module.exports = DocUpdaterClient = { request.post( { url: `http://localhost:3003/project/${projectId}`, - json: { userId, updates, version } + json: { userId, updates, version }, }, (error, res, body) => callback(error, res, body) ) - } + }, } diff --git a/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js index 6d72c77baf..513475da3d 100644 --- a/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js +++ b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js @@ -24,7 +24,7 @@ module.exports = MockProjectHistoryApi = { run() { app.post('/project/:project_id/flush', (req, res, next) => { - return this.flushProject(req.params.project_id, (error) => { + return this.flushProject(req.params.project_id, error => { if (error != null) { return res.sendStatus(500) } else { @@ -33,12 +33,12 @@ module.exports = MockProjectHistoryApi = { }) }) - return app.listen(3054, (error) => { + return app.listen(3054, error => { if (error != null) { throw error } }) - } + }, } MockProjectHistoryApi.run() diff --git a/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js b/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js index 319a02b7ca..eb66b2b3b5 100644 --- a/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js +++ b/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js @@ -24,7 +24,7 @@ module.exports = MockTrackChangesApi = { run() { app.post('/project/:project_id/doc/:doc_id/flush', (req, res, next) => { - return this.flushDoc(req.params.doc_id, (error) => { + return this.flushDoc(req.params.doc_id, error => { if (error != null) { return res.sendStatus(500) } else { @@ -34,16 +34,16 @@ module.exports = MockTrackChangesApi = { }) return app - .listen(3015, (error) => { + .listen(3015, error => { if (error != null) { throw error } }) - .on('error', (error) => { + .on('error', error => { console.error('error starting MockTrackChangesApi:', error.message) return process.exit(1) }) - } + }, } MockTrackChangesApi.run() diff --git a/services/document-updater/test/acceptance/js/helpers/MockWebApi.js b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js index fc6bd49e27..818895fcba 100644 --- a/services/document-updater/test/acceptance/js/helpers/MockWebApi.js +++ b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js @@ -96,7 +96,7 @@ module.exports = 
MockWebApi = { req.body.ranges, req.body.lastUpdatedAt, req.body.lastUpdatedBy, - (error) => { + error => { if (error != null) { return res.sendStatus(500) } else { @@ -108,16 +108,16 @@ module.exports = MockWebApi = { ) return app - .listen(3000, (error) => { + .listen(3000, error => { if (error != null) { throw error } }) - .on('error', (error) => { + .on('error', error => { console.error('error starting MockWebApi:', error.message) return process.exit(1) }) - } + }, } MockWebApi.run() diff --git a/services/document-updater/test/cluster_failover/js/test_blpop_failover.js b/services/document-updater/test/cluster_failover/js/test_blpop_failover.js index e3f52f6339..b6a83e3b34 100644 --- a/services/document-updater/test/cluster_failover/js/test_blpop_failover.js +++ b/services/document-updater/test/cluster_failover/js/test_blpop_failover.js @@ -4,18 +4,18 @@ const rclient1 = redis.createClient({ cluster: [ { port: '7000', - host: 'localhost' - } - ] + host: 'localhost', + }, + ], }) const rclient2 = redis.createClient({ cluster: [ { port: '7000', - host: 'localhost' - } - ] + host: 'localhost', + }, + ], }) let counter = 0 @@ -23,7 +23,7 @@ const sendPing = function (cb) { if (cb == null) { cb = function () {} } - return rclient1.rpush('test-blpop', counter, (error) => { + return rclient1.rpush('test-blpop', counter, error => { if (error != null) { console.error('[SENDING ERROR]', error.message) } @@ -35,7 +35,7 @@ const sendPing = function (cb) { } let previous = null -const listenForPing = (cb) => +const listenForPing = cb => rclient2.blpop('test-blpop', 200, (error, result) => { if (error != null) { return cb(error) @@ -57,7 +57,7 @@ const listenForPing = (cb) => const PING_DELAY = 100 ;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))() ;(listenInBackground = () => - listenForPing((error) => { + listenForPing(error => { if (error) { console.error('[RECEIVING ERROR]', error.message) } diff --git a/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js b/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js index 3da52be287..44ad70c6ec 100644 --- a/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js +++ b/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js @@ -4,18 +4,18 @@ const rclient1 = redis.createClient({ cluster: [ { port: '7000', - host: 'localhost' - } - ] + host: 'localhost', + }, + ], }) const rclient2 = redis.createClient({ cluster: [ { port: '7000', - host: 'localhost' - } - ] + host: 'localhost', + }, + ], }) let counter = 0 @@ -23,7 +23,7 @@ const sendPing = function (cb) { if (cb == null) { cb = function () {} } - return rclient1.publish('test-pubsub', counter, (error) => { + return rclient1.publish('test-pubsub', counter, error => { if (error) { console.error('[SENDING ERROR]', error.message) } diff --git a/services/document-updater/test/setup.js b/services/document-updater/test/setup.js index 0fb9848427..0212544400 100644 --- a/services/document-updater/test/setup.js +++ b/services/document-updater/test/setup.js @@ -13,16 +13,16 @@ const stubs = { log: sandbox.stub(), warn: sandbox.stub(), err: sandbox.stub(), - error: sandbox.stub() - } + error: sandbox.stub(), + }, } // SandboxedModule configuration SandboxedModule.configure({ requires: { - 'logger-sharelatex': stubs.logger + 'logger-sharelatex': stubs.logger, }, - globals: { Buffer, JSON, Math, console, process } + globals: { Buffer, JSON, Math, console, process }, }) // Mocha hooks @@ -33,5 +33,5 @@ 
exports.mochaHooks = { afterEach() { sandbox.reset() - } + }, } diff --git a/services/document-updater/test/stress/js/run.js b/services/document-updater/test/stress/js/run.js index da78735a95..8b0a9f353b 100644 --- a/services/document-updater/test/stress/js/run.js +++ b/services/document-updater/test/stress/js/run.js @@ -31,7 +31,7 @@ const transform = function (op1, op2) { if (op2.p < op1.p) { return { p: op1.p + op2.i.length, - i: op1.i + i: op1.i, } } else { return op1 @@ -61,7 +61,7 @@ class StressTestClient { conflicts: 0, local_updates: 0, remote_updates: 0, - max_delay: 0 + max_delay: 0, } DocUpdaterClient.subscribeToAppliedOps((channel, update) => { @@ -81,7 +81,7 @@ class StressTestClient { this.content = insert(this.content, this.pos, data) this.inflight_op = { i: data, - p: this.pos++ + p: this.pos++, } this.resendUpdate() return (this.inflight_op_sent = Date.now()) @@ -94,9 +94,9 @@ class StressTestClient { op: [this.inflight_op], v: this.version, meta: { - source: this.client_id + source: this.client_id, }, - dupIfSource: [this.client_id] + dupIfSource: [this.client_id], }) return (this.update_timer = setTimeout(() => { console.log( @@ -277,7 +277,7 @@ const checkDocument = function (project_id, doc_id, clients, callback) { if (callback == null) { callback = function (error) {} } - const jobs = clients.map((client) => (cb) => client.check(cb)) + const jobs = clients.map(client => cb => client.check(cb)) return async.parallel(jobs, callback) } @@ -304,7 +304,7 @@ const printSummary = function (doc_id, clients) { local_updates: 0, remote_updates: 0, conflicts: 0, - max_delay: 0 + max_delay: 0, }) ) } @@ -326,7 +326,7 @@ for (const doc_and_project_id of Array.from(process.argv.slice(5))) { [new Array(CLIENT_COUNT + 2).join('a')], null, null, - (error) => { + error => { if (error != null) { throw error } @@ -360,22 +360,23 @@ for (const doc_and_project_id of Array.from(process.argv.slice(5))) { content, pos, version, - updateDelay: UPDATE_DELAY + updateDelay: UPDATE_DELAY, }) return clients.push(client) })(pos) } return (runBatch = function () { - const jobs = clients.map((client) => (cb) => - client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb) + const jobs = clients.map( + client => cb => + client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb) ) - return async.parallel(jobs, (error) => { + return async.parallel(jobs, error => { if (error != null) { throw error } printSummary(doc_id, clients) - return checkDocument(project_id, doc_id, clients, (error) => { + return checkDocument(project_id, doc_id, clients, error => { if (error != null) { throw error } diff --git a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js index d498d6b45c..4f8b188c15 100644 --- a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js +++ b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js @@ -32,8 +32,8 @@ describe('DiffCodec', function () { expect(ops).to.deep.equal([ { i: 'beautiful ', - p: 6 - } + p: 6, + }, ]) return done() } @@ -49,7 +49,7 @@ describe('DiffCodec', function () { (error, ops) => { expect(ops).to.deep.equal([ { i: 'tall ', p: 4 }, - { i: 'red ', p: 29 } + { i: 'red ', p: 29 }, ]) return done() } @@ -66,8 +66,8 @@ describe('DiffCodec', function () { expect(ops).to.deep.equal([ { d: 'beautiful ', - p: 6 - } + p: 6, + }, ]) return done() } @@ -83,7 +83,7 @@ describe('DiffCodec', function () { (error, ops) => { expect(ops).to.deep.equal([ { d: 'tall ', p: 4 }, - { d: 
'red ', p: 24 } + { d: 'red ', p: 24 }, ]) return done() } diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js index d4cb72b89d..4e17d58fff 100644 --- a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -25,8 +25,8 @@ describe('DispatchManager', function () { './UpdateManager': (this.UpdateManager = {}), '@overleaf/settings': (this.settings = { redis: { - documentupdater: {} - } + documentupdater: {}, + }, }), '@overleaf/redis-wrapper': (this.redis = {}), './RateLimitManager': {}, @@ -40,15 +40,15 @@ describe('DispatchManager', function () { } Timer.initClass() return Timer - })()) - }) - } + })()), + }), + }, }) this.callback = sinon.stub() return (this.RateLimiter = { run(task, cb) { return task(cb) - } + }, }) }) // run task without rate limit @@ -144,7 +144,7 @@ describe('DispatchManager', function () { beforeEach(function (done) { this.client = { auth: sinon.stub(), - blpop: sinon.stub().callsArgWith(2) + blpop: sinon.stub().callsArgWith(2), } this.redis.createClient = sinon.stub().returns(this.client) this.queueShardNumber = 7 @@ -166,7 +166,7 @@ describe('DispatchManager', function () { return describe('run', function () { return it('should call _waitForUpdateThenDispatchWorker until shutting down', function (done) { let callCount = 0 - this.worker._waitForUpdateThenDispatchWorker = (callback) => { + this.worker._waitForUpdateThenDispatchWorker = callback => { if (callback == null) { callback = function (error) {} } diff --git a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js index 8ca42df757..bac5ae6a85 100644 --- a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js +++ b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js @@ -29,7 +29,7 @@ describe('DocumentManager', function () { './PersistenceManager': (this.PersistenceManager = {}), './HistoryManager': (this.HistoryManager = { flushDocChangesAsync: sinon.stub(), - flushProjectChangesAsync: sinon.stub() + flushProjectChangesAsync: sinon.stub(), }), './Metrics': (this.Metrics = { Timer: (Timer = (function () { @@ -40,14 +40,14 @@ describe('DocumentManager', function () { } Timer.initClass() return Timer - })()) + })()), }), './RealTimeRedisManager': (this.RealTimeRedisManager = {}), './DiffCodec': (this.DiffCodec = {}), './UpdateManager': (this.UpdateManager = {}), './RangesManager': (this.RangesManager = {}), - './Errors': Errors - } + './Errors': Errors, + }, }) this.project_id = 'project-id-123' this.projectHistoryId = 'history-id-123' @@ -123,7 +123,7 @@ describe('DocumentManager', function () { this.project_id, this.doc_id, {}, - (error) => { + error => { error.should.exist this.RedisManager.removeDocFromMemory.called.should.equal(false) return done() @@ -137,7 +137,7 @@ describe('DocumentManager', function () { this.project_id, this.doc_id, { ignoreFlushErrors: true }, - (error) => { + error => { if (error != null) { return done(error) } @@ -484,7 +484,7 @@ describe('DocumentManager', function () { this.afterLines = ['after', 'lines'] this.ops = [ { i: 'foo', p: 4 }, - { d: 'bar', p: 42 } + { d: 'bar', p: 42 }, ] this.DocumentManager.getDoc = sinon .stub() @@ -543,8 +543,8 @@ describe('DocumentManager', function () { meta: { 
type: 'external', source: this.source, - user_id: this.user_id - } + user_id: this.user_id, + }, }) .should.equal(true) }) @@ -636,7 +636,7 @@ describe('DocumentManager', function () { // Copy ops so we don't interfere with other tests this.ops = [ { i: 'foo', p: 4 }, - { d: 'bar', p: 42 } + { d: 'bar', p: 42 }, ] this.DiffCodec.diffAsShareJsOp = sinon .stub() @@ -653,7 +653,7 @@ describe('DocumentManager', function () { }) return it('should set the undo flag on each op', function () { - return Array.from(this.ops).map((op) => op.u.should.equal(true)) + return Array.from(this.ops).map(op => op.u.should.equal(true)) }) }) }) @@ -666,7 +666,7 @@ describe('DocumentManager', function () { 'mock-change-id-1', 'mock-change-id-2', 'mock-change-id-3', - 'mock-change-id-4' + 'mock-change-id-4', ] this.version = 34 this.lines = ['original', 'lines'] diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js index 5f1529ab0b..988333c9b8 100644 --- a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js @@ -25,19 +25,19 @@ describe('HistoryManager', function () { apis: { project_history: { enabled: true, - url: 'http://project_history.example.com' + url: 'http://project_history.example.com', }, trackchanges: { - url: 'http://trackchanges.example.com' - } - } + url: 'http://trackchanges.example.com', + }, + }, }), './DocumentManager': (this.DocumentManager = {}), './HistoryRedisManager': (this.HistoryRedisManager = {}), './RedisManager': (this.RedisManager = {}), './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), - './Metrics': (this.metrics = { inc: sinon.stub() }) - } + './Metrics': (this.metrics = { inc: sinon.stub() }), + }, }) this.project_id = 'mock-project-id' this.doc_id = 'mock-doc-id' @@ -118,7 +118,7 @@ describe('HistoryManager', function () { return this.request.post .calledWith({ url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, - qs: { background: true } + qs: { background: true }, }) .should.equal(true) }) @@ -131,7 +131,7 @@ describe('HistoryManager', function () { .stub() .callsArgWith(1, null, { statusCode: 204 }) return this.HistoryManager.flushProjectChanges(this.project_id, { - background: true + background: true, }) }) @@ -139,7 +139,7 @@ describe('HistoryManager', function () { return this.request.post .calledWith({ url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, - qs: { background: true } + qs: { background: true }, }) .should.equal(true) }) @@ -149,7 +149,7 @@ describe('HistoryManager', function () { beforeEach(function () { this.request.post = sinon.stub() return this.HistoryManager.flushProjectChanges(this.project_id, { - skip_history_flush: true + skip_history_flush: true, }) }) @@ -372,15 +372,15 @@ describe('HistoryManager', function () { this.docs = [ { doc: this.doc_id, - path: 'main.tex' - } + path: 'main.tex', + }, ] this.files = [ { file: 'mock-file-id', path: 'universe.png', - url: `www.filestore.test/${this.project_id}/mock-file-id` - } + url: `www.filestore.test/${this.project_id}/mock-file-id`, + }, ] this.ProjectHistoryRedisManager.queueResyncProjectStructure = sinon .stub() diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js 
index 626398fc6b..942884ec58 100644 --- a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js @@ -20,7 +20,7 @@ describe('HistoryRedisManager', function () { beforeEach(function () { this.rclient = { auth() {}, - exec: sinon.stub() + exec: sinon.stub(), } this.rclient.multi = () => this.rclient this.HistoryRedisManager = SandboxedModule.require(modulePath, { @@ -35,12 +35,12 @@ describe('HistoryRedisManager', function () { }, docsWithHistoryOps({ project_id }) { return `DocsWithHistoryOps:${project_id}` - } - } - }) - } - } - } + }, + }, + }), + }, + }, + }, }) this.doc_id = 'doc-id-123' this.project_id = 'project-id-123' diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 64477eb944..3a926d5e0c 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -9,14 +9,14 @@ describe('HttpController', function () { requires: { './DocumentManager': (this.DocumentManager = {}), './HistoryManager': (this.HistoryManager = { - flushProjectChangesAsync: sinon.stub() + flushProjectChangesAsync: sinon.stub(), }), './ProjectManager': (this.ProjectManager = {}), './ProjectFlusher': { flushAllProjects() {} }, './DeleteQueueManager': (this.DeleteQueueManager = {}), './Metrics': (this.Metrics = {}), - './Errors': Errors - } + './Errors': Errors, + }, }) this.Metrics.Timer = class Timer {} this.Metrics.Timer.prototype.done = sinon.stub() @@ -27,7 +27,7 @@ describe('HttpController', function () { this.res = { send: sinon.stub(), sendStatus: sinon.stub(), - json: sinon.stub() + json: sinon.stub(), } }) @@ -42,10 +42,10 @@ describe('HttpController', function () { this.req = { params: { project_id: this.project_id, - doc_id: this.doc_id + doc_id: this.doc_id, }, query: {}, - body: {} + body: {}, } }) @@ -79,7 +79,7 @@ describe('HttpController', function () { version: this.version, ops: [], ranges: this.ranges, - pathname: this.pathname + pathname: this.pathname, }) .should.equal(true) }) @@ -129,7 +129,7 @@ describe('HttpController', function () { version: this.version, ops: this.ops, ranges: this.ranges, - pathname: this.pathname + pathname: this.pathname, }) .should.equal(true) }) @@ -186,15 +186,15 @@ describe('HttpController', function () { headers: {}, params: { project_id: this.project_id, - doc_id: this.doc_id + doc_id: this.doc_id, }, query: {}, body: { lines: this.lines, source: this.source, user_id: this.user_id, - undoing: (this.undoing = true) - } + undoing: (this.undoing = true), + }, } }) @@ -230,7 +230,7 @@ describe('HttpController', function () { lines: this.lines, source: this.source, userId: this.user_id, - undoing: this.undoing + undoing: this.undoing, }, 'setting doc via http' ) @@ -280,10 +280,10 @@ describe('HttpController', function () { beforeEach(function () { this.req = { params: { - project_id: this.project_id + project_id: this.project_id, }, query: {}, - body: {} + body: {}, } }) @@ -338,10 +338,10 @@ describe('HttpController', function () { this.req = { params: { project_id: this.project_id, - doc_id: this.doc_id + doc_id: this.doc_id, }, query: {}, - body: {} + body: {}, } }) @@ -396,10 +396,10 @@ describe('HttpController', function () { this.req = { params: { project_id: this.project_id, - doc_id: this.doc_id + doc_id: 
this.doc_id, }, query: {}, - body: {} + body: {}, } }) @@ -414,7 +414,7 @@ describe('HttpController', function () { it('should flush and delete the doc', function () { this.DocumentManager.flushAndDeleteDocWithLock .calledWith(this.project_id, this.doc_id, { - ignoreFlushErrors: false + ignoreFlushErrors: false, }) .should.equal(true) }) @@ -485,10 +485,10 @@ describe('HttpController', function () { beforeEach(function () { this.req = { params: { - project_id: this.project_id + project_id: this.project_id, }, query: {}, - body: {} + body: {}, } }) @@ -560,10 +560,10 @@ describe('HttpController', function () { params: { project_id: this.project_id, doc_id: this.doc_id, - change_id: (this.change_id = 'mock-change-od-1') + change_id: (this.change_id = 'mock-change-od-1'), }, query: {}, - body: {} + body: {}, } }) @@ -605,7 +605,7 @@ describe('HttpController', function () { 'mock-change-od-1', 'mock-change-od-2', 'mock-change-od-3', - 'mock-change-od-4' + 'mock-change-od-4', ] this.req.body = { change_ids: this.change_ids } this.DocumentManager.acceptChangesWithLock = sinon @@ -650,10 +650,10 @@ describe('HttpController', function () { params: { project_id: this.project_id, doc_id: this.doc_id, - comment_id: (this.comment_id = 'mock-comment-id') + comment_id: (this.comment_id = 'mock-comment-id'), }, query: {}, - body: {} + body: {}, } }) @@ -681,7 +681,7 @@ describe('HttpController', function () { { projectId: this.project_id, docId: this.doc_id, - commentId: this.comment_id + commentId: this.comment_id, }, 'deleting comment via http' ) @@ -712,16 +712,16 @@ describe('HttpController', function () { this.state = '01234567890abcdef' this.docs = [ { _id: '1234', lines: 'hello', v: 23 }, - { _id: '4567', lines: 'world', v: 45 } + { _id: '4567', lines: 'world', v: 45 }, ] this.req = { params: { - project_id: this.project_id + project_id: this.project_id, }, query: { - state: this.state + state: this.state, }, - body: {} + body: {}, } }) @@ -817,16 +817,16 @@ describe('HttpController', function () { type: 'rename-doc', id: 1, pathname: 'thesis.tex', - newPathname: 'book.tex' + newPathname: 'book.tex', }, { type: 'add-doc', id: 2, pathname: 'article.tex', docLines: 'hello' }, { type: 'rename-file', id: 3, pathname: 'apple.png', - newPathname: 'banana.png' + newPathname: 'banana.png', }, - { type: 'add-file', id: 4, url: 'filestore.example.com/4' } + { type: 'add-file', id: 4, url: 'filestore.example.com/4' }, ] this.version = 1234567 this.req = { @@ -835,11 +835,11 @@ describe('HttpController', function () { projectHistoryId: this.projectHistoryId, userId: this.userId, updates: this.updates, - version: this.version + version: this.version, }, params: { - project_id: this.project_id - } + project_id: this.project_id, + }, } }) @@ -895,11 +895,11 @@ describe('HttpController', function () { body: { projectHistoryId: this.projectHistoryId, docs: this.docs, - files: this.files + files: this.files, }, params: { - project_id: this.project_id - } + project_id: this.project_id, + }, } }) diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js index 4f700cc144..034974f805 100644 --- a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js @@ -29,9 +29,9 @@ describe('LockManager - checking the lock', function () { createClient() { return { auth() {}, - exists: existsStub + exists: existsStub, } - } + }, }, './Metrics': { 
inc() {} }, './Profiler': (Profiler = (function () { @@ -43,7 +43,7 @@ describe('LockManager - checking the lock', function () { } Profiler.initClass() return Profiler - })()) + })()), } const LockManager = SandboxedModule.require(modulePath, { requires: mocks }) diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js index 177869d0db..47ad907a6b 100644 --- a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -24,11 +24,11 @@ describe('LockManager - releasing the lock', function () { let Profiler this.client = { auth() {}, - eval: sinon.stub() + eval: sinon.stub(), } const mocks = { '@overleaf/redis-wrapper': { - createClient: () => this.client + createClient: () => this.client, }, '@overleaf/settings': { redis: { @@ -36,10 +36,10 @@ describe('LockManager - releasing the lock', function () { key_schema: { blockingKey({ doc_id }) { return `Blocking:${doc_id}` - } - } - } - } + }, + }, + }, + }, }, './Metrics': { inc() {} }, './Profiler': (Profiler = (function () { @@ -51,7 +51,7 @@ describe('LockManager - releasing the lock', function () { } Profiler.initClass() return Profiler - })()) + })()), } this.LockManager = SandboxedModule.require(modulePath, { requires: mocks }) this.lockValue = 'lock-value-stub' diff --git a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js index 0b938c3753..d1ba5cf728 100644 --- a/services/document-updater/test/unit/js/LockManager/getLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js @@ -26,7 +26,7 @@ describe('LockManager - getting the lock', function () { '@overleaf/redis-wrapper': { createClient: () => { return { auth() {} } - } + }, }, './Metrics': { inc() {} }, './Profiler': (Profiler = (function () { @@ -38,8 +38,8 @@ describe('LockManager - getting the lock', function () { } Profiler.initClass() return Profiler - })()) - } + })()), + }, }) this.callback = sinon.stub() return (this.doc_id = 'doc-id-123') diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js index ef92b50214..210307d51b 100644 --- a/services/document-updater/test/unit/js/LockManager/tryLockTests.js +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -24,9 +24,9 @@ describe('LockManager - trying the lock', function () { createClient: () => { return { auth() {}, - set: (this.set = sinon.stub()) + set: (this.set = sinon.stub()), } - } + }, }, './Metrics': { inc() {} }, '@overleaf/settings': { @@ -35,22 +35,26 @@ describe('LockManager - trying the lock', function () { key_schema: { blockingKey({ doc_id }) { return `Blocking:${doc_id}` + }, + }, + }, + }, + }, + './Profiler': + (this.Profiler = Profiler = + (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon + .stub() + .returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() } } - } - } - }, - './Profiler': (this.Profiler = Profiler = (function () { - Profiler = class Profiler { - static initClass() { - this.prototype.log = sinon.stub().returns({ end: sinon.stub() }) - this.prototype.end = sinon.stub() - } - } - Profiler.initClass() - return Profiler - })()) - } + Profiler.initClass() + return Profiler + })()), + }, }) this.callback = sinon.stub() diff --git 
a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js index a9137175b6..cc669481f7 100644 --- a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -34,10 +34,10 @@ describe('PersistenceManager', function () { Timer.initClass() return Timer })()), - inc: sinon.stub() + inc: sinon.stub(), }), - './Errors': Errors - } + './Errors': Errors, + }, }) this.project_id = 'project-id-123' this.projectHistoryId = 'history-id-123' @@ -53,8 +53,8 @@ describe('PersistenceManager', function () { web: { url: (this.url = 'www.example.com'), user: (this.user = 'sharelatex'), - pass: (this.pass = 'password') - } + pass: (this.pass = 'password'), + }, }) }) @@ -65,7 +65,7 @@ describe('PersistenceManager', function () { version: this.version, ranges: this.ranges, pathname: this.pathname, - projectHistoryId: this.projectHistoryId + projectHistoryId: this.projectHistoryId, }) }) @@ -90,15 +90,15 @@ describe('PersistenceManager', function () { url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`, method: 'GET', headers: { - accept: 'application/json' + accept: 'application/json', }, auth: { user: this.user, pass: this.pass, - sendImmediately: true + sendImmediately: true, }, jar: false, - timeout: 5000 + timeout: 5000, }) .should.equal(true) }) @@ -309,16 +309,16 @@ describe('PersistenceManager', function () { version: this.version, ranges: this.ranges, lastUpdatedAt: this.lastUpdatedAt, - lastUpdatedBy: this.lastUpdatedBy + lastUpdatedBy: this.lastUpdatedBy, }, method: 'POST', auth: { user: this.user, pass: this.pass, - sendImmediately: true + sendImmediately: true, }, jar: false, - timeout: 5000 + timeout: 5000, }) .should.equal(true) }) diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js index 8e20214e88..941d6b2008 100644 --- a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -37,16 +37,16 @@ describe('ProjectHistoryRedisManager', function () { }, projectHistoryFirstOpTimestamp({ project_id }) { return `ProjectHistory:FirstOpTimestamp:${project_id}` - } - } - } - } + }, + }, + }, + }, }), '@overleaf/redis-wrapper': { - createClient: () => this.rclient + createClient: () => this.rclient, }, - './Metrics': (this.metrics = { summary: sinon.stub() }) - } + './Metrics': (this.metrics = { summary: sinon.stub() }), + }, } )) }) @@ -97,7 +97,7 @@ describe('ProjectHistoryRedisManager', function () { this.rawUpdate = { pathname: (this.pathname = '/old'), newPathname: (this.newPathname = '/new'), - version: (this.version = 2) + version: (this.version = 2), } this.ProjectHistoryRedisManager.queueOps = sinon.stub() @@ -118,11 +118,11 @@ describe('ProjectHistoryRedisManager', function () { new_pathname: this.newPathname, meta: { user_id: this.user_id, - ts: new Date() + ts: new Date(), }, version: this.version, projectHistoryId: this.projectHistoryId, - file: this.file_id + file: this.file_id, } return this.ProjectHistoryRedisManager.queueOps @@ -144,7 +144,7 @@ describe('ProjectHistoryRedisManager', function () { pathname: (this.pathname = 
'/old'), docLines: (this.docLines = 'a\nb'), version: (this.version = 2), - url: (this.url = 'filestore.example.com') + url: (this.url = 'filestore.example.com'), } this.ProjectHistoryRedisManager.queueOps = sinon.stub() @@ -166,11 +166,11 @@ describe('ProjectHistoryRedisManager', function () { url: this.url, meta: { user_id: this.user_id, - ts: new Date() + ts: new Date(), }, version: this.version, projectHistoryId: this.projectHistoryId, - doc: this.doc_id + doc: this.doc_id, } return this.ProjectHistoryRedisManager.queueOps diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js index d8342c0cff..da4013a534 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js @@ -25,7 +25,7 @@ describe('ProjectManager - flushAndDeleteProject', function () { './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), './DocumentManager': (this.DocumentManager = {}), './HistoryManager': (this.HistoryManager = { - flushProjectChanges: sinon.stub().callsArg(2) + flushProjectChanges: sinon.stub().callsArg(2), }), './Metrics': (this.Metrics = { Timer: (Timer = (function () { @@ -36,9 +36,9 @@ describe('ProjectManager - flushAndDeleteProject', function () { } Timer.initClass() return Timer - })()) - }) - } + })()), + }), + }, }) this.project_id = 'project-id-123' return (this.callback = sinon.stub()) @@ -54,7 +54,7 @@ describe('ProjectManager - flushAndDeleteProject', function () { return this.ProjectManager.flushAndDeleteProjectWithLocks( this.project_id, {}, - (error) => { + error => { this.callback(error) return done() } @@ -68,7 +68,7 @@ describe('ProjectManager - flushAndDeleteProject', function () { }) it('should delete each doc in the project', function () { - return Array.from(this.doc_ids).map((doc_id) => + return Array.from(this.doc_ids).map(doc_id => this.DocumentManager.flushAndDeleteDocWithLock .calledWith(this.project_id, doc_id, {}) .should.equal(true) @@ -110,7 +110,7 @@ describe('ProjectManager - flushAndDeleteProject', function () { return this.ProjectManager.flushAndDeleteProjectWithLocks( this.project_id, {}, - (error) => { + error => { this.callback(error) return done() } @@ -118,7 +118,7 @@ describe('ProjectManager - flushAndDeleteProject', function () { }) it('should still flush each doc in the project', function () { - return Array.from(this.doc_ids).map((doc_id) => + return Array.from(this.doc_ids).map(doc_id => this.DocumentManager.flushAndDeleteDocWithLock .calledWith(this.project_id, doc_id, {}) .should.equal(true) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js index 70ae03e861..d607840494 100644 --- a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js @@ -36,9 +36,9 @@ describe('ProjectManager - flushProject', function () { } Timer.initClass() return Timer - })()) - }) - } + })()), + }), + }, }) this.project_id = 'project-id-123' return (this.callback = sinon.stub()) @@ -53,7 +53,7 @@ describe('ProjectManager - flushProject', function () { this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2) return this.ProjectManager.flushProjectWithLocks( this.project_id, - (error) => { + 
error => { this.callback(error) return done() } @@ -67,7 +67,7 @@ describe('ProjectManager - flushProject', function () { }) it('should flush each doc in the project', function () { - return Array.from(this.doc_ids).map((doc_id) => + return Array.from(this.doc_ids).map(doc_id => this.DocumentManager.flushDocIfLoadedWithLock .calledWith(this.project_id, doc_id) .should.equal(true) @@ -105,7 +105,7 @@ describe('ProjectManager - flushProject', function () { ) return this.ProjectManager.flushProjectWithLocks( this.project_id, - (error) => { + error => { this.callback(error) return done() } @@ -113,7 +113,7 @@ describe('ProjectManager - flushProject', function () { }) it('should still flush each doc in the project', function () { - return Array.from(this.doc_ids).map((doc_id) => + return Array.from(this.doc_ids).map(doc_id => this.DocumentManager.flushDocIfLoadedWithLock .calledWith(this.project_id, doc_id) .should.equal(true) diff --git a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js index 467a190168..7bd2c27aa2 100644 --- a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js @@ -33,10 +33,10 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { } Timer.initClass() return Timer - })()) + })()), }), - './Errors': Errors - } + './Errors': Errors, + }, }) this.project_id = 'project-id-123' this.callback = sinon.stub() @@ -49,24 +49,24 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { this.doc_lines = [ ['aaa', 'aaa'], ['bbb', 'bbb'], - ['ccc', 'ccc'] + ['ccc', 'ccc'], ] this.docs = [ { _id: this.doc_ids[0], lines: this.doc_lines[0], - v: this.doc_versions[0] + v: this.doc_versions[0], }, { _id: this.doc_ids[1], lines: this.doc_lines[1], - v: this.doc_versions[1] + v: this.doc_versions[1], }, { _id: this.doc_ids[2], lines: this.doc_lines[2], - v: this.doc_versions[2] - } + v: this.doc_versions[2], + }, ] this.RedisManager.checkOrSetProjectState = sinon .stub() @@ -200,7 +200,7 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { return describe('clearing the project state with clearProjectState', function () { beforeEach(function (done) { this.RedisManager.clearProjectState = sinon.stub().callsArg(1) - return this.ProjectManager.clearProjectState(this.project_id, (error) => { + return this.ProjectManager.clearProjectState(this.project_id, error => { this.callback(error) return done() }) diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js index 896517679c..ffc1257fe0 100644 --- a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -8,17 +8,17 @@ describe('ProjectManager', function () { this.RedisManager = {} this.ProjectHistoryRedisManager = { queueRenameEntity: sinon.stub().yields(), - queueAddEntity: sinon.stub().yields() + queueAddEntity: sinon.stub().yields(), } this.DocumentManager = { - renameDocWithLock: sinon.stub().yields() + renameDocWithLock: sinon.stub().yields(), } this.HistoryManager = { flushProjectChangesAsync: sinon.stub(), - shouldFlushHistoryOps: sinon.stub().returns(false) + shouldFlushHistoryOps: sinon.stub().returns(false), } this.Metrics = { - Timer: class Timer {} + Timer: 
class Timer {}, } this.Metrics.Timer.prototype.done = sinon.stub() @@ -28,8 +28,8 @@ describe('ProjectManager', function () { './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager, './DocumentManager': this.DocumentManager, './HistoryManager': this.HistoryManager, - './Metrics': this.Metrics - } + './Metrics': this.Metrics, + }, }) this.project_id = 'project-id-123' @@ -46,24 +46,24 @@ describe('ProjectManager', function () { type: 'rename-doc', id: 1, pathname: 'foo', - newPathname: 'foo' + newPathname: 'foo', } this.secondDocUpdate = { type: 'rename-doc', id: 2, pathname: 'bar', - newPathname: 'bar2' + newPathname: 'bar2', } this.firstFileUpdate = { type: 'rename-file', id: 2, pathname: 'bar', - newPathname: 'bar2' + newPathname: 'bar2', } this.updates = [ this.firstDocUpdate, this.secondDocUpdate, - this.firstFileUpdate + this.firstFileUpdate, ] }) @@ -81,7 +81,7 @@ describe('ProjectManager', function () { it('should rename the docs in the updates', function () { const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, { - version: `${this.version}.0` + version: `${this.version}.0`, }) const secondDocUpdateWithVersion = _.extend( {}, @@ -201,28 +201,28 @@ describe('ProjectManager', function () { this.firstDocUpdate = { type: 'add-doc', id: 1, - docLines: 'a\nb' + docLines: 'a\nb', } this.secondDocUpdate = { type: 'add-doc', id: 2, - docLines: 'a\nb' + docLines: 'a\nb', } this.firstFileUpdate = { type: 'add-file', id: 3, - url: 'filestore.example.com/2' + url: 'filestore.example.com/2', } this.secondFileUpdate = { type: 'add-file', id: 4, - url: 'filestore.example.com/3' + url: 'filestore.example.com/3', } this.updates = [ this.firstDocUpdate, this.secondDocUpdate, this.firstFileUpdate, - this.secondFileUpdate + this.secondFileUpdate, ] }) @@ -240,7 +240,7 @@ describe('ProjectManager', function () { it('should add the docs in the updates', function () { const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, { - version: `${this.version}.0` + version: `${this.version}.0`, }) const secondDocUpdateWithVersion = _.extend( {}, diff --git a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js index c857153888..6fbad0557a 100644 --- a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js +++ b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js @@ -32,39 +32,39 @@ describe('RangesManager', function () { this.updates = [ { meta: { - user_id: this.user_id + user_id: this.user_id, }, op: [ { i: 'two ', - p: 4 - } - ] - } + p: 4, + }, + ], + }, ] this.entries = { comments: [ { op: { c: 'three ', - p: 4 + p: 4, }, metadata: { - user_id: this.user_id - } - } + user_id: this.user_id, + }, + }, ], changes: [ { op: { i: 'five', - p: 15 + p: 15, }, metadata: { - user_id: this.user_id - } - } - ] + user_id: this.user_id, + }, + }, + ], } return (this.newDocLines = ['one two three four five']) }) // old is "one three four five" @@ -90,11 +90,11 @@ describe('RangesManager', function () { expect(ranges_were_collapsed).to.equal(false) entries.comments[0].op.should.deep.equal({ c: 'three ', - p: 8 + p: 8, }) return entries.changes[0].op.should.deep.equal({ i: 'five', - p: 19 + p: 19, }) }) }) @@ -149,16 +149,16 @@ describe('RangesManager', function () { this.updates = [ { meta: { - user_id: this.user_id + user_id: this.user_id, }, op: [ { c: 'one', p: 0, - t: 'thread-id-1' - } - ] - } + t: 'thread-id-1', + }, + ], + }, ] this.entries = 
{ comments: [ @@ -166,24 +166,24 @@ describe('RangesManager', function () { op: { c: 'three ', p: 4, - t: 'thread-id-2' + t: 'thread-id-2', }, metadata: { - user_id: this.user_id - } + user_id: this.user_id, + }, }, { op: { c: 'four ', p: 10, - t: 'thread-id-3' + t: 'thread-id-3', }, metadata: { - user_id: this.user_id - } - } + user_id: this.user_id, + }, + }, ], - changes: [] + changes: [], } return this.RangesManager.applyUpdate( this.project_id, @@ -212,38 +212,38 @@ describe('RangesManager', function () { { meta: { user_id: this.user_id, - tc: 'track-changes-id-yes' + tc: 'track-changes-id-yes', }, op: [ { i: 'one ', - p: 0 - } - ] - } + p: 0, + }, + ], + }, ] this.entries = { changes: [ { op: { i: 'three', - p: 4 + p: 4, }, metadata: { - user_id: this.user_id - } + user_id: this.user_id, + }, }, { op: { i: 'four', - p: 10 + p: 10, }, metadata: { - user_id: this.user_id - } - } + user_id: this.user_id, + }, + }, ], - comments: [] + comments: [], } this.newDocLines = ['one two three four'] return this.RangesManager.applyUpdate( @@ -272,15 +272,15 @@ describe('RangesManager', function () { this.updates = [ { meta: { - user_id: this.user_id + user_id: this.user_id, }, op: [ { c: "doesn't match", - p: 0 - } - ] - } + p: 0, + }, + ], + }, ] return this.RangesManager.applyUpdate( this.project_id, @@ -308,16 +308,16 @@ describe('RangesManager', function () { this.updates = [ { meta: { - user_id: this.user_id + user_id: this.user_id, }, op: [ { d: 'one', p: 0, - t: 'thread-id-1' - } - ] - } + t: 'thread-id-1', + }, + ], + }, ] this.entries = { comments: [ @@ -325,14 +325,14 @@ describe('RangesManager', function () { op: { c: 'n', p: 1, - t: 'thread-id-2' + t: 'thread-id-2', }, metadata: { - user_id: this.user_id - } - } + user_id: this.user_id, + }, + }, ], - changes: [] + changes: [], } return this.RangesManager.applyUpdate( this.project_id, @@ -360,8 +360,8 @@ describe('RangesManager', function () { requires: { './RangesTracker': (this.RangesTracker = SandboxedModule.require( '../../../../app/js/RangesTracker.js' - )) - } + )), + }, }) this.ranges = { @@ -371,38 +371,38 @@ describe('RangesManager', function () { id: 'a1', op: { i: 'lorem', - p: 0 - } + p: 0, + }, }, { id: 'a2', op: { i: 'ipsum', - p: 10 - } + p: 10, + }, }, { id: 'a3', op: { i: 'dolor', - p: 20 - } + p: 20, + }, }, { id: 'a4', op: { i: 'sit', - p: 30 - } + p: 30, + }, }, { id: 'a5', op: { i: 'amet', - p: 40 - } - } - ] + p: 40, + }, + }, + ], } return (this.removeChangeIdsSpy = sinon.spy( this.RangesTracker.prototype, @@ -438,7 +438,7 @@ describe('RangesManager', function () { it('should remove the change', function () { return expect( this.rangesResponse.changes.find( - (change) => change.id === this.ranges.changes[1].id + change => change.id === this.ranges.changes[1].id ) ).to.be.undefined }) @@ -450,10 +450,10 @@ describe('RangesManager', function () { }) return it('should not touch other changes', function () { - return [0, 2, 3, 4].map((i) => + return [0, 2, 3, 4].map(i => expect( this.rangesResponse.changes.find( - (change) => change.id === this.ranges.changes[i].id + change => change.id === this.ranges.changes[i].id ) ).to.deep.equal(this.ranges.changes[i]) ) @@ -465,7 +465,7 @@ describe('RangesManager', function () { this.change_ids = [ this.ranges.changes[1].id, this.ranges.changes[3].id, - this.ranges.changes[4].id + this.ranges.changes[4].id, ] return this.RangesManager.acceptChanges( this.change_ids, @@ -491,10 +491,10 @@ describe('RangesManager', function () { it('should remove the changes', function () { 
return [1, 3, 4].map( - (i) => + i => expect( this.rangesResponse.changes.find( - (change) => change.id === this.ranges.changes[1].id + change => change.id === this.ranges.changes[1].id ) ).to.be.undefined ) @@ -507,10 +507,10 @@ describe('RangesManager', function () { }) return it('should not touch other changes', function () { - return [0, 2].map((i) => + return [0, 2].map(i => expect( this.rangesResponse.changes.find( - (change) => change.id === this.ranges.changes[i].id + change => change.id === this.ranges.changes[i].id ) ).to.deep.equal(this.ranges.changes[i]) ) diff --git a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js index 6c488c4c29..09c4ebac52 100644 --- a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js +++ b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js @@ -31,9 +31,9 @@ describe('RateLimitManager', function () { Timer.initClass() return Timer })()), - gauge: sinon.stub() - }) - } + gauge: sinon.stub(), + }), + }, }) this.callback = sinon.stub() return (this.RateLimiter = new this.RateLimitManager(1)) @@ -63,18 +63,18 @@ describe('RateLimitManager', function () { beforeEach(function (done) { this.task = sinon.stub() this.finalTask = sinon.stub() - const task = (cb) => { + const task = cb => { this.task() return setTimeout(cb, 100) } - const finalTask = (cb) => { + const finalTask = cb => { this.finalTask() return setTimeout(cb, 100) } this.RateLimiter.run(task, this.callback) this.RateLimiter.run(task, this.callback) this.RateLimiter.run(task, this.callback) - return this.RateLimiter.run(finalTask, (err) => { + return this.RateLimiter.run(finalTask, err => { this.callback(err) return done() }) @@ -101,14 +101,14 @@ describe('RateLimitManager', function () { beforeEach(function (done) { this.task = sinon.stub() this.finalTask = sinon.stub() - const finalTask = (cb) => { + const finalTask = cb => { this.finalTask() return setTimeout(cb, 100) } this.RateLimiter.run(this.task, this.callback) this.RateLimiter.run(this.task, this.callback) this.RateLimiter.run(this.task, this.callback) - return this.RateLimiter.run(finalTask, (err) => { + return this.RateLimiter.run(finalTask, err => { this.callback(err) return done() }) diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js index c05ede76df..b0672bc088 100644 --- a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -19,15 +19,15 @@ describe('RealTimeRedisManager', function () { beforeEach(function () { this.rclient = { auth() {}, - exec: sinon.stub() + exec: sinon.stub(), } this.rclient.multi = () => this.rclient this.pubsubClient = { publish: sinon.stub() } this.RealTimeRedisManager = SandboxedModule.require(modulePath, { requires: { '@overleaf/redis-wrapper': { - createClient: (config) => - config.name === 'pubsub' ? this.pubsubClient : this.rclient + createClient: config => + config.name === 'pubsub' ? 
this.pubsubClient : this.rclient, }, '@overleaf/settings': { redis: { @@ -35,23 +35,23 @@ describe('RealTimeRedisManager', function () { key_schema: { pendingUpdates({ doc_id }) { return `PendingUpdates:${doc_id}` - } - } + }, + }, }), pubsub: { - name: 'pubsub' - } - } + name: 'pubsub', + }, + }, }, crypto: (this.crypto = { randomBytes: sinon .stub() .withArgs(4) - .returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) + .returns(Buffer.from([0x1, 0x2, 0x3, 0x4])), }), os: (this.os = { hostname: sinon.stub().returns('somehost') }), - './Metrics': (this.metrics = { summary: sinon.stub() }) - } + './Metrics': (this.metrics = { summary: sinon.stub() }), + }, }) this.doc_id = 'doc-id-123' @@ -69,9 +69,9 @@ describe('RealTimeRedisManager', function () { beforeEach(function () { this.updates = [ { op: [{ i: 'foo', p: 4 }] }, - { op: [{ i: 'foo', p: 4 }] } + { op: [{ i: 'foo', p: 4 }] }, ] - this.jsonUpdates = this.updates.map((update) => JSON.stringify(update)) + this.jsonUpdates = this.updates.map(update => JSON.stringify(update)) this.rclient.exec = sinon .stub() .callsArgWith(0, null, [this.jsonUpdates]) @@ -102,7 +102,7 @@ describe('RealTimeRedisManager', function () { beforeEach(function () { this.jsonUpdates = [ JSON.stringify({ op: [{ i: 'foo', p: 4 }] }), - 'broken json' + 'broken json', ] this.rclient.exec = sinon .stub() diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js index 7679d217a3..67ea6c7972 100644 --- a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -30,7 +30,7 @@ describe('RedisManager', function () { '@overleaf/settings': (this.settings = { documentupdater: { logHashErrors: { write: true, read: true } }, apis: { - project_history: { enabled: true } + project_history: { enabled: true }, }, redis: { documentupdater: { @@ -82,8 +82,8 @@ describe('RedisManager', function () { }, lastUpdatedAt({ doc_id }) { return `lastUpdatedAt:${doc_id}` - } - } + }, + }, }, history: { key_schema: { @@ -92,13 +92,13 @@ describe('RedisManager', function () { }, docsWithHistoryOps({ project_id }) { return `DocsWithHistoryOps:${project_id}` - } - } - } - } + }, + }, + }, + }, }), '@overleaf/redis-wrapper': { - createClient: () => this.rclient + createClient: () => this.rclient, }, './Metrics': (this.metrics = { inc: sinon.stub(), @@ -112,10 +112,10 @@ describe('RedisManager', function () { const timeSpan = new Date() - this.start return timeSpan } - }) + }), }), - './Errors': Errors - } + './Errors': Errors, + }, }) this.doc_id = 'doc-id-123' @@ -151,7 +151,7 @@ describe('RedisManager', function () { this.json_ranges, this.pathname, this.projectHistoryId.toString(), - this.unflushed_time + this.unflushed_time, ]) }) @@ -212,7 +212,7 @@ describe('RedisManager', function () { this.version, this.badHash, this.project_id, - this.json_ranges + this.json_ranges, ]) return this.RedisManager.getDoc( this.project_id, @@ -244,7 +244,7 @@ describe('RedisManager', function () { this.another_project_id, this.json_ranges, this.pathname, - this.unflushed_time + this.unflushed_time, ]) } @@ -278,7 +278,7 @@ describe('RedisManager', function () { this.another_project_id, this.json_ranges, this.pathname, - this.unflushed_time + this.unflushed_time, ]) return this.RedisManager.getDoc( this.project_id, @@ -304,7 +304,7 @@ describe('RedisManager', function () { this.start = 50 this.end = 60 this.ops = [{ mock: 
'op-1' }, { mock: 'op-2' }] - this.jsonOps = this.ops.map((op) => JSON.stringify(op)) + this.jsonOps = this.ops.map(op => JSON.stringify(op)) this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) this.rclient.get = sinon .stub() @@ -353,7 +353,7 @@ describe('RedisManager', function () { this.start = 50 this.end = -1 this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] - this.jsonOps = this.ops.map((op) => JSON.stringify(op)) + this.jsonOps = this.ops.map(op => JSON.stringify(op)) this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) this.rclient.get = sinon .stub() @@ -390,7 +390,7 @@ describe('RedisManager', function () { this.start = 20 this.end = -1 this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] - this.jsonOps = this.ops.map((op) => JSON.stringify(op)) + this.jsonOps = this.ops.map(op => JSON.stringify(op)) this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) this.rclient.get = sinon .stub() @@ -423,7 +423,7 @@ describe('RedisManager', function () { this.start = 50 this.end = 60 this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] - this.jsonOps = this.ops.map((op) => JSON.stringify(op)) + this.jsonOps = this.ops.map(op => JSON.stringify(op)) this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) this.rclient.get = sinon .stub() @@ -483,7 +483,7 @@ describe('RedisManager', function () { null, this.doc_update_list_length, null, - null + null, ]) return (this.ProjectHistoryRedisManager.queueOps = sinon .stub() @@ -529,7 +529,7 @@ describe('RedisManager', function () { [`DocHash:${this.doc_id}`]: this.hash, [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges), [`lastUpdatedAt:${this.doc_id}`]: Date.now(), - [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id' + [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id', }) .should.equal(true) }) @@ -728,7 +728,7 @@ describe('RedisManager', function () { [`DocHash:${this.doc_id}`]: this.hash, [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges), [`lastUpdatedAt:${this.doc_id}`]: Date.now(), - [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id' + [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id', }) .should.equal(true) }) @@ -759,7 +759,7 @@ describe('RedisManager', function () { [`DocHash:${this.doc_id}`]: this.hash, [`Ranges:${this.doc_id}`]: null, [`lastUpdatedAt:${this.doc_id}`]: Date.now(), - [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id' + [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id', }) .should.equal(true) }) @@ -856,7 +856,7 @@ describe('RedisManager', function () { [`DocHash:${this.doc_id}`]: this.hash, [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges), [`lastUpdatedAt:${this.doc_id}`]: Date.now(), - [`lastUpdatedBy:${this.doc_id}`]: undefined + [`lastUpdatedBy:${this.doc_id}`]: undefined, }) .should.equal(true) }) @@ -900,7 +900,7 @@ describe('RedisManager', function () { [`DocHash:${this.doc_id}`]: this.hash, [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges), [`Pathname:${this.doc_id}`]: this.pathname, - [`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId + [`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId, }) .should.equal(true) }) @@ -939,7 +939,7 @@ describe('RedisManager', function () { [`DocHash:${this.doc_id}`]: this.hash, [`Ranges:${this.doc_id}`]: null, [`Pathname:${this.doc_id}`]: this.pathname, - [`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId + [`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId, }) .should.equal(true) }) @@ -1070,7 +1070,7 @@ describe('RedisManager', function () { return 
(this.update = { id: this.doc_id, pathname: (this.pathname = 'pathname'), - newPathname: (this.newPathname = 'new-pathname') + newPathname: (this.newPathname = 'new-pathname'), }) }) diff --git a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js index a5e3a8599a..0946a78442 100644 --- a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js +++ b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js @@ -105,7 +105,7 @@ describe('ShareJS text type', function () { text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 4 }) return dest.should.deep.equal([ { d: 'f', p: 3 }, - { d: 'oo', p: 6 } + { d: 'oo', p: 6 }, ]) }) }) @@ -418,7 +418,7 @@ describe('ShareJS text type', function () { op1_t, op2_t, rt12_comments: rt12.comments, - rt21_comments: rt21.comments + rt21_comments: rt21.comments, }, 'Comments are not consistent' ) diff --git a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js index 1b4e4422a6..4ac16a8fe7 100644 --- a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js +++ b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js @@ -25,8 +25,8 @@ describe('ShareJsDB', function () { this.ShareJsDB = SandboxedModule.require(modulePath, { requires: { './RedisManager': (this.RedisManager = {}), - './Errors': Errors - } + './Errors': Errors, + }, }) this.version = 42 @@ -130,14 +130,14 @@ describe('ShareJsDB', function () { this.opData = { op: { p: 20, t: 'foo' }, meta: { source: 'bar' }, - v: this.version + v: this.version, } return this.db.writeOp(this.doc_key, this.opData, this.callback) }) it('should write into appliedOps', function () { return expect(this.db.appliedOps[this.doc_key]).to.deep.equal([ - this.opData + this.opData, ]) }) diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js index d6e9700bff..6fea29287f 100644 --- a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js +++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js @@ -31,14 +31,14 @@ describe('ShareJsUpdateManager', function () { '@overleaf/redis-wrapper': { createClient: () => { return (this.rclient = { auth() {} }) - } + }, }, './RealTimeRedisManager': (this.RealTimeRedisManager = {}), - './Metrics': (this.metrics = { inc: sinon.stub() }) + './Metrics': (this.metrics = { inc: sinon.stub() }), }, globals: { - clearTimeout: (this.clearTimeout = sinon.stub()) - } + clearTimeout: (this.clearTimeout = sinon.stub()), + }, })) }) @@ -58,8 +58,8 @@ describe('ShareJsUpdateManager', function () { applyOp: sinon.stub().callsArg(2), getSnapshot: sinon.stub(), db: { - appliedOps: {} - } + appliedOps: {}, + }, } this.ShareJsUpdateManager.getNewShareJsModel = sinon .stub() @@ -74,11 +74,10 @@ describe('ShareJsUpdateManager', function () { beforeEach(function (done) { this.model.getSnapshot.callsArgWith(1, null, { snapshot: this.updatedDocLines.join('\n'), - v: this.version + v: this.version, }) - this.model.db.appliedOps[ - `${this.project_id}:${this.doc_id}` - ] = this.appliedOps = ['mock-ops'] + this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] = + this.appliedOps = ['mock-ops'] return this.ShareJsUpdateManager.applyUpdate( this.project_id, this.doc_id, @@ -172,11 +171,10 @@ 
describe('ShareJsUpdateManager', function () { this.error = new Error('invalid hash') this.model.getSnapshot.callsArgWith(1, null, { snapshot: 'unexpected content', - v: this.version + v: this.version, }) - this.model.db.appliedOps[ - `${this.project_id}:${this.doc_id}` - ] = this.appliedOps = ['mock-ops'] + this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] = + this.appliedOps = ['mock-ops'] return this.ShareJsUpdateManager.applyUpdate( this.project_id, this.doc_id, @@ -203,7 +201,7 @@ describe('ShareJsUpdateManager', function () { this.model = { on: (event, callback) => { return (this.callback = callback) - } + }, } sinon.spy(this.model, 'on') return this.ShareJsUpdateManager._listenForOps(this.model) @@ -217,7 +215,7 @@ describe('ShareJsUpdateManager', function () { beforeEach(function () { this.opData = { op: { t: 'foo', p: 1 }, - meta: { source: 'bar' } + meta: { source: 'bar' }, } this.RealTimeRedisManager.sendData = sinon.stub() return this.callback(`${this.project_id}:${this.doc_id}`, this.opData) @@ -228,7 +226,7 @@ describe('ShareJsUpdateManager', function () { .calledWith({ project_id: this.project_id, doc_id: this.doc_id, - op: this.opData + op: this.opData, }) .should.equal(true) }) diff --git a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js index 907fac3d12..2f23de7f68 100644 --- a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js +++ b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js @@ -38,7 +38,7 @@ describe('UpdateManager', function () { } Timer.initClass() return Timer - })()) + })()), }), '@overleaf/settings': (this.Settings = {}), './DocumentManager': (this.DocumentManager = {}), @@ -53,8 +53,8 @@ describe('UpdateManager', function () { } Profiler.initClass() return Profiler - })()) - } + })()), + }, })) }) @@ -272,7 +272,7 @@ describe('UpdateManager', function () { }) it('should apply the updates', function () { - return Array.from(this.updates).map((update) => + return Array.from(this.updates).map(update => this.UpdateManager.applyUpdate .calledWith(this.project_id, this.doc_id, update) .should.equal(true) @@ -320,7 +320,7 @@ describe('UpdateManager', function () { this.updated_ranges = { entries: 'updated', comments: 'updated' } this.appliedOps = [ { v: 42, op: 'mock-op-42' }, - { v: 45, op: 'mock-op-45' } + { v: 45, op: 'mock-op-45' }, ] this.doc_ops_length = sinon.stub() this.project_ops_length = sinon.stub() @@ -465,7 +465,7 @@ describe('UpdateManager', function () { .calledWith({ project_id: this.project_id, doc_id: this.doc_id, - error: this.error.message + error: this.error.message, }) .should.equal(true) }) @@ -512,17 +512,17 @@ describe('UpdateManager', function () { v: 42, op: [ { i: 'foo', p: 4 }, - { i: 'bar', p: 6 } - ] + { i: 'bar', p: 6 }, + ], }, { v: 45, op: [ { d: 'qux', p: 4 }, - { i: 'bazbaz', p: 14 } - ] + { i: 'bazbaz', p: 14 }, + ], }, - { v: 49, op: [{ i: 'penguin', p: 18 }] } + { v: 49, op: [{ i: 'penguin', p: 18 }] }, ] this.UpdateManager._addProjectHistoryMetadataToOps( appliedOps, @@ -536,24 +536,24 @@ describe('UpdateManager', function () { v: 42, op: [ { i: 'foo', p: 4 }, - { i: 'bar', p: 6 } + { i: 'bar', p: 6 }, ], meta: { pathname: this.pathname, - doc_length: 14 - } + doc_length: 14, + }, }, { projectHistoryId: this.projectHistoryId, v: 45, op: [ { d: 'qux', p: 4 }, - { i: 'bazbaz', p: 14 } + { i: 'bazbaz', p: 14 }, ], meta: { pathname: this.pathname, - doc_length: 20 - 
} // 14 + 'foo' + 'bar' + doc_length: 20, + }, // 14 + 'foo' + 'bar' }, { projectHistoryId: this.projectHistoryId, @@ -561,9 +561,9 @@ describe('UpdateManager', function () { op: [{ i: 'penguin', p: 18 }], meta: { pathname: this.pathname, - doc_length: 23 - } // 14 - 'qux' + 'bazbaz' - } + doc_length: 23, + }, // 14 - 'qux' + 'bazbaz' + }, ]) }) }) From e8697f7f976b9df03d62ae1b7eb05462dc2ce5b5 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 12:21:07 +0100 Subject: [PATCH 763/769] [misc] temporary override a few new/changed eslint rules --- services/document-updater/.eslintrc | 15 ++++++++++++++- services/document-updater/app/js/sharejs/model.js | 1 - .../app/js/sharejs/server/model.js | 1 - .../app/js/sharejs/types/model.js | 1 - 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/services/document-updater/.eslintrc b/services/document-updater/.eslintrc index 1c14f50efe..a97661b15f 100644 --- a/services/document-updater/.eslintrc +++ b/services/document-updater/.eslintrc @@ -5,7 +5,7 @@ "extends": [ "eslint:recommended", "standard", - "prettier", + "prettier" ], "parserOptions": { "ecmaVersion": 2018 @@ -20,6 +20,19 @@ "mocha": true }, "rules": { + // TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671) + // START of temporary overrides + "array-callback-return": "off", + "no-dupe-else-if": "off", + "no-var": "off", + "no-empty": "off", + "node/handle-callback-err": "off", + "no-loss-of-precision": "off", + "node/no-callback-literal": "off", + "node/no-path-concat": "off", + "prefer-regex-literals": "off", + // END of temporary overrides + // Swap the no-unused-expressions rule with a more chai-friendly one "no-unused-expressions": 0, "chai-friendly/no-unused-expressions": "error", diff --git a/services/document-updater/app/js/sharejs/model.js b/services/document-updater/app/js/sharejs/model.js index 3b881c8d16..aebcd8d549 100644 --- a/services/document-updater/app/js/sharejs/model.js +++ b/services/document-updater/app/js/sharejs/model.js @@ -1,7 +1,6 @@ /* eslint-disable no-console, no-return-assign, - standard/no-callback-literal, */ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. diff --git a/services/document-updater/app/js/sharejs/server/model.js b/services/document-updater/app/js/sharejs/server/model.js index 7b065ea20a..a5682f71e3 100644 --- a/services/document-updater/app/js/sharejs/server/model.js +++ b/services/document-updater/app/js/sharejs/server/model.js @@ -1,7 +1,6 @@ /* eslint-disable no-console, no-return-assign, - standard/no-callback-literal, */ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. diff --git a/services/document-updater/app/js/sharejs/types/model.js b/services/document-updater/app/js/sharejs/types/model.js index 3b881c8d16..aebcd8d549 100644 --- a/services/document-updater/app/js/sharejs/types/model.js +++ b/services/document-updater/app/js/sharejs/types/model.js @@ -1,7 +1,6 @@ /* eslint-disable no-console, no-return-assign, - standard/no-callback-literal, */ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. 
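
Note: the three model.js hunks above drop the per-file "standard/no-callback-literal" disable; the equivalent check is instead switched off for the whole service via the new "node/no-callback-literal" override in .eslintrc. As a rough illustration of what that rule reports once re-enabled (hypothetical code, not taken from the patched files):

    // Flagged by no-callback-literal: the error slot carries a plain string.
    function getSnapshot(docName, callback) {
      callback('Document does not exist: ' + docName)
    }

    // Conforming version: pass null or an Error object as the first argument.
    function getSnapshotFixed(docName, callback) {
      callback(new Error('Document does not exist: ' + docName))
    }
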
From 40aafa27e0e15faab62aef961acc597b155a1181 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 12:26:30 +0100 Subject: [PATCH 764/769] [misc] upgrade node version to latest v12 LTS version 12.22.3 --- services/document-updater/.nvmrc | 2 +- services/document-updater/Dockerfile | 2 +- services/document-updater/buildscript.txt | 2 +- services/document-updater/docker-compose.yml | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc index e68b860383..5a80a7e912 100644 --- a/services/document-updater/.nvmrc +++ b/services/document-updater/.nvmrc @@ -1 +1 @@ -12.21.0 +12.22.3 diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile index 4f417a2a4b..6b286376dc 100644 --- a/services/document-updater/Dockerfile +++ b/services/document-updater/Dockerfile @@ -2,7 +2,7 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -FROM node:12.21.0 as base +FROM node:12.22.3 as base WORKDIR /app diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt index b75d1c1e34..339a3c847f 100644 --- a/services/document-updater/buildscript.txt +++ b/services/document-updater/buildscript.txt @@ -3,6 +3,6 @@ document-updater --docker-repos=gcr.io/overleaf-ops --env-add= --env-pass-through= ---node-version=12.21.0 +--node-version=12.22.3 --public-repo=True --script-version=3.11.0 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml index 6a1c097a30..e3c720c9cd 100644 --- a/services/document-updater/docker-compose.yml +++ b/services/document-updater/docker-compose.yml @@ -6,7 +6,7 @@ version: "2.3" services: test_unit: - image: node:12.21.0 + image: node:12.22.3 volumes: - .:/app working_dir: /app @@ -18,7 +18,7 @@ services: user: node test_acceptance: - image: node:12.21.0 + image: node:12.22.3 volumes: - .:/app working_dir: /app From 1184f8f5b67af8e60e97b9251403033e615b5633 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 12:35:32 +0100 Subject: [PATCH 765/769] [misc] fix chai assertions .equal.false -> .equals(false) --- .../test/acceptance/js/DeletingADocumentTests.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js index 5647c55d83..84c7300383 100644 --- a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js @@ -103,7 +103,7 @@ describe('Deleting a document', function () { }) it('should need to reload the doc if read again', function (done) { - MockWebApi.getDocument.called.should.equal.false + MockWebApi.getDocument.called.should.equals(false) return DocUpdaterClient.getDoc( this.project_id, this.doc_id, @@ -164,7 +164,7 @@ describe('Deleting a document', function () { }) it('should need to reload the doc if read again', function (done) { - MockWebApi.getDocument.called.should.equal.false + MockWebApi.getDocument.called.should.equals(false) return DocUpdaterClient.getDoc( this.project_id, this.doc_id, From bf8224e2a06477fb25197557075f0b937424f6d8 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 12:51:06 +0100 Subject: [PATCH 766/769] [misc] ApplyingUpdatesToADocTests: adhere to stricter chai assertions --- 
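Note: the parseInt calls added below are needed because Redis bulk-string replies reach the test as strings, and the stricter chai version no longer coerces them inside numeric matchers such as within(). A minimal sketch of the pattern, assuming chai is installed (illustrative values only):

    const chai = require('chai')
    chai.should()

    const startTime = Date.now() - 1000
    let result = String(Date.now()) // a Redis GET reply arrives as a string
    result = parseInt(result, 10) // parse before using a numeric matcher
    result.should.be.within(startTime, Date.now())
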
.../test/acceptance/js/ApplyingUpdatesToADocTests.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js index ec14300d21..5ae6f4eabd 100644 --- a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -150,6 +150,7 @@ describe('Applying updates to a doc', function () { if (error != null) { throw error } + result = parseInt(result, 10) result.should.be.within(this.startTime, Date.now()) this.firstOpTimestamp = result return done() @@ -186,6 +187,7 @@ describe('Applying updates to a doc', function () { if (error != null) { throw error } + result = parseInt(result, 10) result.should.equal(this.firstOpTimestamp) return done() } From 29cea1743b535651442942151082f2e99a5b0335 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 13:12:34 +0100 Subject: [PATCH 767/769] [misc] DeletingADocumentTests: fix behavior checks The preloading setup does an initial getDocument request to web-api. --- .../test/acceptance/js/DeletingADocumentTests.js | 1 + 1 file changed, 1 insertion(+) diff --git a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js index 84c7300383..9fa53d8052 100644 --- a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js +++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js @@ -103,6 +103,7 @@ describe('Deleting a document', function () { }) it('should need to reload the doc if read again', function (done) { + MockWebApi.getDocument.resetHistory() MockWebApi.getDocument.called.should.equals(false) return DocUpdaterClient.getDoc( this.project_id, From 0fd24b5133f74473efc912b2d13ce3b28434117e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 30 Jul 2021 16:13:48 +0100 Subject: [PATCH 768/769] peek at docs without fetching from mongo --- services/document-updater/app.js | 1 + .../document-updater/app/js/HttpController.js | 18 ++++++++++++++++++ .../js/HttpController/HttpControllerTests.js | 1 + 3 files changed, 20 insertions(+) diff --git a/services/document-updater/app.js b/services/document-updater/app.js index c724b74d33..61f254d7f2 100644 --- a/services/document-updater/app.js +++ b/services/document-updater/app.js @@ -53,6 +53,7 @@ app.param('doc_id', (req, res, next, docId) => { }) app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc) +app.get('/project/:project_id/doc/:doc_id/peek', HttpController.peekDoc) // temporarily keep the GET method for backwards compatibility app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld) // will migrate to the POST method of get_and_flush_if_old instead diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js index 6bffb6ec4a..4ea7a00d4c 100644 --- a/services/document-updater/app/js/HttpController.js +++ b/services/document-updater/app/js/HttpController.js @@ -1,6 +1,7 @@ const DocumentManager = require('./DocumentManager') const HistoryManager = require('./HistoryManager') const ProjectManager = require('./ProjectManager') +const RedisManager = require('./RedisManager') const Errors = require('./Errors') const logger = require('logger-sharelatex') const Settings = require('@overleaf/settings') @@ -11,6 +12,7 @@ const async = require('async') 
module.exports = { getDoc, + peekDoc, getProjectDocsAndFlushIfOld, clearProjectState, setDoc, @@ -65,6 +67,22 @@ function getDoc(req, res, next) { ) } +// return the doc from redis if present, but don't load it from mongo +function peekDoc(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + logger.log({ projectId, docId }, 'peeking at doc via http') + RedisManager.getDoc(projectId, docId, function (error, lines, version) { + if (error) { + return next(error) + } + if (lines == null || version == null) { + return next(new Errors.NotFoundError('document not found')) + } + res.json({ id: docId, lines, version }) + }) +} + function _getTotalSizeOfLines(lines) { let size = 0 for (const line of lines) { diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js index 3a926d5e0c..7bea76edd0 100644 --- a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -14,6 +14,7 @@ describe('HttpController', function () { './ProjectManager': (this.ProjectManager = {}), './ProjectFlusher': { flushAllProjects() {} }, './DeleteQueueManager': (this.DeleteQueueManager = {}), + './RedisManager': (this.RedisManager = {}), './Metrics': (this.Metrics = {}), './Errors': Errors, }, From 5e2d4d21698fb3712631278fd898f4cd89d3be2a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 2 Aug 2021 14:13:38 +0100 Subject: [PATCH 769/769] add acceptance tests for peek --- .../test/acceptance/js/PeekingADoc.js | 99 +++++++++++++++++++ .../acceptance/js/helpers/DocUpdaterClient.js | 12 +++ 2 files changed, 111 insertions(+) create mode 100644 services/document-updater/test/acceptance/js/PeekingADoc.js diff --git a/services/document-updater/test/acceptance/js/PeekingADoc.js b/services/document-updater/test/acceptance/js/PeekingADoc.js new file mode 100644 index 0000000000..43e463ca51 --- /dev/null +++ b/services/document-updater/test/acceptance/js/PeekingADoc.js @@ -0,0 +1,99 @@ +const sinon = require('sinon') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Peeking a document', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + return DocUpdaterApp.ensureRunning(done) + }) + + describe('when the document is not loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + sinon.spy(MockWebApi, 'getDocument') + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + + return DocUpdaterClient.peekDoc( + this.project_id, + this.doc_id, + (error, res, returnedDoc) => { + this.error = error + this.res = res + this.returnedDoc = returnedDoc + return done() + } + ) + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + + it('should return a 404 response', function () { + this.res.statusCode.should.equal(404) + }) + + it('should not load the document from the web API', function () { + return MockWebApi.getDocument.called.should.equal(false) + }) + }) + + describe('when the document is already loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + + 
MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        version: this.version,
+      })
+      return DocUpdaterClient.preloadDoc(
+        this.project_id,
+        this.doc_id,
+        error => {
+          if (error != null) {
+            throw error
+          }
+          sinon.spy(MockWebApi, 'getDocument')
+          return DocUpdaterClient.peekDoc(
+            this.project_id,
+            this.doc_id,
+            (error, res, returnedDoc) => {
+              this.res = res
+              this.returnedDoc = returnedDoc
+              return done()
+            }
+          )
+        }
+      )
+    })
+
+    after(function () {
+      return MockWebApi.getDocument.restore()
+    })
+
+    it('should return a 200 response', function () {
+      this.res.statusCode.should.equal(200)
+    })
+
+    it('should return the document lines', function () {
+      return this.returnedDoc.lines.should.deep.equal(this.lines)
+    })
+
+    it('should return the document version', function () {
+      return this.returnedDoc.version.should.equal(this.version)
+    })
+
+    it('should not load the document from the web API', function () {
+      return MockWebApi.getDocument.called.should.equal(false)
+    })
+  })
+})
diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
index 71e7915c0f..9a3234628c 100644
--- a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
+++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
@@ -123,6 +123,18 @@ module.exports = DocUpdaterClient = {
     DocUpdaterClient.getDoc(projectId, docId, callback)
   },
 
+  peekDoc(projectId, docId, callback) {
+    request.get(
+      `http://localhost:3003/project/${projectId}/doc/${docId}/peek`,
+      (error, res, body) => {
+        if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
+          body = JSON.parse(body)
+        }
+        callback(error, res, body)
+      }
+    )
+  },
+
   flushDoc(projectId, docId, callback) {
     request.post(
       `http://localhost:3003/project/${projectId}/doc/${docId}/flush`,
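Note on the peek feature added in the two patches above: the new route serves a document only when it is already resident in redis and responds 404 otherwise, so callers can inspect the hot state without the side effect of the plain GET route, which falls back to loading the doc from the web API. A usage sketch against a locally running document-updater (the ids are placeholders; port 3003 and the JSON shape match the test helper and controller above):

    // Usage sketch, not part of the patches: a 404 from /peek means
    // "not currently in redis", not "the doc does not exist".
    const request = require('request')

    const projectId = 'aaaaaaaaaaaaaaaaaaaaaaaa' // placeholder id
    const docId = 'bbbbbbbbbbbbbbbbbbbbbbbb' // placeholder id

    request.get(
      `http://localhost:3003/project/${projectId}/doc/${docId}/peek`,
      (error, res, body) => {
        if (error) throw error
        if (res.statusCode === 404) {
          // Unlike GET /project/:project_id/doc/:doc_id, peek does NOT
          // trigger a load from the web API / mongo.
          console.log('doc is not loaded in redis')
        } else {
          const doc = JSON.parse(body)
          console.log(doc.id, doc.version, doc.lines.length)
        }
      }
    )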